diff --git a/.gitignore b/.gitignore
index 9baaa2575..0fc77bdbd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -27,6 +27,8 @@ storybook-static
 tsconfig.tsbuildinfo
 tsp-output
 .bench/
+.ralph/
+.ralphrc
 
 # Coverage files
 *.out
diff --git a/apps/cli/cmd/flow.go b/apps/cli/cmd/flow.go
index 9a5908061..9651556fa 100644
--- a/apps/cli/cmd/flow.go
+++ b/apps/cli/cmd/flow.go
@@ -176,6 +176,9 @@ var yamlflowRunCmd = &cobra.Command{
 		&services.NodeAI,
 		&services.NodeAiProvider,
 		&services.NodeMemory,
+		&services.NodeGraphQL,
+		&services.GraphQL,
+		&services.GraphQLHeader,
 		&services.Workspace,
 		&services.Variable,
 		&services.FlowVariable,
diff --git a/apps/cli/internal/common/services.go b/apps/cli/internal/common/services.go
index 09780f11a..4a6e2ff5e 100644
--- a/apps/cli/internal/common/services.go
+++ b/apps/cli/internal/common/services.go
@@ -10,6 +10,7 @@ import (
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/scredential"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow"
+	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace"
 )
@@ -40,6 +41,11 @@ type Services struct {
 	NodeAI         sflow.NodeAIService
 	NodeAiProvider sflow.NodeAiProviderService
 	NodeMemory     sflow.NodeMemoryService
+	NodeGraphQL    sflow.NodeGraphQLService
+
+	// GraphQL
+	GraphQL       sgraphql.GraphQLService
+	GraphQLHeader sgraphql.GraphQLHeaderService
 
 	// Credentials
 	Credential scredential.CredentialService
@@ -87,6 +93,11 @@ func CreateServices(ctx context.Context, db *sql.DB, logger *slog.Logger) (*Serv
 		NodeAI:         sflow.NewNodeAIService(queries),
 		NodeAiProvider: sflow.NewNodeAiProviderService(queries),
 		NodeMemory:     sflow.NewNodeMemoryService(queries),
+		NodeGraphQL:    sflow.NewNodeGraphQLService(queries),
+
+		// GraphQL
+		GraphQL:       sgraphql.New(queries, logger),
+		GraphQLHeader: sgraphql.NewGraphQLHeaderService(queries),
 
 		// Credentials
 		Credential: scredential.NewCredentialService(queries),
diff --git a/apps/cli/internal/runner/runner.go b/apps/cli/internal/runner/runner.go
index dd83376d7..247ea6d62 100644
--- a/apps/cli/internal/runner/runner.go
+++ b/apps/cli/internal/runner/runner.go
@@ -13,6 +13,7 @@ import (
 	"github.com/the-dev-tools/dev-tools/apps/cli/internal/reporter"
 
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node"
+	"github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner"
 	"github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner"
@@ -251,6 +252,17 @@ func RunFlow(ctx context.Context, flowPtr *mflow.Flow, services RunnerServices,
 	}()
 	defer close(requestRespChan)
 
+	// Initialize GraphQL response channel
+	gqlRespChan := make(chan ngraphql.NodeGraphQLSideResp, requestBufferSize)
+	go func() {
+		for resp := range gqlRespChan {
+			if resp.Done != nil {
+				close(resp.Done)
+			}
+		}
+	}()
+	defer close(gqlRespChan)
+
 	// Build flow node map using flowbuilder
 	flowNodeMap, startNodeID, err := services.Builder.BuildNodes(
 		ctx,
@@ -259,6 +271,7 @@ func RunFlow(ctx context.Context, flowPtr *mflow.Flow, services RunnerServices,
 		nodeTimeout,
 		httpClient,
 		requestRespChan,
+		gqlRespChan,
 		services.JSClient,
 	)
 	if err != nil {
diff --git a/apps/cli/internal/runner/runner_test.go
b/apps/cli/internal/runner/runner_test.go
index 5dcc0f06c..ba6a6faea 100644
--- a/apps/cli/internal/runner/runner_test.go
+++ b/apps/cli/internal/runner/runner_test.go
@@ -113,6 +113,9 @@ func newFlowTestFixture(t *testing.T) *flowTestFixture {
 		nil, // NodeAIService - not needed for CLI tests
 		nil, // NodeAiProviderService - not needed for CLI tests
 		nil, // NodeMemoryService - not needed for CLI tests
+		nil, // NodeGraphQLService - not needed for CLI tests
+		nil, // GraphQLService - not needed for CLI tests
+		nil, // GraphQLHeaderService - not needed for CLI tests
 		&workspaceService,
 		&varService,
 		&flowVariableService,
diff --git a/packages/client/src/app/router/route-tree.gen.ts b/packages/client/src/app/router/route-tree.gen.ts
index dbbe4df20..adae841a3 100644
--- a/packages/client/src/app/router/route-tree.gen.ts
+++ b/packages/client/src/app/router/route-tree.gen.ts
@@ -13,12 +13,15 @@ import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRout
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteImport } from './../../pages/workspace/routes/workspace/$workspaceIdCan/route'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRouteImport } from './../../pages/workspace/routes/workspace/$workspaceIdCan/index'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteImport } from './../../pages/http/routes/http/$httpIdCan/route'
+import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/route'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/route'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRouteImport } from './../../pages/http/routes/http/$httpIdCan/index'
+import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/index'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/index'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRouteImport } from './../../pages/credential/routes/credential/$credentialIdCan/index'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/history'
 import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRouteImport } from './../../pages/http/routes/http/$httpIdCan/delta.$deltaHttpIdCan'
+import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan'
 
 const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRoute =
   dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRouteImport.update({
@@ -52,6 +55,15 @@ const
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport.update( + { + id: '/(graphql)/graphql/$graphqlIdCan', + path: '/graphql/$graphqlIdCan', + getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute, + } as any, + ) const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute = dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteImport.update( { @@ -70,6 +82,15 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport.update( + { + id: '/', + path: '/', + getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute, + } as any, + ) const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute = dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRouteImport.update( { @@ -106,17 +127,29 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport.update( + { + id: '/delta/$deltaGraphqlIdCan', + path: '/delta/$deltaGraphqlIdCan', + getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute, + } as any, + ) export interface FileRoutesByFullPath { '/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRoute '/workspace/$workspaceIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/http/$httpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/flow/$flowIdCan/history': typeof 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute '/workspace/$workspaceIdCan/credential/$credentialIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/workspace/$workspaceIdCan/http/$httpIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRoutesByTo { @@ -125,7 +158,9 @@ export interface FileRoutesByTo { '/workspace/$workspaceIdCan/flow/$flowIdCan/history': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute '/workspace/$workspaceIdCan/credential/$credentialIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/workspace/$workspaceIdCan/http/$httpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRoutesById { @@ -134,11 +169,14 @@ export interface FileRoutesById { '/(dashboard)/(workspace)/workspace/$workspaceIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteWithChildren '/(dashboard)/(workspace)/workspace/$workspaceIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren 
'/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(credential)/credential/$credentialIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRouteTypes { @@ -148,11 +186,14 @@ export interface FileRouteTypes { | '/workspace/$workspaceIdCan' | '/workspace/$workspaceIdCan/' | '/workspace/$workspaceIdCan/flow/$flowIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan/history' | '/workspace/$workspaceIdCan/credential/$credentialIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan/' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/' | '/workspace/$workspaceIdCan/http/$httpIdCan/' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan' fileRoutesByTo: FileRoutesByTo to: @@ -161,7 +202,9 @@ export interface FileRouteTypes { | '/workspace/$workspaceIdCan/flow/$flowIdCan/history' | '/workspace/$workspaceIdCan/credential/$credentialIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan' id: | '__root__' @@ -169,11 +212,14 @@ export interface FileRouteTypes { | '/(dashboard)/(workspace)/workspace/$workspaceIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan' + | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(credential)/credential/$credentialIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/' + | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/' + | 
'/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/delta/$deltaHttpIdCan' fileRoutesById: FileRoutesById } @@ -212,6 +258,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteImport parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute } + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan' + path: '/graphql/$graphqlIdCan' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute + } '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan': { id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan' path: '/flow/$flowIdCan' @@ -226,6 +279,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRouteImport parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute } + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/' + path: '/' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute + } '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/': { id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/' path: '/' @@ -254,6 +314,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRouteImport parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute } + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' + path: '/delta/$deltaGraphqlIdCan' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute + } } } @@ -275,6 +342,24 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteChildren, ) +interface 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren { + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute +} + +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren = + { + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute: + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute, + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute: + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute, + } + +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute._addFileChildren( + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren, + ) + interface dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteChildren { dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute @@ -296,6 +381,7 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute interface dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteChildren { dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute } @@ -306,6 +392,8 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspace dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren, + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute: + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute: diff --git a/packages/client/src/features/file-system/index.tsx b/packages/client/src/features/file-system/index.tsx index d52daeccd..72bb20001 100644 --- a/packages/client/src/features/file-system/index.tsx +++ b/packages/client/src/features/file-system/index.tsx @@ -29,6 +29,7 @@ import { FolderSchema, } from '@the-dev-tools/spec/buf/api/file_system/v1/file_system_pb'; import { FlowSchema, FlowService } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { GraphQLSchema as GraphQLItemSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; import { HttpDeltaSchema, HttpMethod, HttpSchema, HttpService } from '@the-dev-tools/spec/buf/api/http/v1/http_pb'; import { CredentialAnthropicCollectionSchema, @@ -38,6 +39,7 @@ import { } from '@the-dev-tools/spec/tanstack-db/v1/api/credential'; import { FileCollectionSchema, FolderCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/file_system'; import { FlowCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; import { HttpCollectionSchema, HttpDeltaCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/http'; import { Button } from '@the-dev-tools/ui/button'; import { FlowsIcon, FolderOpenedIcon } from '@the-dev-tools/ui/icons'; @@ -84,6 +86,7 @@ export const FileCreateMenu = ({ parentFolderId, ...props }: FileCreateMenuProps const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); const folderCollection = useApiCollection(FolderCollectionSchema); + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); const httpCollection = useApiCollection(HttpCollectionSchema); const flowCollection = useApiCollection(FlowCollectionSchema); @@ -116,6 +119,22 @@ export const FileCreateMenu = ({ parentFolderId, ...props }: FileCreateMenuProps HTTP request + { + const graphqlUlid = Ulid.generate(); + graphqlCollection.utils.insert({ graphqlId: graphqlUlid.bytes, name: 'New GraphQL request' }); + await insertFile({ fileId: graphqlUlid.bytes, kind: FileKind.GRAPH_Q_L }); + if (toNavigate) + await navigate({ + from: router.routesById[routes.dashboard.workspace.route.id].fullPath, + params: { graphqlIdCan: graphqlUlid.toCanonical() }, + to: 
router.routesById[routes.dashboard.workspace.graphql.route.id].fullPath, + }); + }} + > + GraphQL request + + { const flowUlid = Ulid.generate(); @@ -332,6 +351,7 @@ const FileItem = ({ id }: FileItemProps) => { Match.when(FileKind.HTTP, () => ), Match.when(FileKind.HTTP_DELTA, () => ), Match.when(FileKind.FLOW, () => ), + Match.when(FileKind.GRAPH_Q_L, () => ), Match.when(FileKind.CREDENTIAL, () => ), Match.orElse(() => null), ); @@ -884,6 +904,93 @@ const FlowFile = ({ id }: FileItemProps) => { return toNavigate ? : ; }; +const GraphQLFile = ({ id }: FileItemProps) => { + const router = useRouter(); + const matchRoute = useMatchRoute(); + + const fileCollection = useApiCollection(FileCollectionSchema); + + const { fileId: graphqlId } = useMemo(() => fileCollection.utils.parseKeyUnsafe(id), [fileCollection.utils, id]); + + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + + const { name } = + useLiveQuery( + (_) => + _.from({ item: graphqlCollection }) + .where((_) => eq(_.item.graphqlId, graphqlId)) + .select((_) => pick(_.item, 'name')) + .findOne(), + [graphqlCollection, graphqlId], + ).data ?? create(GraphQLItemSchema); + + const { containerRef, navigate: toNavigate = false, showControls } = useContext(FileTreeContext); + + const { escapeRef, escapeRender } = useEscapePortal(containerRef); + + const { edit, isEditing, textFieldProps } = useEditableTextState({ + onSuccess: (_) => graphqlCollection.utils.update({ graphqlId, name: _ }), + value: name, + }); + + const { menuProps, menuTriggerProps, onContextMenu } = useContextMenuState(); + + const route = { + from: router.routesById[routes.dashboard.workspace.route.id].fullPath, + params: { graphqlIdCan: Ulid.construct(graphqlId).toCanonical() }, + to: router.routesById[routes.dashboard.workspace.graphql.route.id].fullPath, + } satisfies ToOptions; + + const content = ( + <> + GQL + + + {name} + + + {isEditing && + escapeRender( + , + )} + + {showControls && ( + + + + + void edit()}>Rename + + pipe(fileCollection.utils.parseKeyUnsafe(id), (_) => fileCollection.utils.delete(_))} + variant='danger' + > + Delete + + + + )} + + ); + + const props = { + children: content, + className: toNavigate && matchRoute(route) !== false ? tw`bg-neutral` : '', + id, + onContextMenu, + textValue: name, + } satisfies TreeItemProps; + + return toNavigate ? 
: ; +}; + const CredentialFile = ({ id }: FileItemProps) => { const router = useRouter(); const matchRoute = useMatchRoute(); diff --git a/packages/client/src/pages/flow/add-node.tsx b/packages/client/src/pages/flow/add-node.tsx index 70d70db11..ed2f9fcda 100644 --- a/packages/client/src/pages/flow/add-node.tsx +++ b/packages/client/src/pages/flow/add-node.tsx @@ -6,7 +6,7 @@ import * as RAC from 'react-aria-components'; import { FiArrowLeft, FiBriefcase, FiChevronRight, FiTerminal, FiX } from 'react-icons/fi'; import { TbRobotFace } from 'react-icons/tb'; import { FileKind } from '@the-dev-tools/spec/buf/api/file_system/v1/file_system_pb'; -import { HandleKind, NodeHttpInsertSchema, NodeKind } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { HandleKind, NodeGraphQLInsertSchema, NodeHttpInsertSchema, NodeKind } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; import { HttpMethod } from '@the-dev-tools/spec/buf/api/http/v1/http_pb'; import { FileCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/file_system'; import { @@ -16,9 +16,11 @@ import { NodeConditionCollectionSchema, NodeForCollectionSchema, NodeForEachCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, NodeJsCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; import { HttpCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/http'; import { Button } from '@the-dev-tools/ui/button'; import { FlowsIcon, ForIcon, IfIcon, SendRequestIcon } from '@the-dev-tools/ui/icons'; @@ -247,6 +249,13 @@ const AddCoreNodeSidebar = (props: AddNodeSidebarProps) => { onAction={() => void setSidebar?.((_) => )} title='HTTP Request' /> + + } + onAction={() => void setSidebar?.((_) => )} + title='GraphQL Request' + /> ); @@ -318,6 +327,69 @@ const AddHttpRequestNodeSidebar = ({ handleKind, position, previous, sourceId, t ); }; +const AddGraphQLRequestNodeSidebar = ({ handleKind, position, previous, sourceId, targetId }: AddNodeSidebarProps) => { + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + + const insertNode = useInsertNode(); + + const fileCollection = useApiCollection(FileCollectionSchema); + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + return ( + <> + + +
+ +
+ + { + const nodeId = Ulid.generate().bytes; + const data: MessageInitShape = { nodeId }; + + const file = fileCollection.get(key.toString())!; + + if (file.kind === FileKind.GRAPH_Q_L) { + data.graphqlId = file.fileId; + } else { + return; + } + + nodeGraphQLCollection.utils.insert(data); + insertNode({ handleKind, kind: NodeKind.GRAPH_Q_L, name: 'graphql', nodeId, position, sourceId, targetId }); + }} + showControls + /> + + ); +}; + const AddAiNode = ({ handleKind, position, sourceId, targetId }: AddNodeSidebarProps) => { const insertNode = useInsertNode(); diff --git a/packages/client/src/pages/flow/edit.tsx b/packages/client/src/pages/flow/edit.tsx index 094ccbb72..c9cc947d5 100644 --- a/packages/client/src/pages/flow/edit.tsx +++ b/packages/client/src/pages/flow/edit.tsx @@ -18,6 +18,7 @@ import { FlowCollectionSchema, FlowVariableCollectionSchema, NodeCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; import { Button, ButtonAsRouteLink } from '@the-dev-tools/ui/button'; @@ -60,6 +61,7 @@ import { import { ConditionNode, ConditionSettings } from './nodes/condition'; import { ForNode, ForSettings } from './nodes/for'; import { ForEachNode, ForEachSettings } from './nodes/for-each'; +import { GraphQLNode, GraphQLSettings } from './nodes/graphql'; import { HttpNode, HttpSettings } from './nodes/http'; import { JavaScriptNode, JavaScriptSettings } from './nodes/javascript'; import { ManualStartNode } from './nodes/manual-start'; @@ -72,6 +74,7 @@ export const nodeTypes: XF.NodeTypes = { [NodeKind.CONDITION]: ConditionNode, [NodeKind.FOR]: ForNode, [NodeKind.FOR_EACH]: ForEachNode, + [NodeKind.GRAPH_Q_L]: GraphQLNode, [NodeKind.HTTP]: HttpNode, [NodeKind.JS]: JavaScriptNode, [NodeKind.MANUAL_START]: ManualStartNode, @@ -108,6 +111,7 @@ export const Flow = ({ children }: PropsWithChildren) => { const flowCollection = useApiCollection(FlowCollectionSchema); const edgeCollection = useApiCollection(EdgeCollectionSchema); const nodeCollection = useApiCollection(NodeCollectionSchema); + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); const nodeHttpCollection = useApiCollection(NodeHttpCollectionSchema); const nodeEditDialog = useNodeEditDialog(); @@ -214,6 +218,23 @@ export const Flow = ({ children }: PropsWithChildren) => { position, }); } + + if (file?.kind === FileKind.GRAPH_Q_L) { + const nodeId = Ulid.generate().bytes; + + nodeGraphQLCollection.utils.insert({ + graphqlId: file.fileId, + nodeId, + }); + + nodeCollection.utils.insert({ + flowId, + kind: NodeKind.GRAPH_Q_L, + name: `graphql_${getNodes().length}`, + nodeId, + position, + }); + } }, ref, }); @@ -610,6 +631,7 @@ const useNodeEditDialog = () => { Match.when({ kind: NodeKind.FOR }, (_) => ), Match.when({ kind: NodeKind.JS }, (_) => ), Match.when({ kind: NodeKind.HTTP }, (_) => ), + Match.when({ kind: NodeKind.GRAPH_Q_L }, (_) => ), Match.when({ kind: NodeKind.AI }, (_) => ), Match.when({ kind: NodeKind.AI_PROVIDER }, (_) => ), Match.when({ kind: NodeKind.AI_MEMORY }, (_) => ), diff --git a/packages/client/src/pages/flow/nodes/graphql.tsx b/packages/client/src/pages/flow/nodes/graphql.tsx new file mode 100644 index 000000000..02028a612 --- /dev/null +++ b/packages/client/src/pages/flow/nodes/graphql.tsx @@ -0,0 +1,159 @@ +import { create } from '@bufbuild/protobuf'; +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { useRouter } from '@tanstack/react-router'; +import * as XF from '@xyflow/react'; +import { Ulid 
} from 'id128'; +import { use } from 'react'; +import { FiExternalLink } from 'react-icons/fi'; +import { NodeGraphQLSchema } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { + NodeExecutionCollectionSchema, + NodeGraphQLCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { GraphQLCollectionSchema, GraphQLDeltaCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { ButtonAsLink } from '@the-dev-tools/ui/button'; +import { SendRequestIcon } from '@the-dev-tools/ui/icons'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useDeltaState } from '~/features/delta'; +import { ReferenceContext } from '~/features/expression'; +import { GraphQLRequestPanel, GraphQLResponseInfo, GraphQLResponsePanel } from '~/pages/graphql/@x/flow'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { FlowContext } from '../context'; +import { Handle } from '../handle'; +import { NodeSettingsBody, NodeSettingsOutputProps, NodeSettingsProps, SimpleNode } from '../node'; + +export const GraphQLNode = ({ id, selected }: XF.NodeProps) => { + const nodeId = Ulid.fromCanonical(id).bytes; + + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + const { deltaGraphqlId, graphqlId } = + useLiveQuery( + (_) => + _.from({ item: nodeGraphQLCollection }) + .where((_) => eq(_.item.nodeId, nodeId)) + .select((_) => pick(_.item, 'graphqlId', 'deltaGraphqlId')) + .findOne(), + [nodeGraphQLCollection, nodeId], + ).data ?? create(NodeGraphQLSchema); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + }; + + const [name] = useDeltaState({ ...deltaOptions, valueKey: 'name' }); + + return ( + + + + + } + icon={} + nodeId={nodeId} + selected={selected} + title='GraphQL' + > +
+
GQL
+
{name}
+
+
+ ); +}; + +export const GraphQLSettings = ({ nodeId }: NodeSettingsProps) => { + const router = useRouter(); + + const { isReadOnly = false } = use(FlowContext); + + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + const { workspaceIdCan } = routes.dashboard.workspace.route.useParams(); + + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + const { deltaGraphqlId, graphqlId } = + useLiveQuery( + (_) => + _.from({ item: nodeGraphQLCollection }) + .where((_) => eq(_.item.nodeId, nodeId)) + .select((_) => pick(_.item, 'graphqlId', 'deltaGraphqlId')) + .findOne(), + [nodeGraphQLCollection, nodeId], + ).data ?? create(NodeGraphQLSchema); + + return ( + } + settingsHeader={ + + + Open GraphQL + + } + title='GraphQL request' + > + + + + + ); +}; + +const Output = ({ nodeExecutionId }: NodeSettingsOutputProps) => { + const collection = useApiCollection(NodeExecutionCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.nodeExecutionId, nodeExecutionId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .findOne(), + [collection, nodeExecutionId], + ).data ?? {}; + + if (!graphqlResponseId) return null; + + return ( +
+ + +
+ ); +}; diff --git a/packages/client/src/pages/graphql/@x/flow.tsx b/packages/client/src/pages/graphql/@x/flow.tsx new file mode 100644 index 000000000..181c27543 --- /dev/null +++ b/packages/client/src/pages/graphql/@x/flow.tsx @@ -0,0 +1,2 @@ +export { GraphQLRequestPanel } from '../request/panel'; +export { GraphQLResponseInfo, GraphQLResponsePanel } from '../response'; diff --git a/packages/client/src/pages/graphql/@x/workspace.tsx b/packages/client/src/pages/graphql/@x/workspace.tsx new file mode 100644 index 000000000..13825d610 --- /dev/null +++ b/packages/client/src/pages/graphql/@x/workspace.tsx @@ -0,0 +1,3 @@ +import { resolveRoutesTo } from '../../../shared/lib/router'; + +export const resolveRoutesFrom = resolveRoutesTo(import.meta.dirname, '../routes'); diff --git a/packages/client/src/pages/graphql/history.tsx b/packages/client/src/pages/graphql/history.tsx new file mode 100644 index 000000000..292add948 --- /dev/null +++ b/packages/client/src/pages/graphql/history.tsx @@ -0,0 +1,159 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { Suspense } from 'react'; +import { Collection, Dialog, Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { Panel, Group as PanelGroup, useDefaultLayout } from 'react-resizable-panels'; +import { twJoin } from 'tailwind-merge'; +import { + GraphQLResponseCollectionSchema, + GraphQLVersionCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Modal } from '@the-dev-tools/ui/modal'; +import { PanelResizeHandle } from '@the-dev-tools/ui/resizable-panel'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { GraphQLRequestPanel } from './request/panel'; +import { GraphQLUrl } from './request/url'; +import { GraphQLResponseInfo, GraphQLResponsePanel } from './response'; + +export interface HistoryModalProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; +} + +export const HistoryModal = ({ deltaGraphqlId, graphqlId }: HistoryModalProps) => { + 'use no memo'; + + const collection = useApiCollection(GraphQLVersionCollectionSchema); + + const { data: versions } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlId, deltaGraphqlId ?? graphqlId)) + .orderBy((_) => _.item.graphqlVersionId, 'desc'), + [collection, deltaGraphqlId, graphqlId], + ); + + return ( + + + +
+
+
Response History
+
History of your GraphQL responses
+
+
+
+
+
+
+
+
+
+ +
Current Version
+ +
+
+
+
+
+ +
+ {versions.length} previous responses +
+ +
+ + + {(_) => ( + + twJoin( + tw` + flex cursor-pointer items-center gap-1.5 rounded-md px-3 py-1.5 text-md leading-5 + font-semibold text-on-neutral + `, + isSelected && tw`bg-neutral`, + ) + } + id={collection.utils.getKey(_)} + > + {Ulid.construct(_.graphqlVersionId).time.toLocaleString()} + + )} + +
+
+ +
+ + {(_) => ( + + + +
+ } + > + + + + )} + +
+ +
+
+ ); +}; + +interface VersionProps { + graphqlId: Uint8Array; +} + +const Version = ({ graphqlId }: VersionProps) => { + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlId, graphqlId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .orderBy((_) => _.item.graphqlResponseId, 'desc') + .limit(1) + .findOne(), + [responseCollection, graphqlId], + ).data ?? {}; + + const endpointVersionsLayout = useDefaultLayout({ id: 'endpoint-versions' }); + + return ( + + +
+ +
+ + +
+ + {graphqlResponseId && ( + <> + + + + + + + + + )} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/page.tsx b/packages/client/src/pages/graphql/page.tsx new file mode 100644 index 000000000..4c9bc5c0c --- /dev/null +++ b/packages/client/src/pages/graphql/page.tsx @@ -0,0 +1,70 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Panel, Group as PanelGroup, useDefaultLayout } from 'react-resizable-panels'; +import { GraphQLResponseCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { PanelResizeHandle } from '@the-dev-tools/ui/resizable-panel'; +import { ReferenceContext } from '~/features/expression'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { GraphQLRequestPanel } from './request/panel'; +import { GraphQLTopBar } from './request/top-bar'; +import { GraphQLResponseInfo, GraphQLResponsePanel } from './response'; + +export const GraphQLPage = () => { + const { graphqlId } = routes.dashboard.workspace.graphql.route.useRouteContext(); + return ; +}; + +export const GraphQLDeltaPage = () => { + const { deltaGraphqlId, graphqlId } = routes.dashboard.workspace.graphql.delta.useRouteContext(); + return ; +}; + +interface PageProps { + deltaGraphqlId?: Uint8Array; + graphqlId: Uint8Array; +} + +const Page = ({ deltaGraphqlId, graphqlId }: PageProps) => { + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlId, deltaGraphqlId ?? graphqlId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .orderBy((_) => _.item.graphqlResponseId, 'desc') + .limit(1) + .findOne(), + [responseCollection, deltaGraphqlId, graphqlId], + ).data ?? 
{}; + + const endpointLayout = useDefaultLayout({ id: 'graphql-endpoint' }); + + return ( + + + + + + + + + + {graphqlResponseId && ( + <> + + + + + + + + + )} + + ); +}; diff --git a/packages/client/src/pages/graphql/request/assert.tsx b/packages/client/src/pages/graphql/request/assert.tsx new file mode 100644 index 000000000..cff648a46 --- /dev/null +++ b/packages/client/src/pages/graphql/request/assert.tsx @@ -0,0 +1,115 @@ +import { eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { useDragAndDrop } from 'react-aria-components'; +import { FiPlus } from 'react-icons/fi'; +import { + GraphQLAssertCollectionSchema, + GraphQLAssertDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { DropIndicatorHorizontal } from '@the-dev-tools/ui/reorder'; +import { Table, TableBody, TableCell, TableColumn, TableFooter, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { ColumnActionDeleteDelta, DeltaCheckbox, DeltaReference } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { getNextOrder, handleCollectionReorder, pick } from '~/shared/lib'; + +export interface GraphQLAssertTableProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLAssertTable = ({ + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLAssertTableProps) => { + const collection = useApiCollection(GraphQLAssertCollectionSchema); + + const items = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .orderBy((_) => _.item.order) + .select((_) => pick(_.item, 'graphqlAssertId', 'order')), + [collection, deltaGraphqlId, graphqlId], + ).data.map((_) => pick(_, 'graphqlAssertId')); + + const deltaColumnOptions = { + deltaKey: 'deltaGraphqlAssertId', + deltaParentKey: { graphqlId: deltaGraphqlId }, + deltaSchema: GraphQLAssertDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originKey: 'graphqlAssertId', + originSchema: GraphQLAssertCollectionSchema, + } as const; + + const { dragAndDropHooks } = useDragAndDrop({ + getItems: (keys) => [...keys].map((key) => ({ key: key.toString() })), + onReorder: handleCollectionReorder(collection), + renderDropIndicator: () => , + }); + + return ( + + + + Value + {!isReadOnly && } + + + + {({ graphqlAssertId }) => ( + + + + + + + + + + {!isReadOnly && ( + + + + )} + + )} + + + {!isReadOnly && ( + + + + )} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/request/header.tsx b/packages/client/src/pages/graphql/request/header.tsx new file mode 100644 index 000000000..5a205a886 --- /dev/null +++ b/packages/client/src/pages/graphql/request/header.tsx @@ -0,0 +1,140 @@ +import { eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { useDragAndDrop } from 'react-aria-components'; +import { FiPlus } from 'react-icons/fi'; +import { + GraphQLHeaderCollectionSchema, + GraphQLHeaderDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { DropIndicatorHorizontal } from '@the-dev-tools/ui/reorder'; +import { Table, TableBody, TableCell, TableColumn, TableFooter, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { ColumnActionDeleteDelta, DeltaCheckbox, DeltaReference, DeltaTextField } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { getNextOrder, handleCollectionReorder, pick } from '~/shared/lib'; + +export interface GraphQLHeaderTableProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + hideDescription?: boolean; + isReadOnly?: boolean; +} + +export const GraphQLHeaderTable = ({ + deltaGraphqlId, + graphqlId, + hideDescription = false, + isReadOnly = false, +}: GraphQLHeaderTableProps) => { + const collection = useApiCollection(GraphQLHeaderCollectionSchema); + + const items = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .orderBy((_) => _.item.order) + .select((_) => pick(_.item, 'graphqlHeaderId', 'order')), + [collection, deltaGraphqlId, graphqlId], + ).data.map((_) => pick(_, 'graphqlHeaderId')); + + const deltaColumnOptions = { + deltaKey: 'deltaGraphqlHeaderId', + deltaParentKey: { graphqlId: deltaGraphqlId }, + deltaSchema: GraphQLHeaderDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originKey: 'graphqlHeaderId', + originSchema: GraphQLHeaderCollectionSchema, + } as const; + + const { dragAndDropHooks } = useDragAndDrop({ + getItems: (keys) => [...keys].map((key) => ({ key: key.toString() })), + onReorder: handleCollectionReorder(collection), + renderDropIndicator: () => , + }); + + return ( + + + + Key + Value + {!hideDescription && Description} + {!isReadOnly && } + + + + {({ graphqlHeaderId }) => ( + + + + + + + + + + + + + + {!hideDescription && ( + + + + )} + + {!isReadOnly && ( + + + + )} + + )} + + + {!isReadOnly && ( + + + + )} +
+ ); +}; + diff --git a/packages/client/src/pages/graphql/request/index.tsx b/packages/client/src/pages/graphql/request/index.tsx new file mode 100644 index 000000000..0dcfe6553 --- /dev/null +++ b/packages/client/src/pages/graphql/request/index.tsx @@ -0,0 +1,2 @@ +export { GraphQLRequestPanel, type GraphQLRequestPanelProps } from './panel'; +export { GraphQLTopBar, type GraphQLTopBarProps } from './top-bar'; diff --git a/packages/client/src/pages/graphql/request/panel.tsx b/packages/client/src/pages/graphql/request/panel.tsx new file mode 100644 index 000000000..35901cb37 --- /dev/null +++ b/packages/client/src/pages/graphql/request/panel.tsx @@ -0,0 +1,110 @@ +import { count, eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Suspense } from 'react'; +import { Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { twMerge } from 'tailwind-merge'; +import { + GraphQLAssertCollectionSchema, + GraphQLHeaderCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { GraphQLAssertTable } from './assert'; +import { GraphQLHeaderTable } from './header'; +import { GraphQLQueryEditor } from './query-editor'; +import { GraphQLVariablesEditor } from './variables-editor'; + +export interface GraphQLRequestPanelProps { + className?: string; + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLRequestPanel = ({ + className, + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLRequestPanelProps) => { + const headerCollection = useApiCollection(GraphQLHeaderCollectionSchema); + + const { headerCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: headerCollection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .select((_) => ({ headerCount: count(_.item.graphqlId) })) + .findOne(), + [deltaGraphqlId, graphqlId, headerCollection], + ).data ?? {}; + + const assertCollection = useApiCollection(GraphQLAssertCollectionSchema); + + const { assertCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: assertCollection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .select((_) => ({ assertCount: count(_.item.graphqlId) })) + .findOne(), + [assertCollection, deltaGraphqlId, graphqlId], + ).data ?? 
{}; + + const tabClass = ({ isSelected }: { isSelected: boolean }) => + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-1.5 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ); + + return ( + + + + Query + + + + Variables + + + + Headers + {headerCount > 0 && ({headerCount})} + + + + Assertion + {assertCount > 0 && ({assertCount})} + + + + + + + } + > + + + + + + + + + + + + + + + + + + ); +}; diff --git a/packages/client/src/pages/graphql/request/query-editor.tsx b/packages/client/src/pages/graphql/request/query-editor.tsx new file mode 100644 index 000000000..e73993f1c --- /dev/null +++ b/packages/client/src/pages/graphql/request/query-editor.tsx @@ -0,0 +1,46 @@ +import CodeMirror from '@uiw/react-codemirror'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { useDeltaState } from '~/features/delta'; + +export interface GraphQLQueryEditorProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLQueryEditor = ({ + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLQueryEditorProps) => { + const { theme } = useTheme(); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + valueKey: 'query', + } as const; + + const [value, setValue] = useDeltaState(deltaOptions); + + return ( + void setValue(_)} + placeholder='Enter your GraphQL query...' + readOnly={isReadOnly} + theme={theme} + value={value ?? 
''} + /> + ); +}; diff --git a/packages/client/src/pages/graphql/request/top-bar.tsx b/packages/client/src/pages/graphql/request/top-bar.tsx new file mode 100644 index 000000000..19229b958 --- /dev/null +++ b/packages/client/src/pages/graphql/request/top-bar.tsx @@ -0,0 +1,131 @@ +import { Array, pipe } from 'effect'; +import { useState, useTransition } from 'react'; +import { Button as AriaButton, DialogTrigger, MenuTrigger } from 'react-aria-components'; +import { FiClock, FiMoreHorizontal } from 'react-icons/fi'; +import { GraphQLService } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { Menu, MenuItem, useContextMenuState } from '@the-dev-tools/ui/menu'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { TextInputField, useEditableTextState } from '@the-dev-tools/ui/text-field'; +import { ReferenceField } from '~/features/expression'; +import { request, useApiCollection } from '~/shared/api'; +import { routes } from '~/shared/routes'; +import { HistoryModal } from '../history'; + +export interface GraphQLTopBarProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; +} + +export const GraphQLTopBar = ({ deltaGraphqlId, graphqlId }: GraphQLTopBarProps) => { + const { transport } = routes.root.useRouteContext(); + + const collection = useApiCollection(GraphQLCollectionSchema); + + const item = collection.get(collection.utils.getKey({ graphqlId })); + + const { menuProps, menuTriggerProps, onContextMenu } = useContextMenuState(); + + const { edit, isEditing, textFieldProps } = useEditableTextState({ + onSuccess: (_) => { + if (_ === item?.name) return; + collection.utils.update({ graphqlId, name: _ }); + }, + value: item?.name ?? '', + }); + + const [isSending, startTransition] = useTransition(); + + const [urlState, setUrlState] = useState(); + + return ( + <> +
+
+ {isEditing ? ( + + ) : ( + void edit()} + > + {item?.name} + + )} +
+ + + + + + + + + + + + void edit()}>Rename + + collection.utils.delete({ graphqlId })} variant='danger'> + Delete + + + +
+ +
+ { + if (urlState !== undefined) { + collection.utils.update({ graphqlId, url: urlState }); + } + }} + onChange={(_) => void setUrlState(_)} + value={urlState ?? item?.url ?? ''} + /> + + +
+ + ); +}; diff --git a/packages/client/src/pages/graphql/request/url.tsx b/packages/client/src/pages/graphql/request/url.tsx new file mode 100644 index 000000000..84ad75b08 --- /dev/null +++ b/packages/client/src/pages/graphql/request/url.tsx @@ -0,0 +1,37 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { ReferenceField } from '~/features/expression'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLUrlProps { + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLUrl = ({ graphqlId, isReadOnly = false }: GraphQLUrlProps) => { + const collection = useApiCollection(GraphQLCollectionSchema); + + const { url } = + useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlId, graphqlId)) + .select((_) => pick(_.item, 'url')) + .findOne(), + [collection, graphqlId], + ).data ?? {}; + + return ( +
+ +
+ ); +}; diff --git a/packages/client/src/pages/graphql/request/variables-editor.tsx b/packages/client/src/pages/graphql/request/variables-editor.tsx new file mode 100644 index 000000000..e0b8b8db5 --- /dev/null +++ b/packages/client/src/pages/graphql/request/variables-editor.tsx @@ -0,0 +1,51 @@ +import { json } from '@codemirror/lang-json'; +import CodeMirror from '@uiw/react-codemirror'; +import { useMemo } from 'react'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { useDeltaState } from '~/features/delta'; + +export interface GraphQLVariablesEditorProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLVariablesEditor = ({ + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLVariablesEditorProps) => { + const { theme } = useTheme(); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + valueKey: 'variables', + } as const; + + const [value, setValue] = useDeltaState(deltaOptions); + + const extensions = useMemo(() => [json()], []); + + return ( + void setValue(_)} + placeholder='{"key": "value"}' + readOnly={isReadOnly} + theme={theme} + value={value ?? ''} + /> + ); +}; diff --git a/packages/client/src/pages/graphql/response/assert.tsx b/packages/client/src/pages/graphql/response/assert.tsx new file mode 100644 index 000000000..2b99ea19d --- /dev/null +++ b/packages/client/src/pages/graphql/response/assert.tsx @@ -0,0 +1,42 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Fragment } from 'react/jsx-runtime'; +import { twJoin } from 'tailwind-merge'; +import { GraphQLResponseAssertCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLAssertTableProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLAssertTable = ({ graphqlResponseId }: GraphQLAssertTableProps) => { + const collection = useApiCollection(GraphQLResponseAssertCollectionSchema); + + const { data: items } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'graphqlResponseAssertId', 'value', 'success')), + [collection, graphqlResponseId], + ); + + return ( +
+ {items.map((_) => ( + +
+ {_.success ? 'Pass' : 'Fail'} +
+ + {_.value} +
+ ))} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/response/body.tsx b/packages/client/src/pages/graphql/response/body.tsx new file mode 100644 index 000000000..736f5de4c --- /dev/null +++ b/packages/client/src/pages/graphql/response/body.tsx @@ -0,0 +1,45 @@ +import { create } from '@bufbuild/protobuf'; +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { useQuery } from '@tanstack/react-query'; +import CodeMirror from '@uiw/react-codemirror'; +import { GraphQLResponseSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { GraphQLResponseCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { prettierFormatQueryOptions, useCodeMirrorLanguageExtensions } from '~/features/expression'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLResponseBodyProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseBody = ({ graphqlResponseId }: GraphQLResponseBodyProps) => { + const { theme } = useTheme(); + const collection = useApiCollection(GraphQLResponseCollectionSchema); + + const { body } = + useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'body')) + .findOne(), + [collection, graphqlResponseId], + ).data ?? create(GraphQLResponseSchema); + + const { data: prettierBody } = useQuery(prettierFormatQueryOptions({ language: 'json', text: body })); + const extensions = useCodeMirrorLanguageExtensions('json'); + + return ( + + ); +}; diff --git a/packages/client/src/pages/graphql/response/header.tsx b/packages/client/src/pages/graphql/response/header.tsx new file mode 100644 index 000000000..46dc04424 --- /dev/null +++ b/packages/client/src/pages/graphql/response/header.tsx @@ -0,0 +1,40 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { GraphQLResponseHeaderCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Table, TableBody, TableCell, TableColumn, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLResponseHeaderTableProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseHeaderTable = ({ graphqlResponseId }: GraphQLResponseHeaderTableProps) => { + const collection = useApiCollection(GraphQLResponseHeaderCollectionSchema); + + const { data: items } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'key', 'value')), + [collection, graphqlResponseId], + ); + + return ( + + + Key + Value + + + + {(_) => ( + + {_.key} + {_.value} + + )} + +
+ ); +}; diff --git a/packages/client/src/pages/graphql/response/index.tsx b/packages/client/src/pages/graphql/response/index.tsx new file mode 100644 index 000000000..84e19d0d0 --- /dev/null +++ b/packages/client/src/pages/graphql/response/index.tsx @@ -0,0 +1,187 @@ +import { create } from '@bufbuild/protobuf'; +import { count, eq, useLiveQuery } from '@tanstack/react-db'; +import { Duration, pipe } from 'effect'; +import { ReactNode, Suspense } from 'react'; +import { Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { twJoin, twMerge } from 'tailwind-merge'; +import { GraphQLResponseSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { + GraphQLResponseAssertCollectionSchema, + GraphQLResponseCollectionSchema, + GraphQLResponseHeaderCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Separator } from '@the-dev-tools/ui/separator'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { formatSize } from '@the-dev-tools/ui/utils'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { GraphQLAssertTable } from './assert'; +import { GraphQLResponseBody } from './body'; +import { GraphQLResponseHeaderTable } from './header'; + +export interface GraphQLResponseInfoProps { + className?: string; + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseInfo = ({ className, graphqlResponseId }: GraphQLResponseInfoProps) => { + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { duration, size, status } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'duration', 'size', 'status')) + .findOne(), + [responseCollection, graphqlResponseId], + ).data ?? create(GraphQLResponseSchema); + + return ( +
+
+ Status: + {status} +
+ + + +
+ Time: + {pipe(duration, Duration.millis, Duration.format)} +
+ + + +
+ Size: + {formatSize(size)} +
+
+ ); +}; + +export interface GraphQLResponsePanelProps { + children?: ReactNode; + className?: string; + fullWidth?: boolean; + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponsePanel = ({ + children, + className, + fullWidth = false, + graphqlResponseId, +}: GraphQLResponsePanelProps) => { + const headerCollection = useApiCollection(GraphQLResponseHeaderCollectionSchema); + + const { headerCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: headerCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => ({ headerCount: count(_.item.graphqlResponseHeaderId) })) + .findOne(), + [headerCollection, graphqlResponseId], + ).data ?? {}; + + const assertCollection = useApiCollection(GraphQLResponseAssertCollectionSchema); + + const { assertCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: assertCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => ({ assertCount: count(_.item.graphqlResponseAssertId) })) + .findOne(), + [assertCollection, graphqlResponseId], + ).data ?? {}; + + return ( + +
+ + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='body' + > + Body + + + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='headers' + > + Headers + {headerCount > 0 && ({headerCount})} + + + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='assertions' + > + Assertion + {assertCount > 0 && ({assertCount})} + + + +
+ + {children} +
+ +
+ + +
+ } + > + + + + + + + + + + + + +
+
+ ); +}; diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx new file mode 100644 index 000000000..59968f9a5 --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx @@ -0,0 +1,24 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { Ulid } from 'id128'; +import { openTab } from '~/widgets/tabs'; +import { GraphQLDeltaPage } from '../../../page'; +import { GraphQLTab, graphqlTabId } from '../../../tab'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan', +)({ + component: GraphQLDeltaPage, + context: ({ params: { deltaGraphqlIdCan } }) => { + const deltaGraphqlId = Ulid.fromCanonical(deltaGraphqlIdCan).bytes; + return { deltaGraphqlId }; + }, + onEnter: async (match) => { + const { deltaGraphqlId, graphqlId } = match.context; + + await openTab({ + id: graphqlTabId({ deltaGraphqlId, graphqlId }), + match, + node: , + }); + }, +}); diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx new file mode 100644 index 000000000..1053d3aca --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx @@ -0,0 +1,19 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { openTab } from '~/widgets/tabs'; +import { GraphQLPage } from '../../../page'; +import { GraphQLTab, graphqlTabId } from '../../../tab'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/', +)({ + component: GraphQLPage, + onEnter: async (match) => { + const { graphqlId } = match.context; + + await openTab({ + id: graphqlTabId({ graphqlId }), + match, + node: , + }); + }, +}); diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx new file mode 100644 index 000000000..e61f74b47 --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx @@ -0,0 +1,11 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { Ulid } from 'id128'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan', +)({ + context: ({ params: { graphqlIdCan } }) => { + const graphqlId = Ulid.fromCanonical(graphqlIdCan).bytes; + return { graphqlId }; + }, +}); diff --git a/packages/client/src/pages/graphql/tab.tsx b/packages/client/src/pages/graphql/tab.tsx new file mode 100644 index 000000000..b25dd984a --- /dev/null +++ b/packages/client/src/pages/graphql/tab.tsx @@ -0,0 +1,65 @@ +import { useLiveQuery } from '@tanstack/react-db'; +import { useEffect } from 'react'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useDeltaState } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { eqStruct } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { useCloseTab } from '~/widgets/tabs'; + +export interface GraphQLTabProps { + deltaGraphqlId?: Uint8Array; + graphqlId: Uint8Array; +} + +export const graphqlTabId = 
({ deltaGraphqlId, graphqlId }: GraphQLTabProps) => + JSON.stringify({ deltaGraphqlId, graphqlId, route: routes.dashboard.workspace.graphql.route.id }); + +export const GraphQLTab = ({ deltaGraphqlId, graphqlId }: GraphQLTabProps) => { + const closeTab = useCloseTab(); + + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + + const graphqlExists = + useLiveQuery( + (_) => _.from({ item: graphqlCollection }).where(eqStruct({ graphqlId })).findOne(), + [graphqlCollection, graphqlId], + ).data !== undefined; + + useEffect(() => { + if (!graphqlExists) void closeTab(graphqlTabId({ graphqlId })); + }, [graphqlExists, graphqlId, closeTab]); + + const deltaCollection = useApiCollection(GraphQLDeltaCollectionSchema); + + const deltaExists = + useLiveQuery( + (_) => _.from({ item: deltaCollection }).where(eqStruct({ deltaGraphqlId })).findOne(), + [deltaCollection, deltaGraphqlId], + ).data !== undefined; + + useEffect(() => { + if (deltaGraphqlId && !deltaExists) void closeTab(graphqlTabId({ deltaGraphqlId, graphqlId })); + }, [deltaExists, deltaGraphqlId, graphqlId, closeTab]); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + }; + + const [name] = useDeltaState({ ...deltaOptions, valueKey: 'name' }); + + return ( + <> + GQL + {name} + + ); +}; diff --git a/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts b/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts new file mode 100644 index 000000000..5de4c49bd --- /dev/null +++ b/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts @@ -0,0 +1,3 @@ +import { resolveRoutesFrom } from '../../../../../graphql/@x/workspace'; + +export default resolveRoutesFrom(import.meta.dirname); diff --git a/packages/client/src/shared/routes.tsx b/packages/client/src/shared/routes.tsx index 1072e6b2a..80f12fc6b 100644 --- a/packages/client/src/shared/routes.tsx +++ b/packages/client/src/shared/routes.tsx @@ -16,6 +16,13 @@ export const routes = { index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/'), history: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history'), }, + graphql: { + route: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan'), + index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/'), + delta: getRouteApi( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan', + ), + }, http: { route: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan'), index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/'), diff --git a/packages/db/pkg/sqlc/gen/db.go b/packages/db/pkg/sqlc/gen/db.go index a109944ac..b5c84d0bd 100644 --- a/packages/db/pkg/sqlc/gen/db.go +++ b/packages/db/pkg/sqlc/gen/db.go @@ -39,6 +39,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.cleanupOrphanedFlowNodeForEachStmt, err = db.PrepareContext(ctx, cleanupOrphanedFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeForEach: %w", err) } + if q.cleanupOrphanedFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, 
cleanupOrphanedFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeGraphQL: %w", err) + } if q.cleanupOrphanedFlowNodeHttpStmt, err = db.PrepareContext(ctx, cleanupOrphanedFlowNodeHttp); err != nil { return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeHttp: %w", err) } @@ -90,6 +93,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.createFlowNodeForEachStmt, err = db.PrepareContext(ctx, createFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query CreateFlowNodeForEach: %w", err) } + if q.createFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, createFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CreateFlowNodeGraphQL: %w", err) + } if q.createFlowNodeHTTPStmt, err = db.PrepareContext(ctx, createFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query CreateFlowNodeHTTP: %w", err) } @@ -117,6 +123,30 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.createFlowsBulkStmt, err = db.PrepareContext(ctx, createFlowsBulk); err != nil { return nil, fmt.Errorf("error preparing query CreateFlowsBulk: %w", err) } + if q.createGraphQLStmt, err = db.PrepareContext(ctx, createGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQL: %w", err) + } + if q.createGraphQLAssertStmt, err = db.PrepareContext(ctx, createGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLAssert: %w", err) + } + if q.createGraphQLHeaderStmt, err = db.PrepareContext(ctx, createGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLHeader: %w", err) + } + if q.createGraphQLResponseStmt, err = db.PrepareContext(ctx, createGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponse: %w", err) + } + if q.createGraphQLResponseAssertStmt, err = db.PrepareContext(ctx, createGraphQLResponseAssert); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseAssert: %w", err) + } + if q.createGraphQLResponseHeaderStmt, err = db.PrepareContext(ctx, createGraphQLResponseHeader); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseHeader: %w", err) + } + if q.createGraphQLResponseHeaderBulkStmt, err = db.PrepareContext(ctx, createGraphQLResponseHeaderBulk); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseHeaderBulk: %w", err) + } + if q.createGraphQLVersionStmt, err = db.PrepareContext(ctx, createGraphQLVersion); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLVersion: %w", err) + } if q.createHTTPStmt, err = db.PrepareContext(ctx, createHTTP); err != nil { return nil, fmt.Errorf("error preparing query CreateHTTP: %w", err) } @@ -231,6 +261,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.deleteFlowNodeForEachStmt, err = db.PrepareContext(ctx, deleteFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowNodeForEach: %w", err) } + if q.deleteFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, deleteFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query DeleteFlowNodeGraphQL: %w", err) + } if q.deleteFlowNodeHTTPStmt, err = db.PrepareContext(ctx, deleteFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowNodeHTTP: %w", err) } @@ -246,6 +279,21 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if 
q.deleteFlowVariableStmt, err = db.PrepareContext(ctx, deleteFlowVariable); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowVariable: %w", err) } + if q.deleteGraphQLStmt, err = db.PrepareContext(ctx, deleteGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQL: %w", err) + } + if q.deleteGraphQLAssertStmt, err = db.PrepareContext(ctx, deleteGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLAssert: %w", err) + } + if q.deleteGraphQLHeaderStmt, err = db.PrepareContext(ctx, deleteGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLHeader: %w", err) + } + if q.deleteGraphQLResponseStmt, err = db.PrepareContext(ctx, deleteGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLResponse: %w", err) + } + if q.deleteGraphQLResponseHeaderStmt, err = db.PrepareContext(ctx, deleteGraphQLResponseHeader); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLResponseHeader: %w", err) + } if q.deleteHTTPStmt, err = db.PrepareContext(ctx, deleteHTTP); err != nil { return nil, fmt.Errorf("error preparing query DeleteHTTP: %w", err) } @@ -402,6 +450,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getFlowNodeForEachStmt, err = db.PrepareContext(ctx, getFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query GetFlowNodeForEach: %w", err) } + if q.getFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, getFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query GetFlowNodeGraphQL: %w", err) + } if q.getFlowNodeHTTPStmt, err = db.PrepareContext(ctx, getFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query GetFlowNodeHTTP: %w", err) } @@ -444,6 +495,72 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getFlowsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getFlowsByWorkspaceID); err != nil { return nil, fmt.Errorf("error preparing query GetFlowsByWorkspaceID: %w", err) } + if q.getGraphQLStmt, err = db.PrepareContext(ctx, getGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQL: %w", err) + } + if q.getGraphQLAssertStmt, err = db.PrepareContext(ctx, getGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssert: %w", err) + } + if q.getGraphQLAssertDeltasByParentIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertDeltasByParentID: %w", err) + } + if q.getGraphQLAssertDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertDeltasByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLAssertsByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertsByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertsByGraphQLID: %w", err) + } + if q.getGraphQLAssertsByIDsStmt, err = db.PrepareContext(ctx, getGraphQLAssertsByIDs); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertsByIDs: %w", err) + } + if q.getGraphQLDeltasByParentIDStmt, err = db.PrepareContext(ctx, getGraphQLDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLDeltasByParentID: %w", err) + } + if q.getGraphQLDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLDeltasByWorkspaceID); err != nil { + 
return nil, fmt.Errorf("error preparing query GetGraphQLDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLHeaderDeltasByParentIDStmt, err = db.PrepareContext(ctx, getGraphQLHeaderDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaderDeltasByParentID: %w", err) + } + if q.getGraphQLHeaderDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLHeaderDeltasByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaderDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLHeadersStmt, err = db.PrepareContext(ctx, getGraphQLHeaders); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaders: %w", err) + } + if q.getGraphQLHeadersByIDsStmt, err = db.PrepareContext(ctx, getGraphQLHeadersByIDs); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeadersByIDs: %w", err) + } + if q.getGraphQLResponseStmt, err = db.PrepareContext(ctx, getGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponse: %w", err) + } + if q.getGraphQLResponseAssertsByResponseIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseAssertsByResponseID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseAssertsByResponseID: %w", err) + } + if q.getGraphQLResponseAssertsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseAssertsByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseAssertsByWorkspaceID: %w", err) + } + if q.getGraphQLResponseHeadersByResponseIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseHeadersByResponseID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseHeadersByResponseID: %w", err) + } + if q.getGraphQLResponseHeadersByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseHeadersByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseHeadersByWorkspaceID: %w", err) + } + if q.getGraphQLResponsesByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLResponsesByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponsesByGraphQLID: %w", err) + } + if q.getGraphQLResponsesByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponsesByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponsesByWorkspaceID: %w", err) + } + if q.getGraphQLVersionsByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLVersionsByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLVersionsByGraphQLID: %w", err) + } + if q.getGraphQLWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLWorkspaceID: %w", err) + } + if q.getGraphQLsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLsByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLsByWorkspaceID: %w", err) + } if q.getHTTPStmt, err = db.PrepareContext(ctx, getHTTP); err != nil { return nil, fmt.Errorf("error preparing query GetHTTP: %w", err) } @@ -738,6 +855,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.updateFlowNodeForEachStmt, err = db.PrepareContext(ctx, updateFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query UpdateFlowNodeForEach: %w", err) } + if q.updateFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, updateFlowNodeGraphQL); err != nil { + return nil, 
fmt.Errorf("error preparing query UpdateFlowNodeGraphQL: %w", err) + } if q.updateFlowNodeHTTPStmt, err = db.PrepareContext(ctx, updateFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query UpdateFlowNodeHTTP: %w", err) } @@ -759,6 +879,21 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.updateFlowVariableOrderStmt, err = db.PrepareContext(ctx, updateFlowVariableOrder); err != nil { return nil, fmt.Errorf("error preparing query UpdateFlowVariableOrder: %w", err) } + if q.updateGraphQLStmt, err = db.PrepareContext(ctx, updateGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQL: %w", err) + } + if q.updateGraphQLAssertStmt, err = db.PrepareContext(ctx, updateGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLAssert: %w", err) + } + if q.updateGraphQLAssertDeltaStmt, err = db.PrepareContext(ctx, updateGraphQLAssertDelta); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLAssertDelta: %w", err) + } + if q.updateGraphQLDeltaStmt, err = db.PrepareContext(ctx, updateGraphQLDelta); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLDelta: %w", err) + } + if q.updateGraphQLHeaderStmt, err = db.PrepareContext(ctx, updateGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLHeader: %w", err) + } if q.updateHTTPStmt, err = db.PrepareContext(ctx, updateHTTP); err != nil { return nil, fmt.Errorf("error preparing query UpdateHTTP: %w", err) } @@ -879,6 +1014,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing cleanupOrphanedFlowNodeForEachStmt: %w", cerr) } } + if q.cleanupOrphanedFlowNodeGraphQLStmt != nil { + if cerr := q.cleanupOrphanedFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing cleanupOrphanedFlowNodeGraphQLStmt: %w", cerr) + } + } if q.cleanupOrphanedFlowNodeHttpStmt != nil { if cerr := q.cleanupOrphanedFlowNodeHttpStmt.Close(); cerr != nil { err = fmt.Errorf("error closing cleanupOrphanedFlowNodeHttpStmt: %w", cerr) @@ -964,6 +1104,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing createFlowNodeForEachStmt: %w", cerr) } } + if q.createFlowNodeGraphQLStmt != nil { + if cerr := q.createFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createFlowNodeGraphQLStmt: %w", cerr) + } + } if q.createFlowNodeHTTPStmt != nil { if cerr := q.createFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing createFlowNodeHTTPStmt: %w", cerr) @@ -1009,6 +1154,46 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing createFlowsBulkStmt: %w", cerr) } } + if q.createGraphQLStmt != nil { + if cerr := q.createGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLStmt: %w", cerr) + } + } + if q.createGraphQLAssertStmt != nil { + if cerr := q.createGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLAssertStmt: %w", cerr) + } + } + if q.createGraphQLHeaderStmt != nil { + if cerr := q.createGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLHeaderStmt: %w", cerr) + } + } + if q.createGraphQLResponseStmt != nil { + if cerr := q.createGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseStmt: %w", cerr) + } + } + if q.createGraphQLResponseAssertStmt != nil { + if cerr := q.createGraphQLResponseAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing 
createGraphQLResponseAssertStmt: %w", cerr) + } + } + if q.createGraphQLResponseHeaderStmt != nil { + if cerr := q.createGraphQLResponseHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseHeaderStmt: %w", cerr) + } + } + if q.createGraphQLResponseHeaderBulkStmt != nil { + if cerr := q.createGraphQLResponseHeaderBulkStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseHeaderBulkStmt: %w", cerr) + } + } + if q.createGraphQLVersionStmt != nil { + if cerr := q.createGraphQLVersionStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLVersionStmt: %w", cerr) + } + } if q.createHTTPStmt != nil { if cerr := q.createHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing createHTTPStmt: %w", cerr) @@ -1199,6 +1384,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing deleteFlowNodeForEachStmt: %w", cerr) } } + if q.deleteFlowNodeGraphQLStmt != nil { + if cerr := q.deleteFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteFlowNodeGraphQLStmt: %w", cerr) + } + } if q.deleteFlowNodeHTTPStmt != nil { if cerr := q.deleteFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing deleteFlowNodeHTTPStmt: %w", cerr) @@ -1224,6 +1414,31 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing deleteFlowVariableStmt: %w", cerr) } } + if q.deleteGraphQLStmt != nil { + if cerr := q.deleteGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLStmt: %w", cerr) + } + } + if q.deleteGraphQLAssertStmt != nil { + if cerr := q.deleteGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLAssertStmt: %w", cerr) + } + } + if q.deleteGraphQLHeaderStmt != nil { + if cerr := q.deleteGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLHeaderStmt: %w", cerr) + } + } + if q.deleteGraphQLResponseStmt != nil { + if cerr := q.deleteGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLResponseStmt: %w", cerr) + } + } + if q.deleteGraphQLResponseHeaderStmt != nil { + if cerr := q.deleteGraphQLResponseHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLResponseHeaderStmt: %w", cerr) + } + } if q.deleteHTTPStmt != nil { if cerr := q.deleteHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing deleteHTTPStmt: %w", cerr) @@ -1484,6 +1699,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getFlowNodeForEachStmt: %w", cerr) } } + if q.getFlowNodeGraphQLStmt != nil { + if cerr := q.getFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getFlowNodeGraphQLStmt: %w", cerr) + } + } if q.getFlowNodeHTTPStmt != nil { if cerr := q.getFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getFlowNodeHTTPStmt: %w", cerr) @@ -1554,6 +1774,116 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getFlowsByWorkspaceIDStmt: %w", cerr) } } + if q.getGraphQLStmt != nil { + if cerr := q.getGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLStmt: %w", cerr) + } + } + if q.getGraphQLAssertStmt != nil { + if cerr := q.getGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertStmt: %w", cerr) + } + } + if q.getGraphQLAssertDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLAssertDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing 
getGraphQLAssertDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLAssertDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertsByGraphQLIDStmt != nil { + if cerr := q.getGraphQLAssertsByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertsByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertsByIDsStmt != nil { + if cerr := q.getGraphQLAssertsByIDsStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertsByIDsStmt: %w", cerr) + } + } + if q.getGraphQLDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLHeaderDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLHeaderDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeaderDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLHeaderDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLHeaderDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeaderDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLHeadersStmt != nil { + if cerr := q.getGraphQLHeadersStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeadersStmt: %w", cerr) + } + } + if q.getGraphQLHeadersByIDsStmt != nil { + if cerr := q.getGraphQLHeadersByIDsStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeadersByIDsStmt: %w", cerr) + } + } + if q.getGraphQLResponseStmt != nil { + if cerr := q.getGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseStmt: %w", cerr) + } + } + if q.getGraphQLResponseAssertsByResponseIDStmt != nil { + if cerr := q.getGraphQLResponseAssertsByResponseIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseAssertsByResponseIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseAssertsByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponseAssertsByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseAssertsByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseHeadersByResponseIDStmt != nil { + if cerr := q.getGraphQLResponseHeadersByResponseIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseHeadersByResponseIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseHeadersByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponseHeadersByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseHeadersByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLResponsesByGraphQLIDStmt != nil { + if cerr := q.getGraphQLResponsesByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponsesByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLResponsesByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponsesByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponsesByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLVersionsByGraphQLIDStmt != nil { + if cerr := 
q.getGraphQLVersionsByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLVersionsByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLWorkspaceIDStmt != nil { + if cerr := q.getGraphQLWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLsByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLsByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLsByWorkspaceIDStmt: %w", cerr) + } + } if q.getHTTPStmt != nil { if cerr := q.getHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getHTTPStmt: %w", cerr) @@ -2044,6 +2374,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing updateFlowNodeForEachStmt: %w", cerr) } } + if q.updateFlowNodeGraphQLStmt != nil { + if cerr := q.updateFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateFlowNodeGraphQLStmt: %w", cerr) + } + } if q.updateFlowNodeHTTPStmt != nil { if cerr := q.updateFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing updateFlowNodeHTTPStmt: %w", cerr) @@ -2079,6 +2414,31 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing updateFlowVariableOrderStmt: %w", cerr) } } + if q.updateGraphQLStmt != nil { + if cerr := q.updateGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLStmt: %w", cerr) + } + } + if q.updateGraphQLAssertStmt != nil { + if cerr := q.updateGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLAssertStmt: %w", cerr) + } + } + if q.updateGraphQLAssertDeltaStmt != nil { + if cerr := q.updateGraphQLAssertDeltaStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLAssertDeltaStmt: %w", cerr) + } + } + if q.updateGraphQLDeltaStmt != nil { + if cerr := q.updateGraphQLDeltaStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLDeltaStmt: %w", cerr) + } + } + if q.updateGraphQLHeaderStmt != nil { + if cerr := q.updateGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLHeaderStmt: %w", cerr) + } + } if q.updateHTTPStmt != nil { if cerr := q.updateHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing updateHTTPStmt: %w", cerr) @@ -2273,6 +2633,7 @@ type Queries struct { cleanupOrphanedFlowNodeConditionStmt *sql.Stmt cleanupOrphanedFlowNodeForStmt *sql.Stmt cleanupOrphanedFlowNodeForEachStmt *sql.Stmt + cleanupOrphanedFlowNodeGraphQLStmt *sql.Stmt cleanupOrphanedFlowNodeHttpStmt *sql.Stmt cleanupOrphanedFlowNodeJsStmt *sql.Stmt cleanupOrphanedNodeExecutionsStmt *sql.Stmt @@ -2290,6 +2651,7 @@ type Queries struct { createFlowNodeConditionStmt *sql.Stmt createFlowNodeForStmt *sql.Stmt createFlowNodeForEachStmt *sql.Stmt + createFlowNodeGraphQLStmt *sql.Stmt createFlowNodeHTTPStmt *sql.Stmt createFlowNodeJsStmt *sql.Stmt createFlowNodeMemoryStmt *sql.Stmt @@ -2299,6 +2661,14 @@ type Queries struct { createFlowVariableStmt *sql.Stmt createFlowVariableBulkStmt *sql.Stmt createFlowsBulkStmt *sql.Stmt + createGraphQLStmt *sql.Stmt + createGraphQLAssertStmt *sql.Stmt + createGraphQLHeaderStmt *sql.Stmt + createGraphQLResponseStmt *sql.Stmt + createGraphQLResponseAssertStmt *sql.Stmt + createGraphQLResponseHeaderStmt *sql.Stmt + createGraphQLResponseHeaderBulkStmt *sql.Stmt + createGraphQLVersionStmt *sql.Stmt createHTTPStmt *sql.Stmt createHTTPAssertStmt *sql.Stmt createHTTPAssertBulkStmt *sql.Stmt @@ -2337,11 +2707,17 @@ type Queries struct { 
deleteFlowNodeConditionStmt *sql.Stmt deleteFlowNodeForStmt *sql.Stmt deleteFlowNodeForEachStmt *sql.Stmt + deleteFlowNodeGraphQLStmt *sql.Stmt deleteFlowNodeHTTPStmt *sql.Stmt deleteFlowNodeJsStmt *sql.Stmt deleteFlowNodeMemoryStmt *sql.Stmt deleteFlowTagStmt *sql.Stmt deleteFlowVariableStmt *sql.Stmt + deleteGraphQLStmt *sql.Stmt + deleteGraphQLAssertStmt *sql.Stmt + deleteGraphQLHeaderStmt *sql.Stmt + deleteGraphQLResponseStmt *sql.Stmt + deleteGraphQLResponseHeaderStmt *sql.Stmt deleteHTTPStmt *sql.Stmt deleteHTTPAssertStmt *sql.Stmt deleteHTTPBodyFormStmt *sql.Stmt @@ -2394,6 +2770,7 @@ type Queries struct { getFlowNodeConditionStmt *sql.Stmt getFlowNodeForStmt *sql.Stmt getFlowNodeForEachStmt *sql.Stmt + getFlowNodeGraphQLStmt *sql.Stmt getFlowNodeHTTPStmt *sql.Stmt getFlowNodeJsStmt *sql.Stmt getFlowNodeMemoryStmt *sql.Stmt @@ -2408,6 +2785,28 @@ type Queries struct { getFlowVariablesByFlowIDsStmt *sql.Stmt getFlowsByVersionParentIDStmt *sql.Stmt getFlowsByWorkspaceIDStmt *sql.Stmt + getGraphQLStmt *sql.Stmt + getGraphQLAssertStmt *sql.Stmt + getGraphQLAssertDeltasByParentIDStmt *sql.Stmt + getGraphQLAssertDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLAssertsByGraphQLIDStmt *sql.Stmt + getGraphQLAssertsByIDsStmt *sql.Stmt + getGraphQLDeltasByParentIDStmt *sql.Stmt + getGraphQLDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLHeaderDeltasByParentIDStmt *sql.Stmt + getGraphQLHeaderDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLHeadersStmt *sql.Stmt + getGraphQLHeadersByIDsStmt *sql.Stmt + getGraphQLResponseStmt *sql.Stmt + getGraphQLResponseAssertsByResponseIDStmt *sql.Stmt + getGraphQLResponseAssertsByWorkspaceIDStmt *sql.Stmt + getGraphQLResponseHeadersByResponseIDStmt *sql.Stmt + getGraphQLResponseHeadersByWorkspaceIDStmt *sql.Stmt + getGraphQLResponsesByGraphQLIDStmt *sql.Stmt + getGraphQLResponsesByWorkspaceIDStmt *sql.Stmt + getGraphQLVersionsByGraphQLIDStmt *sql.Stmt + getGraphQLWorkspaceIDStmt *sql.Stmt + getGraphQLsByWorkspaceIDStmt *sql.Stmt getHTTPStmt *sql.Stmt getHTTPAssertStmt *sql.Stmt getHTTPAssertsByHttpIDStmt *sql.Stmt @@ -2506,6 +2905,7 @@ type Queries struct { updateFlowNodeConditionStmt *sql.Stmt updateFlowNodeForStmt *sql.Stmt updateFlowNodeForEachStmt *sql.Stmt + updateFlowNodeGraphQLStmt *sql.Stmt updateFlowNodeHTTPStmt *sql.Stmt updateFlowNodeIDMappingStmt *sql.Stmt updateFlowNodeJsStmt *sql.Stmt @@ -2513,6 +2913,11 @@ type Queries struct { updateFlowNodeStateStmt *sql.Stmt updateFlowVariableStmt *sql.Stmt updateFlowVariableOrderStmt *sql.Stmt + updateGraphQLStmt *sql.Stmt + updateGraphQLAssertStmt *sql.Stmt + updateGraphQLAssertDeltaStmt *sql.Stmt + updateGraphQLDeltaStmt *sql.Stmt + updateGraphQLHeaderStmt *sql.Stmt updateHTTPStmt *sql.Stmt updateHTTPAssertStmt *sql.Stmt updateHTTPAssertDeltaStmt *sql.Stmt @@ -2554,6 +2959,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { cleanupOrphanedFlowNodeConditionStmt: q.cleanupOrphanedFlowNodeConditionStmt, cleanupOrphanedFlowNodeForStmt: q.cleanupOrphanedFlowNodeForStmt, cleanupOrphanedFlowNodeForEachStmt: q.cleanupOrphanedFlowNodeForEachStmt, + cleanupOrphanedFlowNodeGraphQLStmt: q.cleanupOrphanedFlowNodeGraphQLStmt, cleanupOrphanedFlowNodeHttpStmt: q.cleanupOrphanedFlowNodeHttpStmt, cleanupOrphanedFlowNodeJsStmt: q.cleanupOrphanedFlowNodeJsStmt, cleanupOrphanedNodeExecutionsStmt: q.cleanupOrphanedNodeExecutionsStmt, @@ -2571,6 +2977,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { createFlowNodeConditionStmt: q.createFlowNodeConditionStmt, createFlowNodeForStmt: q.createFlowNodeForStmt, 
createFlowNodeForEachStmt: q.createFlowNodeForEachStmt, + createFlowNodeGraphQLStmt: q.createFlowNodeGraphQLStmt, createFlowNodeHTTPStmt: q.createFlowNodeHTTPStmt, createFlowNodeJsStmt: q.createFlowNodeJsStmt, createFlowNodeMemoryStmt: q.createFlowNodeMemoryStmt, @@ -2580,6 +2987,14 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { createFlowVariableStmt: q.createFlowVariableStmt, createFlowVariableBulkStmt: q.createFlowVariableBulkStmt, createFlowsBulkStmt: q.createFlowsBulkStmt, + createGraphQLStmt: q.createGraphQLStmt, + createGraphQLAssertStmt: q.createGraphQLAssertStmt, + createGraphQLHeaderStmt: q.createGraphQLHeaderStmt, + createGraphQLResponseStmt: q.createGraphQLResponseStmt, + createGraphQLResponseAssertStmt: q.createGraphQLResponseAssertStmt, + createGraphQLResponseHeaderStmt: q.createGraphQLResponseHeaderStmt, + createGraphQLResponseHeaderBulkStmt: q.createGraphQLResponseHeaderBulkStmt, + createGraphQLVersionStmt: q.createGraphQLVersionStmt, createHTTPStmt: q.createHTTPStmt, createHTTPAssertStmt: q.createHTTPAssertStmt, createHTTPAssertBulkStmt: q.createHTTPAssertBulkStmt, @@ -2618,11 +3033,17 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { deleteFlowNodeConditionStmt: q.deleteFlowNodeConditionStmt, deleteFlowNodeForStmt: q.deleteFlowNodeForStmt, deleteFlowNodeForEachStmt: q.deleteFlowNodeForEachStmt, + deleteFlowNodeGraphQLStmt: q.deleteFlowNodeGraphQLStmt, deleteFlowNodeHTTPStmt: q.deleteFlowNodeHTTPStmt, deleteFlowNodeJsStmt: q.deleteFlowNodeJsStmt, deleteFlowNodeMemoryStmt: q.deleteFlowNodeMemoryStmt, deleteFlowTagStmt: q.deleteFlowTagStmt, deleteFlowVariableStmt: q.deleteFlowVariableStmt, + deleteGraphQLStmt: q.deleteGraphQLStmt, + deleteGraphQLAssertStmt: q.deleteGraphQLAssertStmt, + deleteGraphQLHeaderStmt: q.deleteGraphQLHeaderStmt, + deleteGraphQLResponseStmt: q.deleteGraphQLResponseStmt, + deleteGraphQLResponseHeaderStmt: q.deleteGraphQLResponseHeaderStmt, deleteHTTPStmt: q.deleteHTTPStmt, deleteHTTPAssertStmt: q.deleteHTTPAssertStmt, deleteHTTPBodyFormStmt: q.deleteHTTPBodyFormStmt, @@ -2675,6 +3096,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getFlowNodeConditionStmt: q.getFlowNodeConditionStmt, getFlowNodeForStmt: q.getFlowNodeForStmt, getFlowNodeForEachStmt: q.getFlowNodeForEachStmt, + getFlowNodeGraphQLStmt: q.getFlowNodeGraphQLStmt, getFlowNodeHTTPStmt: q.getFlowNodeHTTPStmt, getFlowNodeJsStmt: q.getFlowNodeJsStmt, getFlowNodeMemoryStmt: q.getFlowNodeMemoryStmt, @@ -2689,6 +3111,28 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getFlowVariablesByFlowIDsStmt: q.getFlowVariablesByFlowIDsStmt, getFlowsByVersionParentIDStmt: q.getFlowsByVersionParentIDStmt, getFlowsByWorkspaceIDStmt: q.getFlowsByWorkspaceIDStmt, + getGraphQLStmt: q.getGraphQLStmt, + getGraphQLAssertStmt: q.getGraphQLAssertStmt, + getGraphQLAssertDeltasByParentIDStmt: q.getGraphQLAssertDeltasByParentIDStmt, + getGraphQLAssertDeltasByWorkspaceIDStmt: q.getGraphQLAssertDeltasByWorkspaceIDStmt, + getGraphQLAssertsByGraphQLIDStmt: q.getGraphQLAssertsByGraphQLIDStmt, + getGraphQLAssertsByIDsStmt: q.getGraphQLAssertsByIDsStmt, + getGraphQLDeltasByParentIDStmt: q.getGraphQLDeltasByParentIDStmt, + getGraphQLDeltasByWorkspaceIDStmt: q.getGraphQLDeltasByWorkspaceIDStmt, + getGraphQLHeaderDeltasByParentIDStmt: q.getGraphQLHeaderDeltasByParentIDStmt, + getGraphQLHeaderDeltasByWorkspaceIDStmt: q.getGraphQLHeaderDeltasByWorkspaceIDStmt, + getGraphQLHeadersStmt: q.getGraphQLHeadersStmt, + getGraphQLHeadersByIDsStmt: q.getGraphQLHeadersByIDsStmt, + getGraphQLResponseStmt: q.getGraphQLResponseStmt, + 
getGraphQLResponseAssertsByResponseIDStmt: q.getGraphQLResponseAssertsByResponseIDStmt, + getGraphQLResponseAssertsByWorkspaceIDStmt: q.getGraphQLResponseAssertsByWorkspaceIDStmt, + getGraphQLResponseHeadersByResponseIDStmt: q.getGraphQLResponseHeadersByResponseIDStmt, + getGraphQLResponseHeadersByWorkspaceIDStmt: q.getGraphQLResponseHeadersByWorkspaceIDStmt, + getGraphQLResponsesByGraphQLIDStmt: q.getGraphQLResponsesByGraphQLIDStmt, + getGraphQLResponsesByWorkspaceIDStmt: q.getGraphQLResponsesByWorkspaceIDStmt, + getGraphQLVersionsByGraphQLIDStmt: q.getGraphQLVersionsByGraphQLIDStmt, + getGraphQLWorkspaceIDStmt: q.getGraphQLWorkspaceIDStmt, + getGraphQLsByWorkspaceIDStmt: q.getGraphQLsByWorkspaceIDStmt, getHTTPStmt: q.getHTTPStmt, getHTTPAssertStmt: q.getHTTPAssertStmt, getHTTPAssertsByHttpIDStmt: q.getHTTPAssertsByHttpIDStmt, @@ -2787,6 +3231,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { updateFlowNodeConditionStmt: q.updateFlowNodeConditionStmt, updateFlowNodeForStmt: q.updateFlowNodeForStmt, updateFlowNodeForEachStmt: q.updateFlowNodeForEachStmt, + updateFlowNodeGraphQLStmt: q.updateFlowNodeGraphQLStmt, updateFlowNodeHTTPStmt: q.updateFlowNodeHTTPStmt, updateFlowNodeIDMappingStmt: q.updateFlowNodeIDMappingStmt, updateFlowNodeJsStmt: q.updateFlowNodeJsStmt, @@ -2794,6 +3239,11 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { updateFlowNodeStateStmt: q.updateFlowNodeStateStmt, updateFlowVariableStmt: q.updateFlowVariableStmt, updateFlowVariableOrderStmt: q.updateFlowVariableOrderStmt, + updateGraphQLStmt: q.updateGraphQLStmt, + updateGraphQLAssertStmt: q.updateGraphQLAssertStmt, + updateGraphQLAssertDeltaStmt: q.updateGraphQLAssertDeltaStmt, + updateGraphQLDeltaStmt: q.updateGraphQLDeltaStmt, + updateGraphQLHeaderStmt: q.updateGraphQLHeaderStmt, updateHTTPStmt: q.updateHTTPStmt, updateHTTPAssertStmt: q.updateHTTPAssertStmt, updateHTTPAssertDeltaStmt: q.updateHTTPAssertDeltaStmt, diff --git a/packages/db/pkg/sqlc/gen/flow.sql.go b/packages/db/pkg/sqlc/gen/flow.sql.go index 335ae3c71..a1779f0dd 100644 --- a/packages/db/pkg/sqlc/gen/flow.sql.go +++ b/packages/db/pkg/sqlc/gen/flow.sql.go @@ -52,6 +52,15 @@ func (q *Queries) CleanupOrphanedFlowNodeForEach(ctx context.Context) error { return err } +const cleanupOrphanedFlowNodeGraphQL = `-- name: CleanupOrphanedFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id NOT IN (SELECT id FROM flow_node) +` + +func (q *Queries) CleanupOrphanedFlowNodeGraphQL(ctx context.Context) error { + _, err := q.exec(ctx, q.cleanupOrphanedFlowNodeGraphQLStmt, cleanupOrphanedFlowNodeGraphQL) + return err +} + const cleanupOrphanedFlowNodeHttp = `-- name: CleanupOrphanedFlowNodeHttp :exec DELETE FROM flow_node_http WHERE flow_node_id NOT IN (SELECT id FROM flow_node) ` @@ -228,6 +237,21 @@ func (q *Queries) CreateFlowNodeForEach(ctx context.Context, arg CreateFlowNodeF return err } +const createFlowNodeGraphQL = `-- name: CreateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) 
+` + +type CreateFlowNodeGraphQLParams struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + +func (q *Queries) CreateFlowNodeGraphQL(ctx context.Context, arg CreateFlowNodeGraphQLParams) error { + _, err := q.exec(ctx, q.createFlowNodeGraphQLStmt, createFlowNodeGraphQL, arg.FlowNodeID, arg.GraphqlID, arg.DeltaGraphqlID) + return err +} + const createFlowNodeHTTP = `-- name: CreateFlowNodeHTTP :exec INSERT INTO flow_node_http ( @@ -848,10 +872,10 @@ func (q *Queries) CreateMigration(ctx context.Context, arg CreateMigrationParams const createNodeExecution = `-- name: CreateNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) -RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type CreateNodeExecutionParams struct { @@ -865,6 +889,7 @@ type CreateNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } @@ -880,6 +905,7 @@ func (q *Queries) CreateNodeExecution(ctx context.Context, arg CreateNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, ) var i NodeExecution @@ -894,6 +920,7 @@ func (q *Queries) CreateNodeExecution(ctx context.Context, arg CreateNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -990,6 +1017,15 @@ func (q *Queries) DeleteFlowNodeForEach(ctx context.Context, flowNodeID idwrap.I return err } +const deleteFlowNodeGraphQL = `-- name: DeleteFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id = ? +` + +func (q *Queries) DeleteFlowNodeGraphQL(ctx context.Context, flowNodeID idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteFlowNodeGraphQLStmt, deleteFlowNodeGraphQL, flowNodeID) + return err +} + const deleteFlowNodeHTTP = `-- name: DeleteFlowNodeHTTP :exec DELETE FROM flow_node_http WHERE @@ -1379,6 +1415,25 @@ func (q *Queries) GetFlowNodeForEach(ctx context.Context, flowNodeID idwrap.IDWr return i, err } +const getFlowNodeGraphQL = `-- name: GetFlowNodeGraphQL :one +SELECT + flow_node_id, + graphql_id, + delta_graphql_id +FROM + flow_node_graphql +WHERE + flow_node_id = ? +LIMIT 1 +` + +func (q *Queries) GetFlowNodeGraphQL(ctx context.Context, flowNodeID idwrap.IDWrap) (FlowNodeGraphql, error) { + row := q.queryRow(ctx, q.getFlowNodeGraphQLStmt, getFlowNodeGraphQL, flowNodeID) + var i FlowNodeGraphql + err := row.Scan(&i.FlowNodeID, &i.GraphqlID, &i.DeltaGraphqlID) + return i, err +} + const getFlowNodeHTTP = `-- name: GetFlowNodeHTTP :one SELECT flow_node_id, @@ -1857,7 +1912,7 @@ func (q *Queries) GetFlowsByWorkspaceID(ctx context.Context, workspaceID idwrap. 
} const getLatestNodeExecutionByNodeID = `-- name: GetLatestNodeExecutionByNodeID :one -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND completed_at IS NOT NULL ORDER BY completed_at DESC, id DESC @@ -1878,6 +1933,7 @@ func (q *Queries) GetLatestNodeExecutionByNodeID(ctx context.Context, nodeID idw &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -1979,7 +2035,7 @@ func (q *Queries) GetMigrations(ctx context.Context) ([]Migration, error) { } const getNodeExecution = `-- name: GetNodeExecution :one -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE id = ? ` @@ -1998,13 +2054,14 @@ func (q *Queries) GetNodeExecution(ctx context.Context, id idwrap.IDWrap) (NodeE &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err } const getNodeExecutionsByNodeID = `-- name: GetNodeExecutionsByNodeID :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND completed_at IS NOT NULL ORDER BY completed_at DESC, id DESC @@ -2030,6 +2087,7 @@ func (q *Queries) GetNodeExecutionsByNodeID(ctx context.Context, nodeID idwrap.I &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2111,7 +2169,7 @@ func (q *Queries) GetTagsByWorkspaceID(ctx context.Context, workspaceID idwrap.I } const listNodeExecutions = `-- name: ListNodeExecutions :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? ORDER BY completed_at DESC, id DESC LIMIT ? OFFSET ? 
@@ -2143,6 +2201,7 @@ func (q *Queries) ListNodeExecutions(ctx context.Context, arg ListNodeExecutions &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2159,7 +2218,7 @@ func (q *Queries) ListNodeExecutions(ctx context.Context, arg ListNodeExecutions } const listNodeExecutionsByFlowRun = `-- name: ListNodeExecutionsByFlowRun :many -SELECT ne.id, ne.node_id, ne.name, ne.state, ne.error, ne.input_data, ne.input_data_compress_type, ne.output_data, ne.output_data_compress_type, ne.http_response_id, ne.completed_at FROM node_execution ne +SELECT ne.id, ne.node_id, ne.name, ne.state, ne.error, ne.input_data, ne.input_data_compress_type, ne.output_data, ne.output_data_compress_type, ne.http_response_id, ne.graphql_response_id, ne.completed_at FROM node_execution ne JOIN flow_node fn ON ne.node_id = fn.id WHERE fn.flow_id = ? ORDER BY ne.completed_at DESC, ne.id DESC @@ -2185,6 +2244,7 @@ func (q *Queries) ListNodeExecutionsByFlowRun(ctx context.Context, flowID idwrap &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2201,7 +2261,7 @@ func (q *Queries) ListNodeExecutionsByFlowRun(ctx context.Context, flowID idwrap } const listNodeExecutionsByState = `-- name: ListNodeExecutionsByState :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND state = ? ORDER BY completed_at DESC, id DESC LIMIT ? OFFSET ? @@ -2239,6 +2299,7 @@ func (q *Queries) ListNodeExecutionsByState(ctx context.Context, arg ListNodeExe &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2425,6 +2486,24 @@ func (q *Queries) UpdateFlowNodeForEach(ctx context.Context, arg UpdateFlowNodeF return err } +const updateFlowNodeGraphQL = `-- name: UpdateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) +ON CONFLICT(flow_node_id) DO UPDATE SET + graphql_id = excluded.graphql_id, + delta_graphql_id = excluded.delta_graphql_id +` + +type UpdateFlowNodeGraphQLParams struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + +func (q *Queries) UpdateFlowNodeGraphQL(ctx context.Context, arg UpdateFlowNodeGraphQLParams) error { + _, err := q.exec(ctx, q.updateFlowNodeGraphQLStmt, updateFlowNodeGraphQL, arg.FlowNodeID, arg.GraphqlID, arg.DeltaGraphqlID) + return err +} + const updateFlowNodeHTTP = `-- name: UpdateFlowNodeHTTP :exec INSERT INTO flow_node_http ( flow_node_id, @@ -2553,10 +2632,10 @@ func (q *Queries) UpdateFlowVariableOrder(ctx context.Context, arg UpdateFlowVar const updateNodeExecution = `-- name: UpdateNodeExecution :one UPDATE node_execution -SET state = ?, error = ?, output_data = ?, - output_data_compress_type = ?, http_response_id = ?, completed_at = ? +SET state = ?, error = ?, output_data = ?, + output_data_compress_type = ?, http_response_id = ?, graphql_response_id = ?, completed_at = ? WHERE id = ? 
-RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type UpdateNodeExecutionParams struct { @@ -2565,6 +2644,7 @@ type UpdateNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 ID idwrap.IDWrap } @@ -2576,6 +2656,7 @@ func (q *Queries) UpdateNodeExecution(ctx context.Context, arg UpdateNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, arg.ID, ) @@ -2591,6 +2672,7 @@ func (q *Queries) UpdateNodeExecution(ctx context.Context, arg UpdateNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -2635,19 +2717,20 @@ func (q *Queries) UpdateTag(ctx context.Context, arg UpdateTagParams) error { const upsertNodeExecution = `-- name: UpsertNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(id) DO UPDATE SET state = excluded.state, - error = excluded.error, + error = excluded.error, input_data = excluded.input_data, input_data_compress_type = excluded.input_data_compress_type, output_data = excluded.output_data, output_data_compress_type = excluded.output_data_compress_type, http_response_id = excluded.http_response_id, + graphql_response_id = excluded.graphql_response_id, completed_at = excluded.completed_at -RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type UpsertNodeExecutionParams struct { @@ -2661,6 +2744,7 @@ type UpsertNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } @@ -2676,6 +2760,7 @@ func (q *Queries) UpsertNodeExecution(ctx context.Context, arg UpsertNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, ) var i NodeExecution @@ -2690,6 +2775,7 @@ func (q *Queries) UpsertNodeExecution(ctx context.Context, arg UpsertNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err diff --git a/packages/db/pkg/sqlc/gen/graphql.sql.go b/packages/db/pkg/sqlc/gen/graphql.sql.go new file mode 100644 index 000000000..dc1660fcd --- /dev/null +++ b/packages/db/pkg/sqlc/gen/graphql.sql.go @@ -0,0 +1,1749 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: graphql.sql + +package gen + +import ( + "context" + "strings" + "time" + + idwrap "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +const createGraphQL = `-- name: CreateGraphQL :exec +INSERT INTO graphql ( + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLParams struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlID []byte + IsDelta bool + IsSnapshot bool + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} +} + +func (q *Queries) CreateGraphQL(ctx context.Context, arg CreateGraphQLParams) error { + _, err := q.exec(ctx, q.createGraphQLStmt, createGraphQL, + arg.ID, + arg.WorkspaceID, + arg.FolderID, + arg.Name, + arg.Url, + arg.Query, + arg.Variables, + arg.Description, + arg.LastRunAt, + arg.CreatedAt, + arg.UpdatedAt, + arg.ParentGraphqlID, + arg.IsDelta, + arg.IsSnapshot, + arg.DeltaName, + arg.DeltaUrl, + arg.DeltaQuery, + arg.DeltaVariables, + arg.DeltaDescription, + ) + return err +} + +const createGraphQLAssert = `-- name: CreateGraphQLAssert :exec +INSERT INTO graphql_assert ( + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLAssertParams struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) CreateGraphQLAssert(ctx context.Context, arg CreateGraphQLAssertParams) error { + _, err := q.exec(ctx, q.createGraphQLAssertStmt, createGraphQLAssert, + arg.ID, + arg.GraphqlID, + arg.Value, + arg.Enabled, + arg.Description, + arg.DisplayOrder, + arg.ParentGraphqlAssertID, + arg.IsDelta, + arg.DeltaValue, + arg.DeltaEnabled, + arg.DeltaDescription, + arg.DeltaDisplayOrder, + arg.CreatedAt, + arg.UpdatedAt, + ) + return err +} + +const createGraphQLHeader = `-- name: CreateGraphQLHeader :exec +INSERT INTO graphql_header ( + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+` + +type CreateGraphQLHeaderParams struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlHeaderID []byte + IsDelta bool + DeltaHeaderKey interface{} + DeltaHeaderValue interface{} + DeltaDescription interface{} + DeltaEnabled interface{} + DeltaDisplayOrder interface{} +} + +func (q *Queries) CreateGraphQLHeader(ctx context.Context, arg CreateGraphQLHeaderParams) error { + _, err := q.exec(ctx, q.createGraphQLHeaderStmt, createGraphQLHeader, + arg.ID, + arg.GraphqlID, + arg.HeaderKey, + arg.HeaderValue, + arg.Description, + arg.Enabled, + arg.DisplayOrder, + arg.CreatedAt, + arg.UpdatedAt, + arg.ParentGraphqlHeaderID, + arg.IsDelta, + arg.DeltaHeaderKey, + arg.DeltaHeaderValue, + arg.DeltaDescription, + arg.DeltaEnabled, + arg.DeltaDisplayOrder, + ) + return err +} + +const createGraphQLResponse = `-- name: CreateGraphQLResponse :exec +INSERT INTO graphql_response ( + id, graphql_id, status, body, time, duration, size, created_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLResponseParams struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + Status interface{} + Body []byte + Time time.Time + Duration interface{} + Size interface{} + CreatedAt int64 +} + +func (q *Queries) CreateGraphQLResponse(ctx context.Context, arg CreateGraphQLResponseParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseStmt, createGraphQLResponse, + arg.ID, + arg.GraphqlID, + arg.Status, + arg.Body, + arg.Time, + arg.Duration, + arg.Size, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseAssert = `-- name: CreateGraphQLResponseAssert :exec + +INSERT INTO graphql_response_assert ( + id, response_id, value, success, created_at +) +VALUES (?, ?, ?, ?, ?) +` + +type CreateGraphQLResponseAssertParams struct { + ID []byte + ResponseID []byte + Value string + Success bool + CreatedAt int64 +} + +// GraphQL Response Assert Queries +func (q *Queries) CreateGraphQLResponseAssert(ctx context.Context, arg CreateGraphQLResponseAssertParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseAssertStmt, createGraphQLResponseAssert, + arg.ID, + arg.ResponseID, + arg.Value, + arg.Success, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseHeader = `-- name: CreateGraphQLResponseHeader :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES (?, ?, ?, ?, ?) +` + +type CreateGraphQLResponseHeaderParams struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 +} + +func (q *Queries) CreateGraphQLResponseHeader(ctx context.Context, arg CreateGraphQLResponseHeaderParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseHeaderStmt, createGraphQLResponseHeader, + arg.ID, + arg.ResponseID, + arg.Key, + arg.Value, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseHeaderBulk = `-- name: CreateGraphQLResponseHeaderBulk :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?) 
+` + +type CreateGraphQLResponseHeaderBulkParams struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 + ID_2 idwrap.IDWrap + ResponseID_2 idwrap.IDWrap + Key_2 string + Value_2 string + CreatedAt_2 int64 + ID_3 idwrap.IDWrap + ResponseID_3 idwrap.IDWrap + Key_3 string + Value_3 string + CreatedAt_3 int64 + ID_4 idwrap.IDWrap + ResponseID_4 idwrap.IDWrap + Key_4 string + Value_4 string + CreatedAt_4 int64 + ID_5 idwrap.IDWrap + ResponseID_5 idwrap.IDWrap + Key_5 string + Value_5 string + CreatedAt_5 int64 + ID_6 idwrap.IDWrap + ResponseID_6 idwrap.IDWrap + Key_6 string + Value_6 string + CreatedAt_6 int64 + ID_7 idwrap.IDWrap + ResponseID_7 idwrap.IDWrap + Key_7 string + Value_7 string + CreatedAt_7 int64 + ID_8 idwrap.IDWrap + ResponseID_8 idwrap.IDWrap + Key_8 string + Value_8 string + CreatedAt_8 int64 + ID_9 idwrap.IDWrap + ResponseID_9 idwrap.IDWrap + Key_9 string + Value_9 string + CreatedAt_9 int64 + ID_10 idwrap.IDWrap + ResponseID_10 idwrap.IDWrap + Key_10 string + Value_10 string + CreatedAt_10 int64 +} + +func (q *Queries) CreateGraphQLResponseHeaderBulk(ctx context.Context, arg CreateGraphQLResponseHeaderBulkParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseHeaderBulkStmt, createGraphQLResponseHeaderBulk, + arg.ID, + arg.ResponseID, + arg.Key, + arg.Value, + arg.CreatedAt, + arg.ID_2, + arg.ResponseID_2, + arg.Key_2, + arg.Value_2, + arg.CreatedAt_2, + arg.ID_3, + arg.ResponseID_3, + arg.Key_3, + arg.Value_3, + arg.CreatedAt_3, + arg.ID_4, + arg.ResponseID_4, + arg.Key_4, + arg.Value_4, + arg.CreatedAt_4, + arg.ID_5, + arg.ResponseID_5, + arg.Key_5, + arg.Value_5, + arg.CreatedAt_5, + arg.ID_6, + arg.ResponseID_6, + arg.Key_6, + arg.Value_6, + arg.CreatedAt_6, + arg.ID_7, + arg.ResponseID_7, + arg.Key_7, + arg.Value_7, + arg.CreatedAt_7, + arg.ID_8, + arg.ResponseID_8, + arg.Key_8, + arg.Value_8, + arg.CreatedAt_8, + arg.ID_9, + arg.ResponseID_9, + arg.Key_9, + arg.Value_9, + arg.CreatedAt_9, + arg.ID_10, + arg.ResponseID_10, + arg.Key_10, + arg.Value_10, + arg.CreatedAt_10, + ) + return err +} + +const createGraphQLVersion = `-- name: CreateGraphQLVersion :exec + +INSERT INTO graphql_version ( + id, graphql_id, version_name, version_description, is_active, created_at, created_by +) +VALUES (?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLVersionParams struct { + ID []byte + GraphqlID []byte + VersionName string + VersionDescription string + IsActive bool + CreatedAt int64 + CreatedBy []byte +} + +// GraphQL Version Queries +func (q *Queries) CreateGraphQLVersion(ctx context.Context, arg CreateGraphQLVersionParams) error { + _, err := q.exec(ctx, q.createGraphQLVersionStmt, createGraphQLVersion, + arg.ID, + arg.GraphqlID, + arg.VersionName, + arg.VersionDescription, + arg.IsActive, + arg.CreatedAt, + arg.CreatedBy, + ) + return err +} + +const deleteGraphQL = `-- name: DeleteGraphQL :exec +DELETE FROM graphql +WHERE id = ? +` + +func (q *Queries) DeleteGraphQL(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLStmt, deleteGraphQL, id) + return err +} + +const deleteGraphQLAssert = `-- name: DeleteGraphQLAssert :exec +DELETE FROM graphql_assert +WHERE id = ? +` + +func (q *Queries) DeleteGraphQLAssert(ctx context.Context, id []byte) error { + _, err := q.exec(ctx, q.deleteGraphQLAssertStmt, deleteGraphQLAssert, id) + return err +} + +const deleteGraphQLHeader = `-- name: DeleteGraphQLHeader :exec +DELETE FROM graphql_header +WHERE id = ? 
+` + +func (q *Queries) DeleteGraphQLHeader(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLHeaderStmt, deleteGraphQLHeader, id) + return err +} + +const deleteGraphQLResponse = `-- name: DeleteGraphQLResponse :exec +DELETE FROM graphql_response WHERE id = ? +` + +func (q *Queries) DeleteGraphQLResponse(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLResponseStmt, deleteGraphQLResponse, id) + return err +} + +const deleteGraphQLResponseHeader = `-- name: DeleteGraphQLResponseHeader :exec +DELETE FROM graphql_response_header WHERE id = ? +` + +func (q *Queries) DeleteGraphQLResponseHeader(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLResponseHeaderStmt, deleteGraphQLResponseHeader, id) + return err +} + +const getGraphQL = `-- name: GetGraphQL :one + +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE id = ? LIMIT 1 +` + +// GraphQL Core Queries +func (q *Queries) GetGraphQL(ctx context.Context, id idwrap.IDWrap) (Graphql, error) { + row := q.queryRow(ctx, q.getGraphQLStmt, getGraphQL, id) + var i Graphql + err := row.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ) + return i, err +} + +const getGraphQLAssert = `-- name: GetGraphQLAssert :one + +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id = ? +LIMIT 1 +` + +type GetGraphQLAssertRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +// GraphQL Assert Queries +func (q *Queries) GetGraphQLAssert(ctx context.Context, id []byte) (GetGraphQLAssertRow, error) { + row := q.queryRow(ctx, q.getGraphQLAssertStmt, getGraphQLAssert, id) + var i GetGraphQLAssertRow + err := row.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getGraphQLAssertDeltasByParentID = `-- name: GetGraphQLAssertDeltasByParentID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE parent_graphql_assert_id = ? 
AND is_delta = TRUE +ORDER BY display_order +` + +type GetGraphQLAssertDeltasByParentIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertDeltasByParentID(ctx context.Context, parentGraphqlAssertID []byte) ([]GetGraphQLAssertDeltasByParentIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertDeltasByParentIDStmt, getGraphQLAssertDeltasByParentID, parentGraphqlAssertID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertDeltasByParentIDRow{} + for rows.Next() { + var i GetGraphQLAssertDeltasByParentIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertDeltasByWorkspaceID = `-- name: GetGraphQLAssertDeltasByWorkspaceID :many +SELECT + ga.id, + ga.graphql_id, + ga.value, + ga.enabled, + ga.description, + ga.display_order, + ga.parent_graphql_assert_id, + ga.is_delta, + ga.delta_value, + ga.delta_enabled, + ga.delta_description, + ga.delta_display_order, + ga.created_at, + ga.updated_at +FROM graphql_assert ga +INNER JOIN graphql g ON ga.graphql_id = g.id +WHERE g.workspace_id = ? AND ga.is_delta = TRUE +ORDER BY ga.display_order +` + +type GetGraphQLAssertDeltasByWorkspaceIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GetGraphQLAssertDeltasByWorkspaceIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertDeltasByWorkspaceIDStmt, getGraphQLAssertDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertDeltasByWorkspaceIDRow{} + for rows.Next() { + var i GetGraphQLAssertDeltasByWorkspaceIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertsByGraphQLID = `-- name: GetGraphQLAssertsByGraphQLID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE graphql_id = ? 
+ORDER BY display_order +` + +type GetGraphQLAssertsByGraphQLIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertsByGraphQLID(ctx context.Context, graphqlID []byte) ([]GetGraphQLAssertsByGraphQLIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertsByGraphQLIDStmt, getGraphQLAssertsByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertsByGraphQLIDRow{} + for rows.Next() { + var i GetGraphQLAssertsByGraphQLIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertsByIDs = `-- name: GetGraphQLAssertsByIDs :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id IN (/*SLICE:ids*/?) +` + +type GetGraphQLAssertsByIDsRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertsByIDs(ctx context.Context, ids [][]byte) ([]GetGraphQLAssertsByIDsRow, error) { + query := getGraphQLAssertsByIDs + var queryParams []interface{} + if len(ids) > 0 { + for _, v := range ids { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(ids))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1) + } + rows, err := q.query(ctx, nil, query, queryParams...) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertsByIDsRow{} + for rows.Next() { + var i GetGraphQLAssertsByIDsRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLDeltasByParentID = `-- name: GetGraphQLDeltasByParentID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE parent_graphql_id = ? 
AND is_delta = TRUE +ORDER BY updated_at DESC +` + +func (q *Queries) GetGraphQLDeltasByParentID(ctx context.Context, parentGraphqlID []byte) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLDeltasByParentIDStmt, getGraphQLDeltasByParentID, parentGraphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLDeltasByWorkspaceID = `-- name: GetGraphQLDeltasByWorkspaceID :many + +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? AND is_delta = TRUE +ORDER BY updated_at DESC +` + +// GraphQL Delta Queries +func (q *Queries) GetGraphQLDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLDeltasByWorkspaceIDStmt, getGraphQLDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaderDeltasByParentID = `-- name: GetGraphQLHeaderDeltasByParentID :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE parent_graphql_header_id = ? 
AND is_delta = TRUE +ORDER BY display_order +` + +func (q *Queries) GetGraphQLHeaderDeltasByParentID(ctx context.Context, parentGraphqlHeaderID []byte) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeaderDeltasByParentIDStmt, getGraphQLHeaderDeltasByParentID, parentGraphqlHeaderID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaderDeltasByWorkspaceID = `-- name: GetGraphQLHeaderDeltasByWorkspaceID :many + +SELECT + h.id, h.graphql_id, h.header_key, h.header_value, h.description, + h.enabled, h.display_order, h.created_at, h.updated_at, + h.parent_graphql_header_id, h.is_delta, + h.delta_header_key, h.delta_header_value, h.delta_description, h.delta_enabled, h.delta_display_order +FROM graphql_header h +JOIN graphql g ON h.graphql_id = g.id +WHERE g.workspace_id = ? AND h.is_delta = TRUE +ORDER BY h.updated_at DESC +` + +// GraphQL Header Delta Queries +func (q *Queries) GetGraphQLHeaderDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeaderDeltasByWorkspaceIDStmt, getGraphQLHeaderDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaders = `-- name: GetGraphQLHeaders :many + +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE graphql_id = ? 
+ORDER BY display_order +` + +// GraphQL Header Queries +func (q *Queries) GetGraphQLHeaders(ctx context.Context, graphqlID idwrap.IDWrap) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeadersStmt, getGraphQLHeaders, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeadersByIDs = `-- name: GetGraphQLHeadersByIDs :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE id IN (/*SLICE:ids*/?) +` + +func (q *Queries) GetGraphQLHeadersByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]GraphqlHeader, error) { + query := getGraphQLHeadersByIDs + var queryParams []interface{} + if len(ids) > 0 { + for _, v := range ids { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(ids))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1) + } + rows, err := q.query(ctx, nil, query, queryParams...) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponse = `-- name: GetGraphQLResponse :one + +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE id = ? LIMIT 1 +` + +// GraphQL Response Queries +func (q *Queries) GetGraphQLResponse(ctx context.Context, id idwrap.IDWrap) (GraphqlResponse, error) { + row := q.queryRow(ctx, q.getGraphQLResponseStmt, getGraphQLResponse, id) + var i GraphqlResponse + err := row.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ) + return i, err +} + +const getGraphQLResponseAssertsByResponseID = `-- name: GetGraphQLResponseAssertsByResponseID :many +SELECT id, response_id, value, success, created_at +FROM graphql_response_assert +WHERE response_id = ? 
+ORDER BY created_at +` + +func (q *Queries) GetGraphQLResponseAssertsByResponseID(ctx context.Context, responseID []byte) ([]GraphqlResponseAssert, error) { + rows, err := q.query(ctx, q.getGraphQLResponseAssertsByResponseIDStmt, getGraphQLResponseAssertsByResponseID, responseID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseAssert{} + for rows.Next() { + var i GraphqlResponseAssert + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Value, + &i.Success, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseAssertsByWorkspaceID = `-- name: GetGraphQLResponseAssertsByWorkspaceID :many +SELECT + gra.id, + gra.response_id, + gra.value, + gra.success, + gra.created_at +FROM graphql_response_assert gra +INNER JOIN graphql_response gr ON gra.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +` + +func (q *Queries) GetGraphQLResponseAssertsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponseAssert, error) { + rows, err := q.query(ctx, q.getGraphQLResponseAssertsByWorkspaceIDStmt, getGraphQLResponseAssertsByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseAssert{} + for rows.Next() { + var i GraphqlResponseAssert + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Value, + &i.Success, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseHeadersByResponseID = `-- name: GetGraphQLResponseHeadersByResponseID :many + +SELECT + id, response_id, key, value, created_at +FROM graphql_response_header +WHERE response_id = ? +ORDER BY key +` + +// GraphQL Response Header Queries +func (q *Queries) GetGraphQLResponseHeadersByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]GraphqlResponseHeader, error) { + rows, err := q.query(ctx, q.getGraphQLResponseHeadersByResponseIDStmt, getGraphQLResponseHeadersByResponseID, responseID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseHeader{} + for rows.Next() { + var i GraphqlResponseHeader + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Key, + &i.Value, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseHeadersByWorkspaceID = `-- name: GetGraphQLResponseHeadersByWorkspaceID :many +SELECT + grh.id, grh.response_id, grh.key, grh.value, grh.created_at +FROM graphql_response_header grh +INNER JOIN graphql_response gr ON grh.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? 
+ORDER BY gr.time DESC, grh.key +` + +func (q *Queries) GetGraphQLResponseHeadersByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponseHeader, error) { + rows, err := q.query(ctx, q.getGraphQLResponseHeadersByWorkspaceIDStmt, getGraphQLResponseHeadersByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseHeader{} + for rows.Next() { + var i GraphqlResponseHeader + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Key, + &i.Value, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponsesByGraphQLID = `-- name: GetGraphQLResponsesByGraphQLID :many +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE graphql_id = ? +ORDER BY time DESC +` + +func (q *Queries) GetGraphQLResponsesByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]GraphqlResponse, error) { + rows, err := q.query(ctx, q.getGraphQLResponsesByGraphQLIDStmt, getGraphQLResponsesByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponse{} + for rows.Next() { + var i GraphqlResponse + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponsesByWorkspaceID = `-- name: GetGraphQLResponsesByWorkspaceID :many +SELECT + gr.id, gr.graphql_id, gr.status, gr.body, gr.time, + gr.duration, gr.size, gr.created_at +FROM graphql_response gr +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC +` + +func (q *Queries) GetGraphQLResponsesByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponse, error) { + rows, err := q.query(ctx, q.getGraphQLResponsesByWorkspaceIDStmt, getGraphQLResponsesByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponse{} + for rows.Next() { + var i GraphqlResponse + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLVersionsByGraphQLID = `-- name: GetGraphQLVersionsByGraphQLID :many +SELECT id, graphql_id, version_name, version_description, is_active, created_at, created_by +FROM graphql_version +WHERE graphql_id = ? 
+ORDER BY created_at DESC +` + +func (q *Queries) GetGraphQLVersionsByGraphQLID(ctx context.Context, graphqlID []byte) ([]GraphqlVersion, error) { + rows, err := q.query(ctx, q.getGraphQLVersionsByGraphQLIDStmt, getGraphQLVersionsByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlVersion{} + for rows.Next() { + var i GraphqlVersion + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.VersionName, + &i.VersionDescription, + &i.IsActive, + &i.CreatedAt, + &i.CreatedBy, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLWorkspaceID = `-- name: GetGraphQLWorkspaceID :one +SELECT workspace_id +FROM graphql +WHERE id = ? +LIMIT 1 +` + +func (q *Queries) GetGraphQLWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + row := q.queryRow(ctx, q.getGraphQLWorkspaceIDStmt, getGraphQLWorkspaceID, id) + var workspace_id idwrap.IDWrap + err := row.Scan(&workspace_id) + return workspace_id, err +} + +const getGraphQLsByWorkspaceID = `-- name: GetGraphQLsByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? +ORDER BY updated_at DESC +` + +func (q *Queries) GetGraphQLsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLsByWorkspaceIDStmt, getGraphQLsByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateGraphQL = `-- name: UpdateGraphQL :exec +UPDATE graphql +SET + name = ?, + url = ?, + query = ?, + variables = ?, + description = ?, + last_run_at = COALESCE(?, last_run_at), + updated_at = unixepoch() +WHERE id = ? +` + +type UpdateGraphQLParams struct { + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQL(ctx context.Context, arg UpdateGraphQLParams) error { + _, err := q.exec(ctx, q.updateGraphQLStmt, updateGraphQL, + arg.Name, + arg.Url, + arg.Query, + arg.Variables, + arg.Description, + arg.LastRunAt, + arg.ID, + ) + return err +} + +const updateGraphQLAssert = `-- name: UpdateGraphQLAssert :exec +UPDATE graphql_assert +SET + value = ?, + enabled = ?, + description = ?, + display_order = ?, + updated_at = ? +WHERE id = ? 
+` + +type UpdateGraphQLAssertParams struct { + Value string + Enabled bool + Description string + DisplayOrder float64 + UpdatedAt int64 + ID []byte +} + +func (q *Queries) UpdateGraphQLAssert(ctx context.Context, arg UpdateGraphQLAssertParams) error { + _, err := q.exec(ctx, q.updateGraphQLAssertStmt, updateGraphQLAssert, + arg.Value, + arg.Enabled, + arg.Description, + arg.DisplayOrder, + arg.UpdatedAt, + arg.ID, + ) + return err +} + +const updateGraphQLAssertDelta = `-- name: UpdateGraphQLAssertDelta :exec +UPDATE graphql_assert +SET + delta_value = ?, + delta_enabled = ?, + delta_description = ?, + delta_display_order = ?, + updated_at = ? +WHERE id = ? +` + +type UpdateGraphQLAssertDeltaParams struct { + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + UpdatedAt int64 + ID []byte +} + +func (q *Queries) UpdateGraphQLAssertDelta(ctx context.Context, arg UpdateGraphQLAssertDeltaParams) error { + _, err := q.exec(ctx, q.updateGraphQLAssertDeltaStmt, updateGraphQLAssertDelta, + arg.DeltaValue, + arg.DeltaEnabled, + arg.DeltaDescription, + arg.DeltaDisplayOrder, + arg.UpdatedAt, + arg.ID, + ) + return err +} + +const updateGraphQLDelta = `-- name: UpdateGraphQLDelta :exec +UPDATE graphql +SET + delta_name = ?, + delta_url = ?, + delta_query = ?, + delta_variables = ?, + delta_description = ?, + updated_at = unixepoch() +WHERE id = ? +` + +type UpdateGraphQLDeltaParams struct { + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQLDelta(ctx context.Context, arg UpdateGraphQLDeltaParams) error { + _, err := q.exec(ctx, q.updateGraphQLDeltaStmt, updateGraphQLDelta, + arg.DeltaName, + arg.DeltaUrl, + arg.DeltaQuery, + arg.DeltaVariables, + arg.DeltaDescription, + arg.ID, + ) + return err +} + +const updateGraphQLHeader = `-- name: UpdateGraphQLHeader :exec +UPDATE graphql_header +SET + header_key = ?, + header_value = ?, + description = ?, + enabled = ?, + display_order = ?, + updated_at = unixepoch() +WHERE id = ? 
+` + +type UpdateGraphQLHeaderParams struct { + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQLHeader(ctx context.Context, arg UpdateGraphQLHeaderParams) error { + _, err := q.exec(ctx, q.updateGraphQLHeaderStmt, updateGraphQLHeader, + arg.HeaderKey, + arg.HeaderValue, + arg.Description, + arg.Enabled, + arg.DisplayOrder, + arg.ID, + ) + return err +} diff --git a/packages/db/pkg/sqlc/gen/models.go b/packages/db/pkg/sqlc/gen/models.go index a218fddd2..09c26761c 100644 --- a/packages/db/pkg/sqlc/gen/models.go +++ b/packages/db/pkg/sqlc/gen/models.go @@ -122,6 +122,12 @@ type FlowNodeForEach struct { Expression string } +type FlowNodeGraphql struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + type FlowNodeHttp struct { FlowNodeID idwrap.IDWrap HttpID idwrap.IDWrap @@ -156,6 +162,101 @@ type FlowVariable struct { DisplayOrder float64 } +type Graphql struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlID []byte + IsDelta bool + IsSnapshot bool + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} +} + +type GraphqlAssert struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} +} + +type GraphqlHeader struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlHeaderID []byte + IsDelta bool + DeltaHeaderKey interface{} + DeltaHeaderValue interface{} + DeltaDescription interface{} + DeltaEnabled interface{} + DeltaDisplayOrder interface{} +} + +type GraphqlResponse struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + Status interface{} + Body []byte + Time time.Time + Duration interface{} + Size interface{} + CreatedAt int64 +} + +type GraphqlResponseAssert struct { + ID []byte + ResponseID []byte + Value string + Success bool + CreatedAt int64 +} + +type GraphqlResponseHeader struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 +} + +type GraphqlVersion struct { + ID []byte + GraphqlID []byte + VersionName string + VersionDescription string + IsActive bool + CreatedAt int64 + CreatedBy []byte +} + type Http struct { ID idwrap.IDWrap WorkspaceID idwrap.IDWrap @@ -340,6 +441,7 @@ type NodeExecution struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } diff --git a/packages/db/pkg/sqlc/queries/flow.sql b/packages/db/pkg/sqlc/queries/flow.sql index 8d919838d..debf271db 100644 --- a/packages/db/pkg/sqlc/queries/flow.sql +++ b/packages/db/pkg/sqlc/queries/flow.sql @@ -420,6 +420,32 @@ DELETE FROM flow_node_http WHERE flow_node_id = ?; +-- name: GetFlowNodeGraphQL :one +SELECT + flow_node_id, + graphql_id, + delta_graphql_id +FROM + flow_node_graphql +WHERE + flow_node_id = ? 
+LIMIT 1; + +-- name: CreateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?); + +-- name: UpdateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) +ON CONFLICT(flow_node_id) DO UPDATE SET + graphql_id = excluded.graphql_id, + delta_graphql_id = excluded.delta_graphql_id; + +-- name: DeleteFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id = ?; + +-- name: CleanupOrphanedFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id NOT IN (SELECT id FROM flow_node); + -- name: GetFlowNodeCondition :one SELECT flow_node_id, @@ -631,32 +657,33 @@ ORDER BY ne.completed_at DESC, ne.id DESC; -- name: CreateNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING *; -- name: UpdateNodeExecution :one UPDATE node_execution -SET state = ?, error = ?, output_data = ?, - output_data_compress_type = ?, http_response_id = ?, completed_at = ? +SET state = ?, error = ?, output_data = ?, + output_data_compress_type = ?, http_response_id = ?, graphql_response_id = ?, completed_at = ? WHERE id = ? RETURNING *; -- name: UpsertNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(id) DO UPDATE SET state = excluded.state, - error = excluded.error, + error = excluded.error, input_data = excluded.input_data, input_data_compress_type = excluded.input_data_compress_type, output_data = excluded.output_data, output_data_compress_type = excluded.output_data_compress_type, http_response_id = excluded.http_response_id, + graphql_response_id = excluded.graphql_response_id, completed_at = excluded.completed_at RETURNING *; diff --git a/packages/db/pkg/sqlc/queries/graphql.sql b/packages/db/pkg/sqlc/queries/graphql.sql new file mode 100644 index 000000000..c1bd3a838 --- /dev/null +++ b/packages/db/pkg/sqlc/queries/graphql.sql @@ -0,0 +1,431 @@ +-- +-- GraphQL Core Queries +-- + +-- name: GetGraphQL :one +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE id = ? LIMIT 1; + +-- name: GetGraphQLsByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? +ORDER BY updated_at DESC; + +-- name: GetGraphQLWorkspaceID :one +SELECT workspace_id +FROM graphql +WHERE id = ? 
+LIMIT 1; + +-- name: CreateGraphQL :exec +INSERT INTO graphql ( + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQL :exec +UPDATE graphql +SET + name = ?, + url = ?, + query = ?, + variables = ?, + description = ?, + last_run_at = COALESCE(?, last_run_at), + updated_at = unixepoch() +WHERE id = ?; + +-- name: UpdateGraphQLDelta :exec +UPDATE graphql +SET + delta_name = ?, + delta_url = ?, + delta_query = ?, + delta_variables = ?, + delta_description = ?, + updated_at = unixepoch() +WHERE id = ?; + +-- name: DeleteGraphQL :exec +DELETE FROM graphql +WHERE id = ?; + +-- +-- GraphQL Header Queries +-- + +-- name: GetGraphQLHeaders :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE graphql_id = ? +ORDER BY display_order; + +-- name: GetGraphQLHeadersByIDs :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE id IN (sqlc.slice('ids')); + +-- name: CreateGraphQLHeader :exec +INSERT INTO graphql_header ( + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQLHeader :exec +UPDATE graphql_header +SET + header_key = ?, + header_value = ?, + description = ?, + enabled = ?, + display_order = ?, + updated_at = unixepoch() +WHERE id = ?; + +-- name: DeleteGraphQLHeader :exec +DELETE FROM graphql_header +WHERE id = ?; + +-- +-- GraphQL Response Queries +-- + +-- name: GetGraphQLResponse :one +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE id = ? LIMIT 1; + +-- name: GetGraphQLResponsesByGraphQLID :many +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE graphql_id = ? +ORDER BY time DESC; + +-- name: GetGraphQLResponsesByWorkspaceID :many +SELECT + gr.id, gr.graphql_id, gr.status, gr.body, gr.time, + gr.duration, gr.size, gr.created_at +FROM graphql_response gr +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC; + +-- name: CreateGraphQLResponse :exec +INSERT INTO graphql_response ( + id, graphql_id, status, body, time, duration, size, created_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?); + +-- name: DeleteGraphQLResponse :exec +DELETE FROM graphql_response WHERE id = ?; + +-- +-- GraphQL Response Header Queries +-- + +-- name: GetGraphQLResponseHeadersByResponseID :many +SELECT + id, response_id, key, value, created_at +FROM graphql_response_header +WHERE response_id = ? 
+ORDER BY key; + +-- name: GetGraphQLResponseHeadersByWorkspaceID :many +SELECT + grh.id, grh.response_id, grh.key, grh.value, grh.created_at +FROM graphql_response_header grh +INNER JOIN graphql_response gr ON grh.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC, grh.key; + +-- name: CreateGraphQLResponseHeader :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES (?, ?, ?, ?, ?); + +-- name: CreateGraphQLResponseHeaderBulk :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?); + +-- name: DeleteGraphQLResponseHeader :exec +DELETE FROM graphql_response_header WHERE id = ?; + +-- +-- GraphQL Delta Queries +-- + +-- name: GetGraphQLDeltasByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? AND is_delta = TRUE +ORDER BY updated_at DESC; + +-- name: GetGraphQLDeltasByParentID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE parent_graphql_id = ? AND is_delta = TRUE +ORDER BY updated_at DESC; + +-- +-- GraphQL Header Delta Queries +-- + +-- name: GetGraphQLHeaderDeltasByWorkspaceID :many +SELECT + h.id, h.graphql_id, h.header_key, h.header_value, h.description, + h.enabled, h.display_order, h.created_at, h.updated_at, + h.parent_graphql_header_id, h.is_delta, + h.delta_header_key, h.delta_header_value, h.delta_description, h.delta_enabled, h.delta_display_order +FROM graphql_header h +JOIN graphql g ON h.graphql_id = g.id +WHERE g.workspace_id = ? AND h.is_delta = TRUE +ORDER BY h.updated_at DESC; + +-- name: GetGraphQLHeaderDeltasByParentID :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE parent_graphql_header_id = ? AND is_delta = TRUE +ORDER BY display_order; + +-- +-- GraphQL Assert Queries +-- + +-- name: GetGraphQLAssert :one +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id = ? +LIMIT 1; + +-- name: GetGraphQLAssertsByGraphQLID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE graphql_id = ? 
+ORDER BY display_order; + +-- name: GetGraphQLAssertsByIDs :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id IN (sqlc.slice('ids')); + +-- name: CreateGraphQLAssert :exec +INSERT INTO graphql_assert ( + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQLAssert :exec +UPDATE graphql_assert +SET + value = ?, + enabled = ?, + description = ?, + display_order = ?, + updated_at = ? +WHERE id = ?; + +-- name: UpdateGraphQLAssertDelta :exec +UPDATE graphql_assert +SET + delta_value = ?, + delta_enabled = ?, + delta_description = ?, + delta_display_order = ?, + updated_at = ? +WHERE id = ?; + +-- name: DeleteGraphQLAssert :exec +DELETE FROM graphql_assert +WHERE id = ?; + +-- name: GetGraphQLAssertDeltasByWorkspaceID :many +SELECT + ga.id, + ga.graphql_id, + ga.value, + ga.enabled, + ga.description, + ga.display_order, + ga.parent_graphql_assert_id, + ga.is_delta, + ga.delta_value, + ga.delta_enabled, + ga.delta_description, + ga.delta_display_order, + ga.created_at, + ga.updated_at +FROM graphql_assert ga +INNER JOIN graphql g ON ga.graphql_id = g.id +WHERE g.workspace_id = ? AND ga.is_delta = TRUE +ORDER BY ga.display_order; + +-- name: GetGraphQLAssertDeltasByParentID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE parent_graphql_assert_id = ? AND is_delta = TRUE +ORDER BY display_order; + +-- +-- GraphQL Version Queries +-- + +-- name: CreateGraphQLVersion :exec +INSERT INTO graphql_version ( + id, graphql_id, version_name, version_description, is_active, created_at, created_by +) +VALUES (?, ?, ?, ?, ?, ?, ?); + +-- name: GetGraphQLVersionsByGraphQLID :many +SELECT id, graphql_id, version_name, version_description, is_active, created_at, created_by +FROM graphql_version +WHERE graphql_id = ? +ORDER BY created_at DESC; + +-- +-- GraphQL Response Assert Queries +-- + +-- name: CreateGraphQLResponseAssert :exec +INSERT INTO graphql_response_assert ( + id, response_id, value, success, created_at +) +VALUES (?, ?, ?, ?, ?); + +-- name: GetGraphQLResponseAssertsByResponseID :many +SELECT id, response_id, value, success, created_at +FROM graphql_response_assert +WHERE response_id = ? 
+ORDER BY created_at; + +-- name: GetGraphQLResponseAssertsByWorkspaceID :many +SELECT + gra.id, + gra.response_id, + gra.value, + gra.success, + gra.created_at +FROM graphql_response_assert gra +INNER JOIN graphql_response gr ON gra.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ?; diff --git a/packages/db/pkg/sqlc/schema/03_files.sql b/packages/db/pkg/sqlc/schema/03_files.sql index 532473d45..21f4f8d75 100644 --- a/packages/db/pkg/sqlc/schema/03_files.sql +++ b/packages/db/pkg/sqlc/schema/03_files.sql @@ -16,13 +16,14 @@ CREATE TABLE files ( path_hash TEXT, updated_at BIGINT NOT NULL DEFAULT (unixepoch()), CHECK (length (id) == 16), - CHECK (content_kind IN (0, 1, 2, 3, 4)), -- 0 = folder, 1 = http, 2 = flow, 3 = http_delta, 4 = credential + CHECK (content_kind IN (0, 1, 2, 3, 4, 5)), -- 0 = folder, 1 = http, 2 = http_delta, 3 = flow, 4 = credential, 5 = graphql CHECK ( (content_kind = 0 AND content_id IS NOT NULL) OR (content_kind = 1 AND content_id IS NOT NULL) OR (content_kind = 2 AND content_id IS NOT NULL) OR (content_kind = 3 AND content_id IS NOT NULL) OR (content_kind = 4 AND content_id IS NOT NULL) OR + (content_kind = 5 AND content_id IS NOT NULL) OR (content_id IS NULL) ), FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, diff --git a/packages/db/pkg/sqlc/schema/05_flow.sql b/packages/db/pkg/sqlc/schema/05_flow.sql index a532f1f74..511d6ba55 100644 --- a/packages/db/pkg/sqlc/schema/05_flow.sql +++ b/packages/db/pkg/sqlc/schema/05_flow.sql @@ -83,6 +83,14 @@ CREATE TABLE flow_node_http ( ); +CREATE TABLE flow_node_graphql ( + flow_node_id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + delta_graphql_id BLOB, + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE, + FOREIGN KEY (delta_graphql_id) REFERENCES graphql (id) ON DELETE SET NULL +); + CREATE TABLE flow_node_condition ( flow_node_id BLOB NOT NULL PRIMARY KEY, expression TEXT NOT NULL @@ -122,8 +130,10 @@ CREATE TABLE node_execution ( output_data_compress_type INT8 NOT NULL DEFAULT 0, -- Add new fields http_response_id BLOB, -- Response ID for HTTP request nodes (NULL for non-request nodes) + graphql_response_id BLOB, -- Response ID for GraphQL request nodes completed_at BIGINT, -- Unix timestamp in milliseconds - FOREIGN KEY (http_response_id) REFERENCES http_response (id) ON DELETE SET NULL + FOREIGN KEY (http_response_id) REFERENCES http_response (id) ON DELETE SET NULL, + FOREIGN KEY (graphql_response_id) REFERENCES graphql_response (id) ON DELETE SET NULL ); CREATE INDEX node_execution_idx1 ON node_execution (node_id); diff --git a/packages/db/pkg/sqlc/schema/08_graphql.sql b/packages/db/pkg/sqlc/schema/08_graphql.sql new file mode 100644 index 000000000..cad1f8ce9 --- /dev/null +++ b/packages/db/pkg/sqlc/schema/08_graphql.sql @@ -0,0 +1,130 @@ +/* + * + * GRAPHQL SYSTEM + * GraphQL request support - simpler than HTTP (no delta system) + * + */ + +-- Core GraphQL request table +CREATE TABLE graphql ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + folder_id BLOB, + name TEXT NOT NULL, + url TEXT NOT NULL, + query TEXT NOT NULL DEFAULT '', + variables TEXT NOT NULL DEFAULT '', + description TEXT NOT NULL DEFAULT '', + last_run_at BIGINT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (folder_id) REFERENCES files (id) ON DELETE SET NULL +); + +CREATE INDEX 
graphql_workspace_idx ON graphql (workspace_id); +CREATE INDEX graphql_folder_idx ON graphql (folder_id) WHERE folder_id IS NOT NULL; + +-- GraphQL versions (snapshots of requests at a point in time) +CREATE TABLE graphql_version ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + version_name TEXT NOT NULL, + version_description TEXT NOT NULL DEFAULT '', + is_active BOOLEAN NOT NULL DEFAULT FALSE, + + -- Metadata + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + created_by BLOB, -- User ID who created this version + + -- Foreign keys + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE, + FOREIGN KEY (created_by) REFERENCES users (id) ON DELETE SET NULL, + + -- Constraints + CHECK (version_name != '') +); + +CREATE INDEX graphql_version_graphql_idx ON graphql_version (graphql_id); +CREATE INDEX graphql_version_active_idx ON graphql_version (is_active) WHERE is_active = TRUE; +CREATE INDEX graphql_version_created_by_idx ON graphql_version (created_by); + +-- GraphQL request headers +CREATE TABLE graphql_header ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + header_key TEXT NOT NULL, + header_value TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + enabled BOOLEAN NOT NULL DEFAULT TRUE, + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_header_graphql_idx ON graphql_header (graphql_id); +CREATE INDEX graphql_header_order_idx ON graphql_header (graphql_id, display_order); + +-- GraphQL request assertions +CREATE TABLE graphql_assert ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + value TEXT NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT TRUE, + description TEXT NOT NULL DEFAULT '', + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_assert_graphql_idx ON graphql_assert (graphql_id); +CREATE INDEX graphql_assert_order_idx ON graphql_assert (graphql_id, display_order); + +-- GraphQL response (read-only) +CREATE TABLE graphql_response ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + status INT32 NOT NULL, + body BLOB, + time DATETIME NOT NULL, + duration INT32 NOT NULL, + size INT32 NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_response_graphql_idx ON graphql_response (graphql_id); +CREATE INDEX graphql_response_time_idx ON graphql_response (graphql_id, time DESC); + +-- GraphQL response headers (read-only) +CREATE TABLE graphql_response_header ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + key TEXT NOT NULL, + value TEXT NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_response_header_response_idx ON graphql_response_header (response_id); + +-- GraphQL response assertions (read-only) +CREATE TABLE graphql_response_assert ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + value TEXT NOT NULL, + success BOOLEAN NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE +); + +CREATE INDEX 
graphql_response_assert_response_idx ON graphql_response_assert (response_id); +CREATE INDEX graphql_response_assert_success_idx ON graphql_response_assert (response_id, success); diff --git a/packages/db/pkg/sqlc/schema/09_graphql_delta.sql b/packages/db/pkg/sqlc/schema/09_graphql_delta.sql new file mode 100644 index 000000000..e9899657f --- /dev/null +++ b/packages/db/pkg/sqlc/schema/09_graphql_delta.sql @@ -0,0 +1,56 @@ +/* + * + * GRAPHQL DELTA SYSTEM + * Adds delta/variant support to GraphQL tables for flow node overrides + * + */ + +-- Add delta system fields to graphql table +ALTER TABLE graphql ADD COLUMN parent_graphql_id BLOB DEFAULT NULL; +ALTER TABLE graphql ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; +ALTER TABLE graphql ADD COLUMN is_snapshot BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql table +ALTER TABLE graphql ADD COLUMN delta_name TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_url TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_query TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_variables TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_description TEXT NULL; + +-- Add foreign key for parent relationship (SQLite requires recreating the table) +-- Since we can't add FK constraints to existing tables in SQLite, we'll handle this +-- at the application level for now and add it in the next major migration + +-- Add indexes for delta resolution and performance +CREATE INDEX graphql_parent_delta_idx ON graphql (parent_graphql_id, is_delta); +CREATE INDEX graphql_delta_resolution_idx ON graphql (parent_graphql_id, is_delta, updated_at DESC); +CREATE INDEX graphql_active_streaming_idx ON graphql (workspace_id, updated_at DESC) WHERE is_delta = FALSE; + +-- Add delta system fields to graphql_header table +ALTER TABLE graphql_header ADD COLUMN parent_graphql_header_id BLOB DEFAULT NULL; +ALTER TABLE graphql_header ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql_header table +ALTER TABLE graphql_header ADD COLUMN delta_header_key TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_header_value TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_description TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_enabled BOOLEAN NULL; +ALTER TABLE graphql_header ADD COLUMN delta_display_order REAL NULL; + +-- Add indexes for graphql_header delta support +CREATE INDEX graphql_header_parent_delta_idx ON graphql_header (parent_graphql_header_id, is_delta); +CREATE INDEX graphql_header_delta_streaming_idx ON graphql_header (parent_graphql_header_id, is_delta, updated_at DESC); + +-- Add delta system fields to graphql_assert table +ALTER TABLE graphql_assert ADD COLUMN parent_graphql_assert_id BLOB DEFAULT NULL; +ALTER TABLE graphql_assert ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql_assert table +ALTER TABLE graphql_assert ADD COLUMN delta_value TEXT NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_enabled BOOLEAN NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_description TEXT NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_display_order REAL NULL; + +-- Add indexes for graphql_assert delta support +CREATE INDEX graphql_assert_parent_delta_idx ON graphql_assert (parent_graphql_assert_id, is_delta); +CREATE INDEX graphql_assert_delta_streaming_idx ON graphql_assert (parent_graphql_assert_id, is_delta, updated_at DESC); diff --git a/packages/db/pkg/sqlc/sqlc.yaml b/packages/db/pkg/sqlc/sqlc.yaml index dddf43631..6874ef5e3 100644 --- 
a/packages/db/pkg/sqlc/sqlc.yaml +++ b/packages/db/pkg/sqlc/sqlc.yaml @@ -274,6 +274,19 @@ sql: import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' package: 'idwrap' type: 'IDWrap' + ## flow_node_graphql + ### flow_node_id + - column: 'flow_node_graphql.flow_node_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + ### graphql_id + - column: 'flow_node_graphql.graphql_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' ## flow_node_condition ### flow_node_id - column: 'flow_node_condition.flow_node_id' @@ -368,6 +381,13 @@ sql: package: 'idwrap' type: 'IDWrap' pointer: true + ### graphql_response_id + - column: 'node_execution.graphql_response_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + pointer: true ## files ### id - column: 'files.id' @@ -779,3 +799,62 @@ sql: import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' package: 'idwrap' type: 'IDWrap' + ## GraphQL system + ### graphql table + - column: 'graphql.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql.workspace_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql.folder_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + pointer: true + - column: 'graphql.created_at' + go_type: 'int64' + - column: 'graphql.updated_at' + go_type: 'int64' + ### graphql_header table + - column: 'graphql_header.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_header.graphql_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_header.created_at' + go_type: 'int64' + - column: 'graphql_header.updated_at' + go_type: 'int64' + ### graphql_response table + - column: 'graphql_response.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_response.graphql_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + ### graphql_response_header table + - column: 'graphql_response_header.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_response_header.response_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' diff --git a/packages/server/cmd/server/server.go b/packages/server/cmd/server/server.go index e925223f2..7f883a483 100644 --- a/packages/server/cmd/server/server.go +++ b/packages/server/cmd/server/server.go @@ -30,6 +30,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rexportv2" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rflowv2" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhealth" 
"github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rimportv2" @@ -40,6 +41,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/credvault" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream/memory" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" @@ -49,6 +51,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/suser" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" @@ -56,6 +59,7 @@ import ( envapiv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/environment/v1" filesystemv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/file_system/v1" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" httpv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/http/v1" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/node_js_executor/v1/node_js_executorv1connect" apiv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/workspace/v1" @@ -163,6 +167,13 @@ func run() error { httpResponseService := shttp.NewHttpResponseService(queries) httpResponseReader := shttp.NewHttpResponseReader(currentDB) + // GraphQL + graphqlService := sgraphql.New(queries, logger) + graphqlReader := graphqlService.Reader() + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResponseService := sgraphql.NewGraphQLResponseService(queries) + // File Service fileService := sfile.New(queries, logger) @@ -195,6 +206,7 @@ func run() error { flowNodeAIService := sflow.NewNodeAIService(queries) flowNodeAiProviderService := sflow.NewNodeAiProviderService(queries) flowNodeMemoryService := sflow.NewNodeMemoryService(queries) + flowNodeGraphQLService := sflow.NewNodeGraphQLService(queries) nodeExecutionService := sflow.NewNodeExecutionService(queries) nodeExecutionReader := sflow.NewNodeExecutionReader(currentDB) @@ -307,6 +319,13 @@ func run() error { httpAssertService, ) + // Create GraphQL resolver for GraphQL delta resolution (shared with flow service) + graphqlResolver := gqlresolver.NewStandardResolver( + graphqlReader, + &graphqlHeaderService, + &graphqlAssertService, + ) + httpSrv := rhttp.New(rhttp.HttpServiceRPCDeps{ DB: currentDB, Readers: rhttp.HttpServiceRPCReaders{ @@ -454,15 +473,19 @@ func run() error { NodeJs: &flowNodeNodeJsService, NodeAI: &flowNodeAIService, NodeAiProvider: &flowNodeAiProviderService, - NodeMemory: &flowNodeMemoryService, - NodeExecution: &nodeExecutionService, + NodeMemory: &flowNodeMemoryService, + NodeGraphQL: &flowNodeGraphQLService, + NodeExecution: &nodeExecutionService, 
FlowVariable: &flowVariableService, Env: &environmentService, Var: &variableService, Http: &httpService, HttpBodyRaw: httpBodyRawService, - HttpResponse: httpResponseService, - File: fileService, + HttpResponse: httpResponseService, + GraphQLResponse: graphqlResponseService, + GraphQL: &graphqlService, + GraphQLHeader: &graphqlHeaderService, + File: fileService, Importer: workspaceImporter, Credential: credentialService, }, @@ -479,6 +502,7 @@ func run() error { Ai: streamers.Ai, AiProvider: streamers.AiProvider, Memory: streamers.Memory, + NodeGraphQL: streamers.NodeGraphQL, Execution: streamers.Execution, HttpResponse: streamers.HttpResponse, HttpResponseHeader: streamers.HttpResponseHeader, @@ -486,9 +510,10 @@ func run() error { Log: streamers.Log, File: streamers.File, }, - Resolver: requestResolver, - Logger: logger, - JsClient: jsClient, + Resolver: requestResolver, + GraphQLResolver: graphqlResolver, + Logger: logger, + JsClient: jsClient, }) newServiceManager.AddService(rflowv2.CreateService(flowSrvV2, optionsAll)) @@ -540,21 +565,58 @@ func run() error { }) newServiceManager.AddService(rcredential.CreateService(credentialSrv, optionsAll)) + // GraphQL Service + graphqlStreamers := &rgraphql.GraphQLStreamers{ + GraphQL: streamers.GraphQL, + GraphQLHeader: streamers.GraphQLHeader, + GraphQLAssert: streamers.GraphQLAssert, + GraphQLResponse: streamers.GraphQLResponse, + GraphQLResponseHeader: streamers.GraphQLResponseHeader, + GraphQLResponseAssert: streamers.GraphQLResponseAssert, + GraphQLVersion: streamers.GraphQLVersion, + File: streamers.File, + } + + graphqlSrv := rgraphql.New(rgraphql.GraphQLServiceRPCDeps{ + DB: currentDB, + Services: rgraphql.GraphQLServiceRPCServices{ + GraphQL: graphqlService, + Header: graphqlHeaderService, + GraphQLAssert: graphqlAssertService, + Response: graphqlResponseService, + User: userService, + Workspace: workspaceService, + WorkspaceUser: workspaceUserService, + Env: environmentService, + Variable: variableService, + File: fileService, + }, + Readers: rgraphql.GraphQLServiceRPCReaders{ + GraphQL: graphqlReader, + User: userReader, + Workspace: workspaceReader, + }, + Resolver: graphqlResolver, + Streamers: graphqlStreamers, + }) + newServiceManager.AddService(rgraphql.CreateService(graphqlSrv, optionsAll)) + // Reference Service refServiceRPC := rreference.NewReferenceServiceRPC(rreference.ReferenceServiceRPCDeps{ DB: currentDB, Readers: rreference.ReferenceServiceRPCReaders{ - User: userReader, - Workspace: workspaceReader, - Env: envReader, - Variable: varReader, - Flow: flowReader, - Node: nodeReader, - NodeRequest: flowNodeRequestReader, - FlowVariable: flowVariableReader, - FlowEdge: flowEdgeReader, - NodeExecution: nodeExecutionReader, - HttpResponse: httpResponseReader, + User: userReader, + Workspace: workspaceReader, + Env: envReader, + Variable: varReader, + Flow: flowReader, + Node: nodeReader, + NodeRequest: flowNodeRequestReader, + FlowVariable: flowVariableReader, + FlowEdge: flowEdgeReader, + NodeExecution: nodeExecutionReader, + HttpResponse: httpResponseReader, + GraphQLResponse: &graphqlResponseService, }, }) newServiceManager.AddService(rreference.CreateService(refServiceRPC, optionsAll)) @@ -701,12 +763,20 @@ type Streamers struct { Ai eventstream.SyncStreamer[rflowv2.AiTopic, rflowv2.AiEvent] AiProvider eventstream.SyncStreamer[rflowv2.AiProviderTopic, rflowv2.AiProviderEvent] Memory eventstream.SyncStreamer[rflowv2.MemoryTopic, rflowv2.MemoryEvent] + NodeGraphQL eventstream.SyncStreamer[rflowv2.NodeGraphQLTopic, 
rflowv2.NodeGraphQLEvent] Execution eventstream.SyncStreamer[rflowv2.ExecutionTopic, rflowv2.ExecutionEvent] File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] Credential eventstream.SyncStreamer[rcredential.CredentialTopic, rcredential.CredentialEvent] CredentialOpenAi eventstream.SyncStreamer[rcredential.CredentialOpenAiTopic, rcredential.CredentialOpenAiEvent] CredentialGemini eventstream.SyncStreamer[rcredential.CredentialGeminiTopic, rcredential.CredentialGeminiEvent] CredentialAnthropic eventstream.SyncStreamer[rcredential.CredentialAnthropicTopic, rcredential.CredentialAnthropicEvent] + GraphQL eventstream.SyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent] + GraphQLHeader eventstream.SyncStreamer[rgraphql.GraphQLHeaderTopic, rgraphql.GraphQLHeaderEvent] + GraphQLAssert eventstream.SyncStreamer[rgraphql.GraphQLAssertTopic, rgraphql.GraphQLAssertEvent] + GraphQLResponse eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + GraphQLVersion eventstream.SyncStreamer[rgraphql.GraphQLVersionTopic, rgraphql.GraphQLVersionEvent] } func NewStreamers() *Streamers { @@ -738,12 +808,20 @@ func NewStreamers() *Streamers { Ai: memory.NewInMemorySyncStreamer[rflowv2.AiTopic, rflowv2.AiEvent](), AiProvider: memory.NewInMemorySyncStreamer[rflowv2.AiProviderTopic, rflowv2.AiProviderEvent](), Memory: memory.NewInMemorySyncStreamer[rflowv2.MemoryTopic, rflowv2.MemoryEvent](), + NodeGraphQL: memory.NewInMemorySyncStreamer[rflowv2.NodeGraphQLTopic, rflowv2.NodeGraphQLEvent](), Execution: memory.NewInMemorySyncStreamer[rflowv2.ExecutionTopic, rflowv2.ExecutionEvent](), File: memory.NewInMemorySyncStreamer[rfile.FileTopic, rfile.FileEvent](), Credential: memory.NewInMemorySyncStreamer[rcredential.CredentialTopic, rcredential.CredentialEvent](), CredentialOpenAi: memory.NewInMemorySyncStreamer[rcredential.CredentialOpenAiTopic, rcredential.CredentialOpenAiEvent](), CredentialGemini: memory.NewInMemorySyncStreamer[rcredential.CredentialGeminiTopic, rcredential.CredentialGeminiEvent](), - CredentialAnthropic: memory.NewInMemorySyncStreamer[rcredential.CredentialAnthropicTopic, rcredential.CredentialAnthropicEvent](), + CredentialAnthropic: memory.NewInMemorySyncStreamer[rcredential.CredentialAnthropicTopic, rcredential.CredentialAnthropicEvent](), + GraphQL: memory.NewInMemorySyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent](), + GraphQLHeader: memory.NewInMemorySyncStreamer[rgraphql.GraphQLHeaderTopic, rgraphql.GraphQLHeaderEvent](), + GraphQLAssert: memory.NewInMemorySyncStreamer[rgraphql.GraphQLAssertTopic, rgraphql.GraphQLAssertEvent](), + GraphQLResponse: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent](), + GraphQLResponseHeader: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent](), + GraphQLResponseAssert: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent](), + GraphQLVersion: memory.NewInMemorySyncStreamer[rgraphql.GraphQLVersionTopic, rgraphql.GraphQLVersionEvent](), } } @@ -775,12 +853,17 @@ func (s *Streamers) Shutdown() { s.Ai.Shutdown() s.AiProvider.Shutdown() s.Memory.Shutdown() + s.NodeGraphQL.Shutdown() s.Execution.Shutdown() 
s.File.Shutdown() s.Credential.Shutdown() s.CredentialOpenAi.Shutdown() s.CredentialGemini.Shutdown() s.CredentialAnthropic.Shutdown() + s.GraphQL.Shutdown() + s.GraphQLHeader.Shutdown() + s.GraphQLResponse.Shutdown() + s.GraphQLResponseHeader.Shutdown() } // registerCascadeHandlers registers all handlers needed for cascade deletion events. @@ -1001,4 +1084,30 @@ func registerCascadeHandlers(registry *streamregistry.Registry, httpStreamers *r }) }) } + + // GraphQL entity + if streamers.GraphQL != nil { + registry.Register(mutation.EntityGraphQL, func(evt mutation.Event) { + if evt.Op != mutation.OpDelete { + return + } + streamers.GraphQL.Publish(rgraphql.GraphQLTopic{WorkspaceID: evt.WorkspaceID}, rgraphql.GraphQLEvent{ + Type: "delete", + GraphQL: &graphqlv1.GraphQL{GraphqlId: evt.ID.Bytes()}, + }) + }) + } + + // GraphQL Header entity + if streamers.GraphQLHeader != nil { + registry.Register(mutation.EntityGraphQLHeader, func(evt mutation.Event) { + if evt.Op != mutation.OpDelete { + return + } + streamers.GraphQLHeader.Publish(rgraphql.GraphQLHeaderTopic{WorkspaceID: evt.WorkspaceID}, rgraphql.GraphQLHeaderEvent{ + Type: "delete", + GraphQLHeader: &graphqlv1.GraphQLHeader{GraphqlHeaderId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()}, + }) + }) + } } diff --git a/packages/server/docs/specs/GRAPHQL.md b/packages/server/docs/specs/GRAPHQL.md new file mode 100644 index 000000000..a5129ec80 --- /dev/null +++ b/packages/server/docs/specs/GRAPHQL.md @@ -0,0 +1,357 @@ +# GraphQL Specification + +## Overview + +The GraphQL system adds first-class GraphQL request support to DevTools. It enables users to compose GraphQL queries/mutations, execute them against any GraphQL endpoint, introspect schemas for autocompletion and documentation, and view responses -- all following the same architecture patterns as the existing HTTP system. + +## Reference Implementation + +This design is informed by [Bruno](https://github.com/usebruno/bruno)'s GraphQL implementation, adapted to DevTools' TypeScript + Go stack (TypeSpec, Connect RPC, TanStack React DB, CodeMirror 6). 
+ +### What Bruno Does + +- **Query Editor**: CodeMirror with `codemirror-graphql` for syntax highlighting, schema-aware autocompletion, real-time validation, and query formatting via Prettier +- **Variables Editor**: JSON editor for GraphQL variables with prettify support +- **Schema Introspection**: Fetches schema via standard introspection query (`getIntrospectionQuery()` from `graphql` lib), caches result, builds `GraphQLSchema` object via `buildClientSchema()` +- **Documentation Explorer**: Custom component that navigates the `GraphQLSchema` type map with breadcrumb navigation, search, and clickable type references +- **Request Execution**: HTTP POST with `Content-Type: application/json`, body `{ "query": "...", "variables": {...} }` +- **Tabbed UI**: Query (default), Variables, Headers, Auth, Docs tabs + +### What We Include + +- Query editor with schema-aware autocompletion and validation (via `cm6-graphql` for CodeMirror 6) +- Variables editor (JSON) +- Headers (key-value table for manual auth and custom headers) +- Schema introspection and caching in SQLite +- Documentation explorer +- Request execution and response display + +### What We Exclude (For Now) + +- **Scripts/hooks**: Pre/post-request scripts (not needed) +- **Variable extraction**: Already handled automatically by DevTools +- **Auth UI**: Users set auth manually via headers; dedicated auth UI added later +- **Delta system**: Not needed initially; can be added later + +--- + +## Core Concepts + +### 1. Request Definition + +A GraphQL request defines what to send to a GraphQL endpoint. + +- **URL**: The GraphQL endpoint (e.g., `https://api.example.com/graphql`) +- **Query**: The GraphQL query/mutation string +- **Variables**: JSON string of variables to pass with the query +- **Headers**: Key-value pairs with enable/disable toggle (used for auth tokens, custom headers) + +Unlike HTTP requests, GraphQL is always: +- Method: **POST** +- Content-Type: **application/json** +- Body: `{ "query": "...", "variables": {...} }` + +### 2. Schema Introspection + +GraphQL's self-documenting nature is a key feature: + +1. User clicks "Fetch Schema" in the UI +2. Backend sends the standard introspection query to the endpoint (with user's headers for auth) +3. Backend returns the raw introspection JSON +4. Frontend builds a `GraphQLSchema` object via `buildClientSchema()` from the `graphql` JS library +5. Schema enables: autocompletion in the query editor, validation/linting, and the documentation explorer + +Schema introspection results are stored in SQLite (not localStorage like Bruno) for persistence and consistency. + +### 3. Execution & Response + +When a GraphQL request is "Run": + +1. **Interpolation**: Variables (`{{ varName }}`) are substituted into URL, query, variables, and header values +2. **Construction**: Build JSON body `{ "query": "...", "variables": {...} }` +3. **Transmission**: HTTP POST via the existing Go HTTP client (`httpclient` package) +4. **Response**: Status, headers, body (JSON), timing, and size are captured +5. **Persistence**: Response stored in `graphql_response` table, linked to the GraphQL request + +--- + +## Architecture + +### Design Decision: Separate Entity Type + +GraphQL is a **new entity type** rather than an extension of HTTP because: + +1. HTTP's `BodyKind` enum (`FormData`/`UrlEncoded`/`Raw`) doesn't conceptually fit GraphQL's `query + variables` model +2. GraphQL requires schema storage -- an entirely new concern that doesn't belong on HTTP +3. 
Execution is fundamentally simpler (always POST, always JSON, fixed body structure)
+4. Follows the existing pattern where each protocol is its own entity
+
+GraphQL initially shipped without the delta system to keep scope manageable; delta/override fields for flow node overrides are layered on by a follow-up migration (`09_graphql_delta.sql`).
+
+### File System Integration
+
+A new `GraphQL` value is added to the `FileKind` enum in `file-system.tsp`, allowing GraphQL requests to appear in the workspace sidebar tree alongside HTTP requests and flows.
+
+---
+
+## Backend
+
+### API Layer (`packages/server/internal/api/rgraphql`)
+
+- **Role**: Entry point for Connect RPC
+- **Responsibilities**:
+  - Validates incoming Protobuf messages
+  - Orchestrates transactions (Fetch-Check-Act pattern)
+  - Calls the Service Layer
+  - Publishes events to `eventstream` for real-time UI updates
+- **Key RPC Operations**:
+  - `GraphQLRun`: Execute a GraphQL request
+  - `GraphQLIntrospect`: Fetch schema via introspection query
+  - `GraphQLDuplicate`: Clone a GraphQL request
+  - Standard CRUD for GraphQL entity and headers
+  - Streaming sync for TanStack DB real-time collections
+- **Files**: `rgraphql.go` (service struct, streamers), `rgraphql_exec.go` (execution), `rgraphql_crud.go` (management), `rgraphql_sync.go` (streaming)
+
+### Service Layer (`packages/server/pkg/service/sgraphql`)
+
+- **Role**: Business logic and data access adapter
+- **Pattern**: Reader (non-blocking, `*sql.DB`) + Writer (transactional, `*sql.Tx`)
+- **Responsibilities**:
+  - Converts between Internal Models (`mgraphql`) and DB Models (`gen`)
+  - Executes `sqlc` queries
+  - Handles duplication logic (copying headers)
+
+### Domain Model (`packages/server/pkg/model/mgraphql`)
+
+Pure Go structs decoupled from DB and API:
+
+```go
+type GraphQL struct {
+	ID          idwrap.IDWrap
+	WorkspaceID idwrap.IDWrap
+	FolderID    *idwrap.IDWrap
+	Name        string
+	Url         string
+	Query       string // GraphQL query/mutation string
+	Variables   string // JSON string of variables
+	Description string
+	LastRunAt   *int64
+	CreatedAt   int64
+	UpdatedAt   int64
+}
+
+type GraphQLHeader struct {
+	ID           idwrap.IDWrap
+	GraphQLID    idwrap.IDWrap
+	Key          string
+	Value        string
+	Description  string
+	Enabled      bool
+	DisplayOrder float32
+}
+
+type GraphQLResponse struct {
+	ID        idwrap.IDWrap
+	GraphQLID idwrap.IDWrap
+	Status    int32
+	Body      []byte
+	Time      int64
+	Duration  int32
+	Size      int32
+}
+
+type GraphQLResponseHeader struct {
+	ID         idwrap.IDWrap
+	ResponseID idwrap.IDWrap
+	Key        string
+	Value      string
+}
+```
+
+### GraphQL Executor (`packages/server/pkg/graphql/executor.go`)
+
+Analogous to `packages/server/pkg/http/request/request.go` but simpler:
+
+```go
+func PrepareGraphQLRequest(gql mgraphql.GraphQL, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error)
+func PrepareIntrospectionRequest(url string, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error)
+```
+
+Both always produce HTTP POST with `Content-Type: application/json`. The introspection variant uses the well-known introspection query string. An illustrative sketch of the request construction follows the schema notes below.
+
+---
+
+## Database Schema
+
+### Tables
+
+- **`graphql`**: Core request metadata (name, url, query, variables)
+- **`graphql_version`**: Named snapshots of a request at a point in time
+- **`graphql_header`**: Request headers (key, value, enabled, order)
+- **`graphql_assert`**: Request assertions (value, enabled, order)
+- **`graphql_response`**: Execution results (status, body, duration, size)
+- **`graphql_response_header`**: Response headers
+- **`graphql_response_assert`**: Per-response assertion results
+
+Delta/override fields for `graphql`, `graphql_header`, and `graphql_assert` are added by the follow-up migration `09_graphql_delta.sql`.
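+
+To make the data flow concrete, the sketch below shows one way the executor described above could assemble the POST request whose result ultimately lands in a `graphql_response` row. It is illustrative only: `interpolate` is a hypothetical stand-in for the shared `{{ varName }}` substitution helper, and the real `PrepareGraphQLRequest` in `pkg/graphql/executor.go` may differ in detail.
+
+```go
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"strings"
+
+	"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql"
+)
+
+// interpolate is a hypothetical stand-in for the real {{ varName }} substitution helper.
+func interpolate(s string, vars map[string]any) string {
+	for k, v := range vars {
+		s = strings.ReplaceAll(s, "{{ "+k+" }}", fmt.Sprint(v))
+	}
+	return s
+}
+
+// PrepareGraphQLRequest sketch: always POST, always JSON, body {"query": ..., "variables": ...}.
+func PrepareGraphQLRequest(gql mgraphql.GraphQL, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error) {
+	// Variables are stored as a JSON string; decode them so they nest as an object in the envelope.
+	variables := map[string]any{}
+	if v := interpolate(gql.Variables, varMap); v != "" {
+		if err := json.Unmarshal([]byte(v), &variables); err != nil {
+			return nil, fmt.Errorf("invalid variables JSON: %w", err)
+		}
+	}
+
+	body, err := json.Marshal(map[string]any{
+		"query":     interpolate(gql.Query, varMap),
+		"variables": variables,
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest(http.MethodPost, interpolate(gql.Url, varMap), bytes.NewReader(body))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Set("Content-Type", "application/json")
+	for _, h := range headers {
+		if h.Enabled { // disabled headers are skipped, matching the enable/disable toggle
+			req.Header.Set(interpolate(h.Key, varMap), interpolate(h.Value, varMap))
+		}
+	}
+	return req, nil
+}
+```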
+ +Schema file: `packages/db/pkg/sqlc/schema/08_graphql.sql` + +--- + +## Frontend + +### CodeMirror 6 GraphQL Integration + +- **Package**: `cm6-graphql` (official CM6 GraphQL extension from GraphiQL monorepo) +- **Features**: Syntax highlighting, schema-aware autocompletion, linting/validation +- **Location**: `packages/client/src/features/graphql-editor/index.tsx` +- **Hook**: `useGraphQLEditorExtensions(schema?: GraphQLSchema)` returns CM6 extensions + +Also adds `'graphql'` to the prettier language support in `packages/client/src/features/expression/prettier.tsx`. + +### Page Components (`packages/client/src/pages/graphql/`) + +Following the pattern of `packages/client/src/pages/http/`: + +| Component | Description | +|-----------|-------------| +| `page.tsx` | Main page with resizable request/response split panels | +| `request/top-bar.tsx` | URL input, Send button, Fetch Schema button | +| `request/panel.tsx` | Tabbed panel: Query, Variables, Headers, Docs | +| `request/query-editor.tsx` | CodeMirror with `cm6-graphql` extensions | +| `request/variables-editor.tsx` | CodeMirror with JSON language | +| `request/header.tsx` | Headers key-value table | +| `request/doc-explorer.tsx` | Schema documentation browser | +| `response/body.tsx` | Response body viewer (JSON syntax highlighting) | + +### Documentation Explorer + +Custom component (not importing GraphiQL's, which has heavy context dependencies): + +- **Navigation**: Stack-based with breadcrumbs (root -> type -> field) +- **Root view**: Lists Query, Mutation, Subscription root types +- **Type view**: Fields with types, arguments, descriptions +- **Search**: Debounced filter across type/field names +- **Type links**: Clickable references that push onto navigation stack +- **Built with**: React Aria components, Tailwind CSS, `graphql` JS library's type introspection APIs + +### Routing + +Route: `/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/` + +Added to `packages/client/src/shared/routes.tsx` and sidebar file tree handler. + +--- + +## TypeSpec Definition + +File: `packages/spec/api/graphql.tsp` + +```typespec +using DevTools; +namespace Api.GraphQL; + +@TanStackDB.collection +model GraphQL { + @primaryKey graphqlId: Id; + name: string; + url: string; + query: string; + variables: string; + lastRunAt?: Protobuf.WellKnown.Timestamp; +} + +@TanStackDB.collection +model GraphQLHeader { + @primaryKey graphqlHeaderId: Id; + @foreignKey graphqlId: Id; + key: string; + value: string; + enabled: boolean; + description: string; + order: float32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponse { + @primaryKey graphqlResponseId: Id; + @foreignKey graphqlId: Id; + status: int32; + body: string; + time: Protobuf.WellKnown.Timestamp; + duration: int32; + size: int32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseHeader { + @primaryKey graphqlResponseHeaderId: Id; + @foreignKey graphqlResponseId: Id; + key: string; + value: string; +} + +model GraphQLRunRequest { graphqlId: Id; } +op GraphQLRun(...GraphQLRunRequest): {}; + +model GraphQLDuplicateRequest { graphqlId: Id; } +op GraphQLDuplicate(...GraphQLDuplicateRequest): {}; + +model GraphQLIntrospectRequest { graphqlId: Id; } +model GraphQLIntrospectResponse { sdl: string; introspectionJson: string; } +op GraphQLIntrospect(...GraphQLIntrospectRequest): GraphQLIntrospectResponse; +``` + +--- + +## Implementation Order + +1. 
TypeSpec + code generation (`graphql.tsp`, `FileKind.GraphQL`, run `spec:build`)
+2. Database schema + sqlc (`08_graphql.sql`, `09_graphql_delta.sql`, queries, `sqlc.yaml`, run `db:generate`)
+3. Go models (`mgraphql/`)
+4. Go services (`sgraphql/` - reader, writer, mapper for each entity)
+5. Go executor (`pkg/graphql/executor.go`)
+6. Go RPC handlers (`rgraphql/` - CRUD, exec, introspect, sync)
+7. Server wiring (`server.go` - streamers, services, cascade handlers)
+8. Frontend packages (`cm6-graphql`, `graphql` npm deps)
+9. Frontend components (pages, editor, doc explorer, routes)
+
+---
+
+## Files Changed / Created
+
+### New Files
+
+```
+packages/spec/api/graphql.tsp
+packages/db/pkg/sqlc/schema/08_graphql.sql
+packages/db/pkg/sqlc/schema/09_graphql_delta.sql
+packages/db/pkg/sqlc/queries/graphql.sql
+packages/server/pkg/model/mgraphql/mgraphql.go
+packages/server/pkg/service/sgraphql/ (sgraphql.go, reader.go, writer.go, mapper.go, header*.go, response*.go)
+packages/server/pkg/graphql/executor.go
+packages/server/internal/api/rgraphql/ (rgraphql.go, _crud.go, _crud_header.go, _exec.go, _converter.go, _sync.go)
+packages/client/src/features/graphql-editor/index.tsx
+packages/client/src/pages/graphql/ (page.tsx, tab.tsx, request/*, response/*, routes/*)
+```
+
+### Modified Files
+
+```
+packages/spec/api/main.tsp (add graphql.tsp import)
+packages/spec/api/file-system.tsp (add GraphQL to FileKind)
+packages/db/pkg/sqlc/sqlc.yaml (add graphql column overrides)
+packages/server/cmd/server/server.go (wire services, streamers, cascade)
+packages/client/package.json (add cm6-graphql, graphql deps)
+packages/client/src/shared/routes.tsx (add GraphQL routes)
+packages/client/src/features/expression/prettier.tsx (add graphql language)
+```
+
+---
+
+## Verification
+
+1. `direnv exec . pnpm nx run spec:build` succeeds
+2. `direnv exec . pnpm nx run db:generate` succeeds
+3. `direnv exec . pnpm nx run server:dev` starts without errors
+4. `direnv exec . pnpm nx run client:dev` builds successfully
+5. `direnv exec . task lint` passes
+6. `direnv exec . task test` passes
+7. 
E2E: Create GraphQL request -> enter endpoint -> write query -> fetch schema -> verify autocompletion -> send request -> verify response display -> browse docs diff --git a/packages/server/internal/api/rfile/rfile.go b/packages/server/internal/api/rfile/rfile.go index 262457828..d25c87e2e 100644 --- a/packages/server/internal/api/rfile/rfile.go +++ b/packages/server/internal/api/rfile/rfile.go @@ -140,6 +140,8 @@ func toAPIFileKind(kind mfile.ContentType) apiv1.FileKind { return apiv1.FileKind_FILE_KIND_FLOW case mfile.ContentTypeCredential: return apiv1.FileKind_FILE_KIND_CREDENTIAL + case mfile.ContentTypeGraphQL: + return apiv1.FileKind_FILE_KIND_GRAPH_Q_L default: return apiv1.FileKind_FILE_KIND_UNSPECIFIED } @@ -158,6 +160,8 @@ func fromAPIFileKind(kind apiv1.FileKind) mfile.ContentType { return mfile.ContentTypeFlow case apiv1.FileKind_FILE_KIND_CREDENTIAL: return mfile.ContentTypeCredential + case apiv1.FileKind_FILE_KIND_GRAPH_Q_L: + return mfile.ContentTypeGraphQL default: return mfile.ContentTypeUnknown } diff --git a/packages/server/internal/api/rflowv2/logging_test.go b/packages/server/internal/api/rflowv2/logging_test.go index b3c9315a3..bd3908f7e 100644 --- a/packages/server/internal/api/rflowv2/logging_test.go +++ b/packages/server/internal/api/rflowv2/logging_test.go @@ -80,6 +80,9 @@ func TestFlowRun_Logging(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, diff --git a/packages/server/internal/api/rflowv2/rflowv2.go b/packages/server/internal/api/rflowv2/rflowv2.go index cb8015384..ccf962e73 100644 --- a/packages/server/internal/api/rflowv2/rflowv2.go +++ b/packages/server/internal/api/rflowv2/rflowv2.go @@ -12,10 +12,12 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rlog" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/flowbuilder" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" @@ -24,6 +26,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" @@ -181,6 +184,12 @@ type nodeJsWithFlow struct { baseNode *mflow.Node } +type nodeGraphQLWithFlow struct { + nodeGraphQL mflow.NodeGraphQL + flowID idwrap.IDWrap + baseNode *mflow.Node +} + // Shared event type strings for all entity types. // Using mutation.Operation.String() values for consistency. 
const ( @@ -269,14 +278,18 @@ type FlowServiceV2Services struct { NodeAI *sflow.NodeAIService NodeAiProvider *sflow.NodeAiProviderService NodeMemory *sflow.NodeMemoryService + NodeGraphQL *sflow.NodeGraphQLService NodeExecution *sflow.NodeExecutionService FlowVariable *sflow.FlowVariableService Env *senv.EnvironmentService Var *senv.VariableService Http *shttp.HTTPService HttpBodyRaw *shttp.HttpBodyRawService - HttpResponse shttp.HttpResponseService - File *sfile.FileService + HttpResponse shttp.HttpResponseService + GraphQLResponse sgraphql.GraphQLResponseService + GraphQL *sgraphql.GraphQLService + GraphQLHeader *sgraphql.GraphQLHeaderService + File *sfile.FileService Importer WorkspaceImporter Credential scredential.CredentialService } @@ -318,6 +331,9 @@ func (s *FlowServiceV2Services) Validate() error { if s.NodeMemory == nil { return fmt.Errorf("node memory service is required") } + if s.NodeGraphQL == nil { + return fmt.Errorf("node graphql service is required") + } if s.NodeExecution == nil { return fmt.Errorf("node execution service is required") } @@ -352,22 +368,27 @@ type FlowServiceV2Streamers struct { Ai eventstream.SyncStreamer[AiTopic, AiEvent] AiProvider eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] Memory eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + NodeGraphQL eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] Execution eventstream.SyncStreamer[ExecutionTopic, ExecutionEvent] - HttpResponse eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] - HttpResponseHeader eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] - HttpResponseAssert eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] - Log eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] - File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] + HttpResponse eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] + HttpResponseHeader eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] + HttpResponseAssert eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] + GraphQLResponse eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + Log eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] + File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] } type FlowServiceV2Deps struct { - DB *sql.DB - Readers FlowServiceV2Readers - Services FlowServiceV2Services - Streamers FlowServiceV2Streamers - Resolver resolver.RequestResolver - Logger *slog.Logger - JsClient node_js_executorv1connect.NodeJsExecutorServiceClient + DB *sql.DB + Readers FlowServiceV2Readers + Services FlowServiceV2Services + Streamers FlowServiceV2Streamers + Resolver resolver.RequestResolver + GraphQLResolver gqlresolver.GraphQLResolver + Logger *slog.Logger + JsClient node_js_executorv1connect.NodeJsExecutorServiceClient } func (d *FlowServiceV2Deps) Validate() error { @@ -383,6 +404,9 @@ func (d *FlowServiceV2Deps) Validate() error { if d.Resolver == nil { return fmt.Errorf("resolver is required") } + if d.GraphQLResolver == nil { + return fmt.Errorf("graphql resolver is required") + } if d.Logger == nil { return fmt.Errorf("logger is required") } @@ 
-411,6 +435,7 @@ type FlowServiceV2RPC struct { nais *sflow.NodeAIService naps *sflow.NodeAiProviderService nmems *sflow.NodeMemoryService + ngqs *sflow.NodeGraphQLService nes *sflow.NodeExecutionService fvs *sflow.FlowVariableService envs *senv.EnvironmentService @@ -422,6 +447,7 @@ type FlowServiceV2RPC struct { // V2 import services workspaceImportService WorkspaceImporter httpResponseService shttp.HttpResponseService + graphqlResponseService sgraphql.GraphQLResponseService flowStream eventstream.SyncStreamer[FlowTopic, FlowEvent] nodeStream eventstream.SyncStreamer[NodeTopic, NodeEvent] edgeStream eventstream.SyncStreamer[EdgeTopic, EdgeEvent] @@ -434,11 +460,15 @@ type FlowServiceV2RPC struct { aiStream eventstream.SyncStreamer[AiTopic, AiEvent] aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + nodeGraphQLStream eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] executionStream eventstream.SyncStreamer[ExecutionTopic, ExecutionEvent] - httpResponseStream eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] - httpResponseHeaderStream eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] - httpResponseAssertStream eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] - logStream eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] + httpResponseStream eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] + httpResponseHeaderStream eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] + httpResponseAssertStream eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] + graphqlResponseStream eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + graphqlResponseHeaderStream eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + graphqlResponseAssertStream eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + logStream eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] fileService *sfile.FileService fileStream eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] @@ -464,9 +494,10 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { builder := flowbuilder.New( deps.Services.Node, deps.Services.NodeRequest, deps.Services.NodeFor, deps.Services.NodeForEach, deps.Services.NodeIf, deps.Services.NodeJs, deps.Services.NodeAI, - deps.Services.NodeAiProvider, deps.Services.NodeMemory, + deps.Services.NodeAiProvider, deps.Services.NodeMemory, deps.Services.NodeGraphQL, + deps.Services.GraphQL, deps.Services.GraphQLHeader, deps.Services.Workspace, deps.Services.Var, deps.Services.FlowVariable, - deps.Resolver, deps.Logger, llmFactory, + deps.Resolver, deps.GraphQLResolver, deps.Logger, llmFactory, ) return &FlowServiceV2RPC{ @@ -489,6 +520,7 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { nais: deps.Services.NodeAI, naps: deps.Services.NodeAiProvider, nmems: deps.Services.NodeMemory, + ngqs: deps.Services.NodeGraphQL, nes: deps.Services.NodeExecution, fvs: deps.Services.FlowVariable, envs: deps.Services.Env, @@ -499,6 +531,7 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { logger: deps.Logger, workspaceImportService: deps.Services.Importer, httpResponseService: deps.Services.HttpResponse, + graphqlResponseService: deps.Services.GraphQLResponse, flowStream: deps.Streamers.Flow, 
nodeStream: deps.Streamers.Node, edgeStream: deps.Streamers.Edge, @@ -511,11 +544,15 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { aiStream: deps.Streamers.Ai, aiProviderStream: deps.Streamers.AiProvider, memoryStream: deps.Streamers.Memory, + nodeGraphQLStream: deps.Streamers.NodeGraphQL, executionStream: deps.Streamers.Execution, - httpResponseStream: deps.Streamers.HttpResponse, - httpResponseHeaderStream: deps.Streamers.HttpResponseHeader, - httpResponseAssertStream: deps.Streamers.HttpResponseAssert, - logStream: deps.Streamers.Log, + httpResponseStream: deps.Streamers.HttpResponse, + httpResponseHeaderStream: deps.Streamers.HttpResponseHeader, + httpResponseAssertStream: deps.Streamers.HttpResponseAssert, + graphqlResponseStream: deps.Streamers.GraphQLResponse, + graphqlResponseHeaderStream: deps.Streamers.GraphQLResponseHeader, + graphqlResponseAssertStream: deps.Streamers.GraphQLResponseAssert, + logStream: deps.Streamers.Log, fileService: deps.Services.File, fileStream: deps.Streamers.File, jsClient: deps.JsClient, @@ -546,7 +583,8 @@ func (s *FlowServiceV2RPC) mutationPublisher() mutation.Publisher { jsStream: s.jsStream, aiStream: s.aiStream, aiProviderStream: s.aiProviderStream, - memoryStream: s.memoryStream, + memoryStream: s.memoryStream, + nodeGraphQLStream: s.nodeGraphQLStream, } } @@ -561,8 +599,9 @@ type rflowPublisher struct { forEachStream eventstream.SyncStreamer[ForEachTopic, ForEachEvent] jsStream eventstream.SyncStreamer[JsTopic, JsEvent] aiStream eventstream.SyncStreamer[AiTopic, AiEvent] - aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] - memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] + memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + nodeGraphQLStream eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] } func (p *rflowPublisher) PublishAll(events []mutation.Event) { @@ -589,6 +628,8 @@ func (p *rflowPublisher) PublishAll(events []mutation.Event) { p.publishNodeAiProvider(evt) case mutation.EntityFlowNodeMemory: p.publishNodeMemory(evt) + case mutation.EntityFlowNodeGraphQL: + p.publishNodeGraphQL(evt) case mutation.EntityFlowEdge: p.publishEdge(evt) case mutation.EntityFlowVariable: @@ -1021,3 +1062,34 @@ func (p *rflowPublisher) publishNodeMemory(evt mutation.Event) { }) } } + +func (p *rflowPublisher) publishNodeGraphQL(evt mutation.Event) { + if p.nodeStream == nil { + return + } + + var node *flowv1.Node + var flowID idwrap.IDWrap + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if data, ok := evt.Payload.(nodeGraphQLWithFlow); ok && data.baseNode != nil { + node = serializeNode(*data.baseNode) + flowID = data.flowID + } + case mutation.OpDelete: + node = &flowv1.Node{ + NodeId: evt.ID.Bytes(), + FlowId: evt.ParentID.Bytes(), + } + flowID = evt.ParentID + } + + if node != nil { + p.nodeStream.Publish(NodeTopic{FlowID: flowID}, NodeEvent{ + Type: nodeEventUpdate, + FlowID: flowID, + Node: node, + }) + } +} diff --git a/packages/server/internal/api/rflowv2/rflowv2_common.go b/packages/server/internal/api/rflowv2/rflowv2_common.go index 4cbda58ab..73c848dfd 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_common.go +++ b/packages/server/internal/api/rflowv2/rflowv2_common.go @@ -124,6 +124,16 @@ func serializeNodeAI(n mflow.NodeAI) *flowv1.NodeAi { } } +func serializeNodeGraphQL(n mflow.NodeGraphQL) *flowv1.NodeGraphQL { + msg := &flowv1.NodeGraphQL{ + NodeId: 
n.FlowNodeID.Bytes(), + } + if n.GraphQLID != nil && !isZeroID(*n.GraphQLID) { + msg.GraphqlId = n.GraphQLID.Bytes() + } + return msg +} + func serializeNodeExecution(execution mflow.NodeExecution) *flowv1.NodeExecution { result := &flowv1.NodeExecution{ NodeExecutionId: execution.ID.Bytes(), @@ -182,6 +192,11 @@ func serializeNodeExecution(execution mflow.NodeExecution) *flowv1.NodeExecution result.HttpResponseId = execution.ResponseID.Bytes() } + // Handle GraphQL response ID + if execution.GraphQLResponseID != nil { + result.GraphqlResponseId = execution.GraphQLResponseID.Bytes() + } + // Handle completion timestamp if execution.CompletedAt != nil { result.CompletedAt = timestamppb.New(time.Unix(*execution.CompletedAt, 0)) diff --git a/packages/server/internal/api/rflowv2/rflowv2_exec.go b/packages/server/internal/api/rflowv2/rflowv2_exec.go index ece8e26f2..bfbe008fe 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_exec.go +++ b/packages/server/internal/api/rflowv2/rflowv2_exec.go @@ -16,6 +16,7 @@ import ( devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rlog" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner" @@ -23,6 +24,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcondition" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" logv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/log/v1" @@ -208,6 +210,86 @@ func (s *FlowServiceV2RPC) executeFlow( respDrain.Wait() }() + gqlRespChan := make(chan ngraphql.NodeGraphQLSideResp, len(nodes)*2+1) + gqlResponsePublished := make(map[string]chan struct{}) + var gqlResponsePublishedMu sync.Mutex + var gqlRespDrain sync.WaitGroup + gqlRespDrain.Add(1) + go func() { + defer gqlRespDrain.Done() + for resp := range gqlRespChan { + responseID := resp.Response.ID.String() + + gqlResponsePublishedMu.Lock() + publishedChan := make(chan struct{}) + gqlResponsePublished[responseID] = publishedChan + gqlResponsePublishedMu.Unlock() + + // Save all entities first, THEN publish events in batch + // This ensures atomicity and ordering - the client can query for + // child entities (headers/assertions) immediately after receiving + // the response event, preventing race conditions in real-time updates + + // Save GraphQL Response + responseSuccess := false + if err := s.graphqlResponseService.Create(ctx, resp.Response); err != nil { + s.logger.Error("failed to save graphql response", "error", err) + } else { + responseSuccess = true + } + + // Save Response Headers + var successHeaders []mgraphql.GraphQLResponseHeader + for _, h := range resp.RespHeaders { + if err := s.graphqlResponseService.CreateHeader(ctx, h); err != nil { + s.logger.Error("failed to save graphql response header", "error", err) + } else { + successHeaders = append(successHeaders, h) + } + } + + // Save Asserts + var successAsserts []mgraphql.GraphQLResponseAssert + for _, a := range resp.RespAsserts { 
+ if err := s.graphqlResponseService.CreateAssert(ctx, a); err != nil { + s.logger.Error("failed to save graphql response assert", "error", err) + } else { + successAsserts = append(successAsserts, a) + } + } + + // Publish all events atomically AFTER all saves complete + // This guarantees the client receives events in the correct order: + // 1. Response (parent) + // 2. Headers (children) + // 3. Assertions (children) + if responseSuccess { + // Publish response first + s.publishGraphQLResponseEvent("insert", resp.Response, flow.WorkspaceID) + + // Then headers + for _, h := range successHeaders { + s.publishGraphQLResponseHeaderEvent("insert", h, flow.WorkspaceID) + } + + // Then assertions + for _, a := range successAsserts { + s.publishGraphQLResponseAssertEvent("insert", a, flow.WorkspaceID) + } + } + + close(publishedChan) + + if resp.Done != nil { + close(resp.Done) + } + } + }() + defer func() { + close(gqlRespChan) + gqlRespDrain.Wait() + }() + sharedHTTPClient := httpclient.New() edgeMap := mflow.NewEdgesMap(edges) // Build edgesBySource map for O(1) edge lookup by source node ID @@ -226,6 +308,7 @@ func (s *FlowServiceV2RPC) executeFlow( timeoutDuration, sharedHTTPClient, requestRespChan, + gqlRespChan, s.jsClient, ) if err != nil { @@ -357,11 +440,20 @@ func (s *FlowServiceV2RPC) executeFlow( } model := mflow.NodeExecution{ - ID: execID, - NodeID: status.NodeID, - Name: executionName, - State: status.State, - ResponseID: status.AuxiliaryID, + ID: execID, + NodeID: status.NodeID, + Name: executionName, + State: status.State, + } + + // Set the appropriate response ID based on node kind + nodeKindForAux := nodeKindMap[status.NodeID] + if status.AuxiliaryID != nil { + if nodeKindForAux == mflow.NODE_KIND_GRAPHQL { + model.GraphQLResponseID = status.AuxiliaryID + } else { + model.ResponseID = status.AuxiliaryID + } } if status.Error != nil { @@ -403,24 +495,37 @@ func (s *FlowServiceV2RPC) executeFlow( } // If this execution has a ResponseID, wait for the response to be published first - // This ensures frontend receives HttpResponse before NodeExecution + // This ensures frontend receives HttpResponse/GraphQLResponse before NodeExecution if status.AuxiliaryID != nil { respIDStr := status.AuxiliaryID.String() + + // Check HTTP response published map responsePublishedMu.Lock() publishedChan, ok := responsePublished[respIDStr] responsePublishedMu.Unlock() if ok { select { case <-publishedChan: - // Response published, safe to continue case <-ctx.Done(): - // Context cancelled, continue anyway } - // Clean up map entry to prevent memory leak responsePublishedMu.Lock() delete(responsePublished, respIDStr) responsePublishedMu.Unlock() } + + // Check GraphQL response published map + gqlResponsePublishedMu.Lock() + gqlPublishedChan, gqlOK := gqlResponsePublished[respIDStr] + gqlResponsePublishedMu.Unlock() + if gqlOK { + select { + case <-gqlPublishedChan: + case <-ctx.Done(): + } + gqlResponsePublishedMu.Lock() + delete(gqlResponsePublished, respIDStr) + gqlResponsePublishedMu.Unlock() + } } // Publish execution event @@ -650,6 +755,7 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( aiData *mflow.NodeAI aiProviderData *mflow.NodeAiProvider memoryData *mflow.NodeMemory + graphqlData *mflow.NodeGraphQL } nodeConfigs := make([]nodeConfig, 0, len(sourceNodes)) @@ -719,6 +825,14 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( } else if memoryData != nil { config.memoryData = memoryData } + + case mflow.NODE_KIND_GRAPHQL: + graphqlData, err := s.ngqs.GetNodeGraphQL(ctx, 
sourceNode.ID) + if err != nil { + s.logger.Warn("failed to get graphql node config, using defaults", "node_id", sourceNode.ID.String(), "error", err) + } else if graphqlData != nil { + config.graphqlData = graphqlData + } } nodeConfigs = append(nodeConfigs, config) @@ -754,6 +868,11 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( txService := s.nmems.TX(tx) nmemsWriter = &txService } + var ngqsWriter *sflow.NodeGraphQLService + if s.ngqs != nil { + txService := s.ngqs.TX(tx) + ngqsWriter = &txService + } edgeWriter := s.es.TX(tx) varWriter := s.fvs.TX(tx) @@ -958,6 +1077,19 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( } // Memory node events are handled through nodeStream subscription } + + case mflow.NODE_KIND_GRAPHQL: + if ngqsWriter == nil { + s.logger.Warn("NodeGraphQL service not available, skipping GraphQL node config", "node_id", sourceNode.ID.String()) + } else if config.graphqlData != nil { + newGraphQLData := mflow.NodeGraphQL{ + FlowNodeID: newNodeID, + GraphQLID: config.graphqlData.GraphQLID, + } + if err := ngqsWriter.CreateNodeGraphQL(ctx, newGraphQLData); err != nil { + return mflow.Flow{}, nil, fmt.Errorf("create graphql node: %w", err) + } + } } // Collect base node event diff --git a/packages/server/internal/api/rflowv2/rflowv2_exec_test.go b/packages/server/internal/api/rflowv2/rflowv2_exec_test.go index 63a9630e5..bd0e4fd12 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_exec_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_exec_test.go @@ -75,6 +75,9 @@ func setupTestService(t *testing.T) (*FlowServiceV2RPC, *gen.Queries, context.Co nil, // NodeAIService &aiProviderService, &memoryService, + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go b/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go index b3ea048c6..eeaab136e 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go @@ -71,6 +71,9 @@ func TestNodeCondition_CRUD(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_exec.go b/packages/server/internal/api/rflowv2/rflowv2_node_exec.go index 87aeca9f7..834659791 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_exec.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_exec.go @@ -10,11 +10,13 @@ import ( "connectrpc.com/connect" emptypb "google.golang.org/protobuf/types/known/emptypb" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/converter" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -192,6 +194,39 @@ func (s *FlowServiceV2RPC) publishHttpResponseAssertEvent(eventType 
string, asse }) } +func (s *FlowServiceV2RPC) publishGraphQLResponseEvent(eventType string, response mgraphql.GraphQLResponse, workspaceID idwrap.IDWrap) { + if s.graphqlResponseStream == nil { + return + } + responsePB := rgraphql.ToAPIGraphQLResponse(response) + s.graphqlResponseStream.Publish(rgraphql.GraphQLResponseTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseEvent{ + Type: eventType, + GraphQLResponse: responsePB, + }) +} + +func (s *FlowServiceV2RPC) publishGraphQLResponseHeaderEvent(eventType string, header mgraphql.GraphQLResponseHeader, workspaceID idwrap.IDWrap) { + if s.graphqlResponseHeaderStream == nil { + return + } + headerPB := rgraphql.ToAPIGraphQLResponseHeader(header) + s.graphqlResponseHeaderStream.Publish(rgraphql.GraphQLResponseHeaderTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseHeaderEvent{ + Type: eventType, + GraphQLResponseHeader: headerPB, + }) +} + +func (s *FlowServiceV2RPC) publishGraphQLResponseAssertEvent(eventType string, assert mgraphql.GraphQLResponseAssert, workspaceID idwrap.IDWrap) { + if s.graphqlResponseAssertStream == nil { + return + } + assertPB := rgraphql.ToAPIGraphQLResponseAssert(assert) + s.graphqlResponseAssertStream.Publish(rgraphql.GraphQLResponseAssertTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseAssertEvent{ + Type: eventType, + GraphQLResponseAssert: assertPB, + }) +} + func (s *FlowServiceV2RPC) executionEventToSyncResponse( ctx context.Context, evt ExecutionEvent, @@ -224,6 +259,9 @@ func (s *FlowServiceV2RPC) executionEventToSyncResponse( if evt.Execution.HttpResponseId != nil { upsert.HttpResponseId = evt.Execution.HttpResponseId } + if evt.Execution.GraphqlResponseId != nil { + upsert.GraphqlResponseId = evt.Execution.GraphqlResponseId + } if evt.Execution.CompletedAt != nil { upsert.CompletedAt = evt.Execution.CompletedAt } diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go b/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go index b1b64869c..029f41d5e 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go @@ -71,6 +71,9 @@ func TestNodeExecution_Collection(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, @@ -218,6 +221,9 @@ func TestNodeExecution_Collection_VersionFlow(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go b/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go new file mode 100644 index 000000000..835a47e48 --- /dev/null +++ b/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go @@ -0,0 +1,446 @@ +//nolint:revive // exported +package rflowv2 + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + + "connectrpc.com/connect" + emptypb "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" +) + +// NodeGraphQLTopic identifies the flow whose 
GraphQL nodes are being published. +type NodeGraphQLTopic struct { + FlowID idwrap.IDWrap +} + +// NodeGraphQLEvent describes a GraphQL node change for sync streaming. +type NodeGraphQLEvent struct { + Type string + FlowID idwrap.IDWrap + Node *flowv1.NodeGraphQL +} + +func (s *FlowServiceV2RPC) NodeGraphQLCollection( + ctx context.Context, + _ *connect.Request[emptypb.Empty], +) (*connect.Response[flowv1.NodeGraphQLCollectionResponse], error) { + flows, err := s.listAccessibleFlows(ctx) + if err != nil { + return nil, err + } + + var items []*flowv1.NodeGraphQL + for _, flow := range flows { + nodes, err := s.nsReader.GetNodesByFlowID(ctx, flow.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, node := range nodes { + if node.NodeKind != mflow.NODE_KIND_GRAPHQL { + continue + } + nodeGQL, err := s.ngqs.GetNodeGraphQL(ctx, node.ID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + continue + } + return nil, connect.NewError(connect.CodeInternal, err) + } + items = append(items, serializeNodeGraphQL(*nodeGQL)) + } + } + + return connect.NewResponse(&flowv1.NodeGraphQLCollectionResponse{Items: items}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLInsert( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLInsertRequest], +) (*connect.Response[emptypb.Empty], error) { + type insertData struct { + nodeID idwrap.IDWrap + graphqlID *idwrap.IDWrap + baseNode *mflow.Node + flowID idwrap.IDWrap + workspaceID idwrap.IDWrap + } + var validatedItems []insertData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + var graphqlID *idwrap.IDWrap + if len(item.GetGraphqlId()) > 0 { + parsedID, err := idwrap.NewFromBytes(item.GetGraphqlId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid graphql id: %w", err)) + } + if !isZeroID(parsedID) { + graphqlID = &parsedID + } + } + + // CRITICAL FIX: Get base node BEFORE transaction to avoid SQLite deadlock + // Allow nil baseNode to support out-of-order message arrival + baseNode, _ := s.ns.GetNode(ctx, nodeID) + + var flowID idwrap.IDWrap + var workspaceID idwrap.IDWrap + if baseNode != nil { + flowID = baseNode.FlowID + flow, err := s.fsReader.GetFlow(ctx, flowID) + if err == nil { + workspaceID = flow.WorkspaceID + } + } + + validatedItems = append(validatedItems, insertData{ + nodeID: nodeID, + graphqlID: graphqlID, + baseNode: baseNode, + flowID: flowID, + workspaceID: workspaceID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + ngqsWriter := s.ngqs.TX(mut.TX()) + + for _, data := range validatedItems { + nodeGraphQL := mflow.NodeGraphQL{ + FlowNodeID: data.nodeID, + GraphQLID: data.graphqlID, + } + + if err := ngqsWriter.CreateNodeGraphQL(ctx, nodeGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Only track for event publishing if base node exists + if data.baseNode != nil { + mut.Track(mutation.Event{ + Entity: mutation.EntityFlowNodeGraphQL, + Op: mutation.OpInsert, + ID: data.nodeID, + WorkspaceID: 
data.workspaceID, + ParentID: data.flowID, + Payload: nodeGraphQLWithFlow{ + nodeGraphQL: nodeGraphQL, + flowID: data.flowID, + baseNode: data.baseNode, + }, + }) + } + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLUpdate( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLUpdateRequest], +) (*connect.Response[emptypb.Empty], error) { + type updateData struct { + nodeID idwrap.IDWrap + graphqlID *idwrap.IDWrap + baseNode *mflow.Node + workspaceID idwrap.IDWrap + } + var validatedItems []updateData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + nodeModel, err := s.ensureNodeAccess(ctx, nodeID) + if err != nil { + return nil, err + } + + // Get workspace ID for the flow + flow, err := s.fsReader.GetFlow(ctx, nodeModel.FlowID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var graphqlID *idwrap.IDWrap + if graphqlBytes := item.GetGraphqlId(); len(graphqlBytes) > 0 { + parsedID, err := idwrap.NewFromBytes(graphqlBytes) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid graphql id: %w", err)) + } + if !isZeroID(parsedID) { + graphqlID = &parsedID + } + } + + validatedItems = append(validatedItems, updateData{ + nodeID: nodeID, + graphqlID: graphqlID, + baseNode: nodeModel, + workspaceID: flow.WorkspaceID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + ngqsWriter := s.ngqs.TX(mut.TX()) + + for _, data := range validatedItems { + nodeGraphQL := mflow.NodeGraphQL{ + FlowNodeID: data.nodeID, + GraphQLID: data.graphqlID, + } + + if err := ngqsWriter.UpdateNodeGraphQL(ctx, nodeGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + mut.Track(mutation.Event{ + Entity: mutation.EntityFlowNodeGraphQL, + Op: mutation.OpUpdate, + ID: data.nodeID, + WorkspaceID: data.workspaceID, + ParentID: data.baseNode.FlowID, + Payload: nodeGraphQLWithFlow{ + nodeGraphQL: nodeGraphQL, + flowID: data.baseNode.FlowID, + baseNode: data.baseNode, + }, + }) + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLDelete( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLDeleteRequest], +) (*connect.Response[emptypb.Empty], error) { + type deleteData struct { + nodeID idwrap.IDWrap + flowID idwrap.IDWrap + } + var validatedItems []deleteData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + nodeModel, err := s.ensureNodeAccess(ctx, nodeID) + if err != nil { + return nil, err + } + + validatedItems = append(validatedItems, deleteData{ + 
nodeID: nodeID, + flowID: nodeModel.FlowID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range validatedItems { + mut.Track(mutation.Event{ + Entity: mutation.EntityFlowNodeGraphQL, + Op: mutation.OpDelete, + ID: data.nodeID, + ParentID: data.flowID, + }) + if err := mut.Queries().DeleteFlowNodeGraphQL(ctx, data.nodeID); err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLSync( + ctx context.Context, + _ *connect.Request[emptypb.Empty], + stream *connect.ServerStream[flowv1.NodeGraphQLSyncResponse], +) error { + if stream == nil { + return connect.NewError(connect.CodeInternal, errors.New("stream is required")) + } + return s.streamNodeGraphQLSync(ctx, func(resp *flowv1.NodeGraphQLSyncResponse) error { + return stream.Send(resp) + }) +} + +func (s *FlowServiceV2RPC) streamNodeGraphQLSync( + ctx context.Context, + send func(*flowv1.NodeGraphQLSyncResponse) error, +) error { + if s.nodeStream == nil { + return connect.NewError(connect.CodeUnavailable, errors.New("node stream not configured")) + } + + var flowSet sync.Map + + filter := func(topic NodeTopic) bool { + if _, ok := flowSet.Load(topic.FlowID.String()); ok { + return true + } + if err := s.ensureFlowAccess(ctx, topic.FlowID); err != nil { + return false + } + flowSet.Store(topic.FlowID.String(), struct{}{}) + return true + } + + events, err := s.nodeStream.Subscribe(ctx, filter) + if err != nil { + return connect.NewError(connect.CodeInternal, err) + } + + for { + select { + case evt, ok := <-events: + if !ok { + return nil + } + resp, err := s.nodeGraphQLEventToSyncResponse(ctx, evt.Payload) + if err != nil { + return connect.NewError(connect.CodeInternal, fmt.Errorf("failed to convert GraphQL node event: %w", err)) + } + if resp == nil { + continue + } + if err := send(resp); err != nil { + return err + } + case <-ctx.Done(): + return ctx.Err() + } + } +} + +func (s *FlowServiceV2RPC) nodeGraphQLEventToSyncResponse( + ctx context.Context, + evt NodeEvent, +) (*flowv1.NodeGraphQLSyncResponse, error) { + if evt.Node == nil { + return nil, nil + } + + // Only process GraphQL nodes + if evt.Node.GetKind() != flowv1.NodeKind_NODE_KIND_GRAPH_Q_L { + return nil, nil + } + + nodeID, err := idwrap.NewFromBytes(evt.Node.GetNodeId()) + if err != nil { + return nil, fmt.Errorf("invalid node id: %w", err) + } + + // Fetch the GraphQL configuration for this node (may not exist) + nodeGQL, err := s.ngqs.GetNodeGraphQL(ctx, nodeID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, err + } + + var syncEvent *flowv1.NodeGraphQLSync + switch evt.Type { + case nodeEventInsert: + insert := &flowv1.NodeGraphQLSyncInsert{ + NodeId: nodeID.Bytes(), + } + if nodeGQL != nil && nodeGQL.GraphQLID != nil && !isZeroID(*nodeGQL.GraphQLID) { + insert.GraphqlId = nodeGQL.GraphQLID.Bytes() + } + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: 
flowv1.NodeGraphQLSync_ValueUnion_KIND_INSERT, + Insert: insert, + }, + } + case nodeEventUpdate: + update := &flowv1.NodeGraphQLSyncUpdate{ + NodeId: nodeID.Bytes(), + } + if nodeGQL != nil && nodeGQL.GraphQLID != nil && !isZeroID(*nodeGQL.GraphQLID) { + update.GraphqlId = nodeGQL.GraphQLID.Bytes() + } + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: flowv1.NodeGraphQLSync_ValueUnion_KIND_UPDATE, + Update: update, + }, + } + case nodeEventDelete: + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: flowv1.NodeGraphQLSync_ValueUnion_KIND_DELETE, + Delete: &flowv1.NodeGraphQLSyncDelete{ + NodeId: nodeID.Bytes(), + }, + }, + } + default: + return nil, nil + } + + return &flowv1.NodeGraphQLSyncResponse{ + Items: []*flowv1.NodeGraphQLSync{syncEvent}, + }, nil +} diff --git a/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go b/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go index 0aaa7bb58..132b156ad 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go @@ -91,6 +91,9 @@ func NewRFlowTestContext(t *testing.T) *RFlowTestContext { nil, // NodeAIService - not needed for non-AI tests nil, // NodeAiProviderService - not needed for non-AI tests nil, // NodeMemoryService - not needed for non-AI tests + nil, // NodeGraphQLService - not needed for non-GraphQL tests + nil, // GraphQLService - not needed for non-GraphQL tests + nil, // GraphQLHeaderService - not needed for non-GraphQL tests &wsService, &varService, &flowVarService, diff --git a/packages/server/internal/api/rgraphql/rgraphql.go b/packages/server/internal/api/rgraphql/rgraphql.go new file mode 100644 index 000000000..4de3ef65c --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql.go @@ -0,0 +1,554 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/delta" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/suser" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" + "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1/graph_q_lv1connect" +) + +const ( + eventTypeInsert = "insert" + eventTypeUpdate = "update" + eventTypeDelete = "delete" +) + +// Topic/Event types for each entity + +type GraphQLTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLEvent struct { + Type string + 
GraphQL *graphqlv1.GraphQL +} + +type GraphQLHeaderTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLHeaderEvent struct { + Type string + GraphQLHeader *graphqlv1.GraphQLHeader +} + +type GraphQLResponseTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseEvent struct { + Type string + GraphQLResponse *graphqlv1.GraphQLResponse +} + +type GraphQLResponseHeaderTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseHeaderEvent struct { + Type string + GraphQLResponseHeader *graphqlv1.GraphQLResponseHeader +} + +type GraphQLResponseAssertTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseAssertEvent struct { + Type string + GraphQLResponseAssert *graphqlv1.GraphQLResponseAssert +} + +type GraphQLVersionTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLVersionEvent struct { + Type string + GraphQLVersion *graphqlv1.GraphQLVersion +} + +type GraphQLAssertTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLAssertEvent struct { + Type string + GraphQLAssert *graphqlv1.GraphQLAssert + IsDelta bool +} + +// GraphQLStreamers groups all event streams +type GraphQLStreamers struct { + GraphQL eventstream.SyncStreamer[GraphQLTopic, GraphQLEvent] + GraphQLHeader eventstream.SyncStreamer[GraphQLHeaderTopic, GraphQLHeaderEvent] + GraphQLAssert eventstream.SyncStreamer[GraphQLAssertTopic, GraphQLAssertEvent] + GraphQLResponse eventstream.SyncStreamer[GraphQLResponseTopic, GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[GraphQLResponseHeaderTopic, GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[GraphQLResponseAssertTopic, GraphQLResponseAssertEvent] + GraphQLVersion eventstream.SyncStreamer[GraphQLVersionTopic, GraphQLVersionEvent] + File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] +} + +// GraphQLServiceRPC handles GraphQL RPC operations +type GraphQLServiceRPC struct { + DB *sql.DB + + graphqlReader *sgraphql.Reader + graphqlService sgraphql.GraphQLService + headerService sgraphql.GraphQLHeaderService + graphqlAssertService sgraphql.GraphQLAssertService + responseService sgraphql.GraphQLResponseService + resolver GraphQLResolver + + us suser.UserService + ws sworkspace.WorkspaceService + wus sworkspace.UserService + userReader *sworkspace.UserReader + wsReader *sworkspace.WorkspaceReader + + es senv.EnvService + vs senv.VariableService + + fileService *sfile.FileService + streamers *GraphQLStreamers +} + +// GraphQLResolver defines the interface for resolving GraphQL delta requests +type GraphQLResolver interface { + Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) +} + +type GraphQLServiceRPCDeps struct { + DB *sql.DB + Services GraphQLServiceRPCServices + Readers GraphQLServiceRPCReaders + Resolver GraphQLResolver + Streamers *GraphQLStreamers +} + +type GraphQLServiceRPCServices struct { + GraphQL sgraphql.GraphQLService + Header sgraphql.GraphQLHeaderService + GraphQLAssert sgraphql.GraphQLAssertService + Response sgraphql.GraphQLResponseService + User suser.UserService + Workspace sworkspace.WorkspaceService + WorkspaceUser sworkspace.UserService + Env senv.EnvService + Variable senv.VariableService + File *sfile.FileService +} + +type GraphQLServiceRPCReaders struct { + GraphQL *sgraphql.Reader + User *sworkspace.UserReader + Workspace *sworkspace.WorkspaceReader +} + +func (d *GraphQLServiceRPCDeps) Validate() error { + if d.DB == nil { + return fmt.Errorf("db is required") + } + if 
d.Streamers == nil { + return fmt.Errorf("streamers is required") + } + return nil +} + +func New(deps GraphQLServiceRPCDeps) GraphQLServiceRPC { + if err := deps.Validate(); err != nil { + panic(fmt.Sprintf("GraphQLServiceRPC Deps validation failed: %v", err)) + } + + return GraphQLServiceRPC{ + DB: deps.DB, + graphqlReader: deps.Readers.GraphQL, + graphqlService: deps.Services.GraphQL, + headerService: deps.Services.Header, + graphqlAssertService: deps.Services.GraphQLAssert, + responseService: deps.Services.Response, + resolver: deps.Resolver, + us: deps.Services.User, + ws: deps.Services.Workspace, + wus: deps.Services.WorkspaceUser, + userReader: deps.Readers.User, + wsReader: deps.Readers.Workspace, + es: deps.Services.Env, + vs: deps.Services.Variable, + fileService: deps.Services.File, + streamers: deps.Streamers, + } +} + +func CreateService(srv GraphQLServiceRPC, options []connect.HandlerOption) (*api.Service, error) { + path, handler := graph_q_lv1connect.NewGraphQLServiceHandler(&srv, options...) + return &api.Service{Path: path, Handler: handler}, nil +} + +// Access control helpers + +func (s *GraphQLServiceRPC) checkWorkspaceReadAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role < mworkspace.RoleUser { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +func (s *GraphQLServiceRPC) checkWorkspaceWriteAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role < mworkspace.RoleAdmin { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +func (s *GraphQLServiceRPC) checkWorkspaceDeleteAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role != mworkspace.RoleOwner { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +// Mutation publisher for auto-publish on commit + +func (s *GraphQLServiceRPC) mutationPublisher() mutation.Publisher { + return &rgraphqlPublisher{streamers: s.streamers} +} + +type rgraphqlPublisher struct { + streamers *GraphQLStreamers +} + 
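+// PublishAll routes each tracked mutation event to the streamer for its entity type; the mutation layer invokes it after the transaction commits, so sync subscribers receive GraphQL, header, and assert changes in the order they were tracked. 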
+func (p *rgraphqlPublisher) PublishAll(events []mutation.Event) { + for _, evt := range events { + //nolint:exhaustive + switch evt.Entity { + case mutation.EntityGraphQL: + p.publishGraphQL(evt) + case mutation.EntityGraphQLHeader: + p.publishGraphQLHeader(evt) + case mutation.EntityGraphQLAssert: + p.publishGraphQLAssert(evt) + } + } +} + +func (p *rgraphqlPublisher) publishGraphQL(evt mutation.Event) { + if p.streamers.GraphQL == nil { + return + } + var model *graphqlv1.GraphQL + var eventType string + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if g, ok := evt.Payload.(mgraphql.GraphQL); ok { + model = ToAPIGraphQL(g) + } else if gp, ok := evt.Payload.(*mgraphql.GraphQL); ok { + model = ToAPIGraphQL(*gp) + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQL{GraphqlId: evt.ID.Bytes()} + } + + if model != nil { + p.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: evt.WorkspaceID}, GraphQLEvent{ + Type: eventType, + GraphQL: model, + }) + } +} + +func (p *rgraphqlPublisher) publishGraphQLHeader(evt mutation.Event) { + if p.streamers.GraphQLHeader == nil { + return + } + var model *graphqlv1.GraphQLHeader + var eventType string + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if h, ok := evt.Payload.(mgraphql.GraphQLHeader); ok { + model = ToAPIGraphQLHeader(h) + } else if hp, ok := evt.Payload.(*mgraphql.GraphQLHeader); ok { + model = ToAPIGraphQLHeader(*hp) + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQLHeader{GraphqlHeaderId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()} + } + + if model != nil { + p.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: evt.WorkspaceID}, GraphQLHeaderEvent{ + Type: eventType, + GraphQLHeader: model, + }) + } +} + +func (p *rgraphqlPublisher) publishGraphQLAssert(evt mutation.Event) { + if p.streamers.GraphQLAssert == nil { + return + } + var model *graphqlv1.GraphQLAssert + var eventType string + isDelta := false + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if a, ok := evt.Payload.(mgraphql.GraphQLAssert); ok { + model = ToAPIGraphQLAssert(a) + isDelta = a.IsDelta + } else if ap, ok := evt.Payload.(*mgraphql.GraphQLAssert); ok { + model = ToAPIGraphQLAssert(*ap) + isDelta = ap.IsDelta + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQLAssert{GraphqlAssertId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()} + } + + if model != nil { + p.streamers.GraphQLAssert.Publish(GraphQLAssertTopic{WorkspaceID: evt.WorkspaceID}, GraphQLAssertEvent{ + Type: eventType, + GraphQLAssert: model, + IsDelta: isDelta, + }) + } +} + +// Sync stream handlers + +func (s *GraphQLServiceRPC) GraphQLSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLSyncResponse) error) error { + var 
workspaceSet sync.Map + + filter := func(topic GraphQLTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLEvent) *graphqlv1.GraphQLSyncResponse { + var items []*graphqlv1.GraphQLSync + for _, event := range events { + if resp := graphqlSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQL, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLHeaderSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLHeaderSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLHeaderSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLHeaderSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLHeaderSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLHeaderTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLHeaderEvent) *graphqlv1.GraphQLHeaderSyncResponse { + var items []*graphqlv1.GraphQLHeaderSync + for _, event := range events { + if resp := graphqlHeaderSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLHeaderSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLHeader, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLResponseSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLResponseSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseEvent) *graphqlv1.GraphQLResponseSyncResponse { + var items []*graphqlv1.GraphQLResponseSync + for _, event := range events { + if resp := graphqlResponseSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) 
+ } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLResponse, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLResponseHeaderSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseHeaderSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLResponseHeaderSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseHeaderSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseHeaderSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseHeaderTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseHeaderEvent) *graphqlv1.GraphQLResponseHeaderSyncResponse { + var items []*graphqlv1.GraphQLResponseHeaderSync + for _, event := range events { + if resp := graphqlResponseHeaderSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseHeaderSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLResponseHeader, filter, converter, send, nil) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_converter.go b/packages/server/internal/api/rgraphql/rgraphql_converter.go new file mode 100644 index 000000000..f28287e0f --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_converter.go @@ -0,0 +1,382 @@ +//nolint:revive // exported +package rgraphql + +import ( + "time" + + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// Model -> Proto + +func ToAPIGraphQL(g mgraphql.GraphQL) *graphqlv1.GraphQL { + result := &graphqlv1.GraphQL{ + GraphqlId: g.ID.Bytes(), + Name: g.Name, + Url: g.Url, + Query: g.Query, + Variables: g.Variables, + } + if g.LastRunAt != nil { + result.LastRunAt = timestamppb.New(time.Unix(*g.LastRunAt, 0)) + } + return result +} + +func ToAPIGraphQLHeader(h mgraphql.GraphQLHeader) *graphqlv1.GraphQLHeader { + return &graphqlv1.GraphQLHeader{ + GraphqlHeaderId: h.ID.Bytes(), + GraphqlId: h.GraphQLID.Bytes(), + Key: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + Order: h.DisplayOrder, + } +} + +func ToAPIGraphQLAssert(a mgraphql.GraphQLAssert) *graphqlv1.GraphQLAssert { + return &graphqlv1.GraphQLAssert{ + GraphqlAssertId: a.ID.Bytes(), + GraphqlId: a.GraphQLID.Bytes(), + Value: a.Value, + Enabled: a.Enabled, + Order: a.DisplayOrder, + } +} + +func ToAPIGraphQLResponse(r mgraphql.GraphQLResponse) *graphqlv1.GraphQLResponse { + return &graphqlv1.GraphQLResponse{ + GraphqlResponseId: r.ID.Bytes(), + GraphqlId: r.GraphQLID.Bytes(), + Status: r.Status, + Body: string(r.Body), + Time: timestamppb.New(time.Unix(r.Time, 0)), + Duration: r.Duration, + Size: r.Size, + } +} + +func 
ToAPIGraphQLResponseHeader(h mgraphql.GraphQLResponseHeader) *graphqlv1.GraphQLResponseHeader { + return &graphqlv1.GraphQLResponseHeader{ + GraphqlResponseHeaderId: h.ID.Bytes(), + GraphqlResponseId: h.ResponseID.Bytes(), + Key: h.HeaderKey, + Value: h.HeaderValue, + } +} + +func ToAPIGraphQLResponseAssert(a mgraphql.GraphQLResponseAssert) *graphqlv1.GraphQLResponseAssert { + return &graphqlv1.GraphQLResponseAssert{ + GraphqlResponseAssertId: a.ID.Bytes(), + GraphqlResponseId: a.ResponseID.Bytes(), + Value: a.Value, + Success: a.Success, + } +} + +// Sync response builders + +func graphqlSyncResponseFrom(event GraphQLEvent) *graphqlv1.GraphQLSyncResponse { + var value *graphqlv1.GraphQLSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + name := event.GraphQL.GetName() + url := event.GraphQL.GetUrl() + query := event.GraphQL.GetQuery() + variables := event.GraphQL.GetVariables() + lastRunAt := event.GraphQL.GetLastRunAt() + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLSyncInsert{ + GraphqlId: event.GraphQL.GetGraphqlId(), + Name: name, + Url: url, + Query: query, + Variables: variables, + LastRunAt: lastRunAt, + }, + } + case eventTypeUpdate: + name := event.GraphQL.GetName() + url := event.GraphQL.GetUrl() + query := event.GraphQL.GetQuery() + variables := event.GraphQL.GetVariables() + lastRunAt := event.GraphQL.GetLastRunAt() + + var lastRunAtUnion *graphqlv1.GraphQLSyncUpdate_LastRunAtUnion + if lastRunAt != nil { + lastRunAtUnion = &graphqlv1.GraphQLSyncUpdate_LastRunAtUnion{ + Kind: graphqlv1.GraphQLSyncUpdate_LastRunAtUnion_KIND_VALUE, + Value: lastRunAt, + } + } + + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLSyncUpdate{ + GraphqlId: event.GraphQL.GetGraphqlId(), + Name: &name, + Url: &url, + Query: &query, + Variables: &variables, + LastRunAt: lastRunAtUnion, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLSyncDelete{GraphqlId: event.GraphQL.GetGraphqlId()}, + } + } + + return &graphqlv1.GraphQLSyncResponse{ + Items: []*graphqlv1.GraphQLSync{{Value: value}}, + } +} + +func graphqlHeaderSyncResponseFrom(event GraphQLHeaderEvent) *graphqlv1.GraphQLHeaderSyncResponse { + var value *graphqlv1.GraphQLHeaderSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + key := event.GraphQLHeader.GetKey() + val := event.GraphQLHeader.GetValue() + enabled := event.GraphQLHeader.GetEnabled() + description := event.GraphQLHeader.GetDescription() + order := event.GraphQLHeader.GetOrder() + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLHeaderSyncInsert{ + GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId(), + GraphqlId: event.GraphQLHeader.GetGraphqlId(), + Key: key, + Value: val, + Enabled: enabled, + Description: description, + Order: order, + }, + } + case eventTypeUpdate: + key := event.GraphQLHeader.GetKey() + val := event.GraphQLHeader.GetValue() + enabled := event.GraphQLHeader.GetEnabled() + description := event.GraphQLHeader.GetDescription() + order := event.GraphQLHeader.GetOrder() + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLHeaderSyncUpdate{ + GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId(), + Key: 
&key, + Value: &val, + Enabled: &enabled, + Description: &description, + Order: &order, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLHeaderSyncDelete{GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId()}, + } + } + + return &graphqlv1.GraphQLHeaderSyncResponse{ + Items: []*graphqlv1.GraphQLHeaderSync{{Value: value}}, + } +} + +func graphqlResponseSyncResponseFrom(event GraphQLResponseEvent) *graphqlv1.GraphQLResponseSyncResponse { + var value *graphqlv1.GraphQLResponseSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + status := event.GraphQLResponse.GetStatus() + body := event.GraphQLResponse.GetBody() + t := event.GraphQLResponse.GetTime() + duration := event.GraphQLResponse.GetDuration() + size := event.GraphQLResponse.GetSize() + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseSyncInsert{ + GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId(), + GraphqlId: event.GraphQLResponse.GetGraphqlId(), + Status: status, + Body: body, + Time: t, + Duration: duration, + Size: size, + }, + } + case eventTypeUpdate: + status := event.GraphQLResponse.GetStatus() + body := event.GraphQLResponse.GetBody() + t := event.GraphQLResponse.GetTime() + duration := event.GraphQLResponse.GetDuration() + size := event.GraphQLResponse.GetSize() + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseSyncUpdate{ + GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId(), + Status: &status, + Body: &body, + Time: t, + Duration: &duration, + Size: &size, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseSyncDelete{GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId()}, + } + } + + return &graphqlv1.GraphQLResponseSyncResponse{ + Items: []*graphqlv1.GraphQLResponseSync{{Value: value}}, + } +} + +func graphqlResponseHeaderSyncResponseFrom(event GraphQLResponseHeaderEvent) *graphqlv1.GraphQLResponseHeaderSyncResponse { + var value *graphqlv1.GraphQLResponseHeaderSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + key := event.GraphQLResponseHeader.GetKey() + val := event.GraphQLResponseHeader.GetValue() + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseHeaderSyncInsert{ + GraphqlResponseHeaderId: event.GraphQLResponseHeader.GetGraphqlResponseHeaderId(), + GraphqlResponseId: event.GraphQLResponseHeader.GetGraphqlResponseId(), + Key: key, + Value: val, + }, + } + case eventTypeUpdate: + key := event.GraphQLResponseHeader.GetKey() + val := event.GraphQLResponseHeader.GetValue() + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseHeaderSyncUpdate{ + GraphqlResponseHeaderId: event.GraphQLResponseHeader.GetGraphqlResponseHeaderId(), + Key: &key, + Value: &val, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseHeaderSyncDelete{GraphqlResponseHeaderId: 
event.GraphQLResponseHeader.GetGraphqlResponseHeaderId()}, + } + } + + return &graphqlv1.GraphQLResponseHeaderSyncResponse{ + Items: []*graphqlv1.GraphQLResponseHeaderSync{{Value: value}}, + } +} + +// graphqlDeltaSyncResponseFrom converts GraphQLEvent to GraphQLDeltaSync response +// TODO: Implement delta sync converter once delta event publishing is implemented +func graphqlDeltaSyncResponseFrom(event GraphQLEvent) *graphqlv1.GraphQLDeltaSyncResponse { + // For now, return nil as delta sync is not fully implemented + // Delta CRUD operations work, but real-time sync needs separate event streams + return nil +} + +// graphqlAssertSyncResponseFrom converts GraphQLAssertEvent to GraphQLAssertSync response +func graphqlAssertSyncResponseFrom(event GraphQLAssertEvent) *graphqlv1.GraphQLAssertSyncResponse { + var value *graphqlv1.GraphQLAssertSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLAssertSyncInsert{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + GraphqlId: event.GraphQLAssert.GetGraphqlId(), + Value: event.GraphQLAssert.GetValue(), + Enabled: event.GraphQLAssert.GetEnabled(), + Order: event.GraphQLAssert.GetOrder(), + }, + } + case eventTypeUpdate: + value_ := event.GraphQLAssert.GetValue() + enabled := event.GraphQLAssert.GetEnabled() + order := event.GraphQLAssert.GetOrder() + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLAssertSyncUpdate{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + Value: &value_, + Enabled: &enabled, + Order: &order, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLAssertSyncDelete{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + }, + } + } + + return &graphqlv1.GraphQLAssertSyncResponse{ + Items: []*graphqlv1.GraphQLAssertSync{ + { + Value: value, + }, + }, + } +} + +// graphqlResponseAssertSyncResponseFrom converts GraphQLResponseAssertEvent to GraphQLResponseAssertSync response +func graphqlResponseAssertSyncResponseFrom(event GraphQLResponseAssertEvent) *graphqlv1.GraphQLResponseAssertSyncResponse { + var value *graphqlv1.GraphQLResponseAssertSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value_ := event.GraphQLResponseAssert.GetValue() + success := event.GraphQLResponseAssert.GetSuccess() + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseAssertSyncInsert{ + GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + GraphqlResponseId: event.GraphQLResponseAssert.GetGraphqlResponseId(), + Value: value_, + Success: success, + }, + } + case eventTypeUpdate: + value_ := event.GraphQLResponseAssert.GetValue() + success := event.GraphQLResponseAssert.GetSuccess() + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseAssertSyncUpdate{ + GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + Value: &value_, + Success: &success, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: 
graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseAssertSyncDelete{ + GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + }, + } + } + + return &graphqlv1.GraphQLResponseAssertSyncResponse{ + Items: []*graphqlv1.GraphQLResponseAssertSync{ + { + Value: value, + }, + }, + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud.go b/packages/server/internal/api/rgraphql/rgraphql_crud.go new file mode 100644 index 000000000..d4507eff4 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud.go @@ -0,0 +1,244 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s *GraphQLServiceRPC) GraphQLCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQL + for _, ws := range workspaces { + items, err := s.graphqlService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, item := range items { + allItems = append(allItems, ToAPIGraphQL(item)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // FETCH + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + if len(workspaces) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("user has no workspaces")) + } + + defaultWorkspaceID := workspaces[0].ID + + // CHECK + if err := s.checkWorkspaceWriteAccess(ctx, defaultWorkspaceID); err != nil { + return nil, err + } + + // Parse items before starting transaction + items := make([]mutation.GraphQLInsertItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + items = append(items, mutation.GraphQLInsertItem{ + GraphQL: &mgraphql.GraphQL{ + ID: gqlID, + WorkspaceID: 
defaultWorkspaceID, + Name: item.Name, + Url: item.Url, + Query: item.Query, + Variables: item.Variables, + }, + WorkspaceID: defaultWorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.InsertGraphQLBatch(ctx, items); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + // FETCH + CHECK: parse items, read existing records, check permissions + updateItems := make([]mutation.GraphQLUpdateItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existing, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, existing.WorkspaceID); err != nil { + return nil, err + } + + if item.Name != nil { + existing.Name = *item.Name + } + if item.Url != nil { + existing.Url = *item.Url + } + if item.Query != nil { + existing.Query = *item.Query + } + if item.Variables != nil { + existing.Variables = *item.Variables + } + + updateItems = append(updateItems, mutation.GraphQLUpdateItem{ + GraphQL: existing, + WorkspaceID: existing.WorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.UpdateGraphQLBatch(ctx, updateItems); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + // FETCH + CHECK: parse items, read existing records, check permissions + deleteItems := make([]mutation.GraphQLDeleteItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existing, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, 
sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceDeleteAccess(ctx, existing.WorkspaceID); err != nil { + return nil, err + } + + deleteItems = append(deleteItems, mutation.GraphQLDeleteItem{ + ID: gqlID, + WorkspaceID: existing.WorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.DeleteGraphQLBatch(ctx, deleteItems); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// getGraphQLsWithDeltasForWorkspace returns both base and delta GraphQL entries for a workspace. +func (s *GraphQLServiceRPC) getGraphQLsWithDeltasForWorkspace(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + graphqlList, err := s.graphqlReader.GetByWorkspaceID(ctx, workspaceID) + if err != nil { + return nil, err + } + deltaList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + return nil, err + } + all := make([]mgraphql.GraphQL, 0, len(graphqlList)+len(deltaList)) + return append(append(all, graphqlList...), deltaList...), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go b/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go new file mode 100644 index 000000000..746bf4097 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go @@ -0,0 +1,857 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "errors" + "sync" + "time" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/internal/converter" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/patch" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLAssert CRUD operations + +func (s *GraphQLServiceRPC) GraphQLAssertCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLAssertCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allAsserts []*graphqlv1.GraphQLAssert + for _, workspace := range workspaces { + allGraphQLs, err := s.getGraphQLsWithDeltasForWorkspace(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + for _, graphql := range allGraphQLs { + asserts, err := 
s.graphqlAssertService.GetByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, assert := range asserts { + allAsserts = append(allAsserts, converter.ToAPIGraphQLAssert(assert)) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLAssertCollectionResponse{Items: allAsserts}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type insertItem struct { + assertID idwrap.IDWrap + graphqlID idwrap.IDWrap + value string + enabled bool + order float32 + workspaceID idwrap.IDWrap + } + insertData := make([]insertItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + insertData = append(insertData, insertItem{ + assertID: assertID, + graphqlID: graphqlID, + value: item.Value, + enabled: item.Enabled, + order: item.Order, + workspaceID: graphqlEntry.WorkspaceID, + }) + } + + // ACT: Insert asserts using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := time.Now().UnixMilli() + for _, data := range insertData { + assert := mgraphql.GraphQLAssert{ + ID: data.assertID, + GraphQLID: data.graphqlID, + Value: data.value, + Enabled: data.enabled, + Description: "", + DisplayOrder: data.order, + } + + if err := mut.InsertGraphQLAssert(ctx, mutation.GraphQLAssertInsertItem{ + ID: data.assertID, + GraphQLID: data.graphqlID, + WorkspaceID: data.workspaceID, + IsDelta: false, + Params: gen.CreateGraphQLAssertParams{ + ID: data.assertID.Bytes(), + GraphqlID: data.graphqlID.Bytes(), + Value: data.value, + Enabled: data.enabled, + Description: "", + DisplayOrder: float64(data.order), + IsDelta: false, + CreatedAt: now, + UpdatedAt: now, + }, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpInsert, + ID: data.assertID, + ParentID: data.graphqlID, + WorkspaceID: data.workspaceID, + Payload: assert, + }) + } + + if 
err := mut.Commit(ctx); err != nil { // Auto-publishes events! + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Process request data and perform all reads/checks OUTSIDE transaction + type updateItem struct { + existingAssert mgraphql.GraphQLAssert + value *string + enabled *bool + order *float32 + workspaceID idwrap.IDWrap + } + updateData := make([]updateItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, assertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + updateData = append(updateData, updateItem{ + existingAssert: *existingAssert, + value: item.Value, + enabled: item.Enabled, + order: item.Order, + workspaceID: graphqlEntry.WorkspaceID, + }) + } + + // ACT: Update asserts using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := time.Now().UnixMilli() + for _, data := range updateData { + assert := data.existingAssert + + // Build patch with only changed fields + assertPatch := patch.GraphQLAssertPatch{} + + // Update fields if provided and track in patch + if data.value != nil { + assert.Value = *data.value + assertPatch.Value = patch.NewOptional(*data.value) + } + if data.enabled != nil { + assert.Enabled = *data.enabled + assertPatch.Enabled = patch.NewOptional(*data.enabled) + } + if data.order != nil { + assert.DisplayOrder = *data.order + assertPatch.Order = patch.NewOptional(*data.order) + } + + if err := mut.UpdateGraphQLAssert(ctx, mutation.GraphQLAssertUpdateItem{ + ID: assert.ID, + GraphQLID: assert.GraphQLID, + WorkspaceID: data.workspaceID, + IsDelta: assert.IsDelta, + Params: gen.UpdateGraphQLAssertParams{ + ID: assert.ID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + UpdatedAt: now, + }, + Patch: assertPatch, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := 
mut.Commit(ctx); err != nil { // Auto-publishes events! + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type deleteItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + } + deleteItems := make([]deleteItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, assertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate delete access to the workspace + if err := s.checkWorkspaceDeleteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + deleteItems = append(deleteItems, deleteItem{ + ID: assertID, + GraphQLID: existingAssert.GraphQLID, + WorkspaceID: graphqlEntry.WorkspaceID, + IsDelta: existingAssert.IsDelta, + }) + } + + // ACT: Delete using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, item := range deleteItems { + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpDelete, + ID: item.ID, + ParentID: item.GraphQLID, + WorkspaceID: item.WorkspaceID, + IsDelta: item.IsDelta, + }) + if err := mut.Queries().DeleteGraphQLAssert(ctx, item.ID.Bytes()); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! 
+ return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLAssertSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLAssertSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLAssertSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLAssertSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLAssertTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLAssertEvent) *graphqlv1.GraphQLAssertSyncResponse { + var items []*graphqlv1.GraphQLAssertSync + for _, event := range events { + // Skip delta asserts (they have separate sync) + if event.IsDelta { + continue + } + if resp := graphqlAssertSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLAssertSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLAssert, + filter, + converter, + send, + nil, + ) +} + +// Delta operations +func (s *GraphQLServiceRPC) GraphQLAssertDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLAssertDeltaCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLAssertDelta + for _, workspace := range workspaces { + // Get GraphQL delta entries for this workspace + graphqlList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Get asserts for each GraphQL entry + for _, graphql := range graphqlList { + asserts, err := s.graphqlAssertService.GetByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to delta format + for _, assert := range asserts { + if !assert.IsDelta { + continue + } + + delta := &graphqlv1.GraphQLAssertDelta{ + DeltaGraphqlAssertId: assert.ID.Bytes(), + GraphqlId: assert.GraphQLID.Bytes(), + } + + if assert.ParentGraphQLAssertID != nil { + delta.GraphqlAssertId = assert.ParentGraphQLAssertID.Bytes() + } + + // Only include delta fields if they exist + if assert.DeltaValue != nil { + delta.Value = assert.DeltaValue + } + if assert.DeltaEnabled != nil { + delta.Enabled = assert.DeltaEnabled + } + if assert.DeltaDisplayOrder != nil { + delta.Order = assert.DeltaDisplayOrder + } + + allDeltas = append(allDeltas, delta) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLAssertDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +func (s *GraphQLServiceRPC) 
GraphQLAssertDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is required")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type insertItem struct { + graphqlID idwrap.IDWrap + newID idwrap.IDWrap + parentID idwrap.IDWrap + workspaceID idwrap.IDWrap + baseAssert mgraphql.GraphQLAssert + item *graphqlv1.GraphQLAssertDeltaInsert + } + insertData := make([]insertItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlEntry, err := s.graphqlReader.Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if !graphqlEntry.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + parentAssertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + baseAssert, err := s.graphqlAssertService.GetByID(ctx, parentAssertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + newID := idwrap.NewNow() + if len(item.DeltaGraphqlAssertId) > 0 { + newID, err = idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } + + insertData = append(insertData, insertItem{ + graphqlID: graphqlID, + newID: newID, + parentID: parentAssertID, + workspaceID: graphqlEntry.WorkspaceID, + baseAssert: *baseAssert, + item: item, + }) + } + + // ACT: Insert new delta records using mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := time.Now().UnixMilli() + for _, data := range insertData { + params := gen.CreateGraphQLAssertParams{ + ID: data.newID.Bytes(), + GraphqlID: data.graphqlID.Bytes(), + Value: data.baseAssert.Value, + Enabled: data.baseAssert.Enabled, + Description: data.baseAssert.Description, + DisplayOrder: float64(data.baseAssert.DisplayOrder), + ParentGraphqlAssertID: data.parentID.Bytes(), + IsDelta: true, + DeltaValue: stringPtrToNullString(data.item.Value), + DeltaEnabled: boolPtrToNullBool(data.item.Enabled), + DeltaDescription: stringPtrToNullString(nil), + DeltaDisplayOrder: float32PtrToNullFloat64(data.item.Order), + CreatedAt: now, + UpdatedAt: now, + } + + if err := mut.InsertGraphQLAssert(ctx, mutation.GraphQLAssertInsertItem{ + ID: data.newID, + GraphQLID: data.graphqlID, + 
WorkspaceID: data.workspaceID, + IsDelta: true, + Params: params, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + assertService := s.graphqlAssertService.TX(mut.TX()) + updated, err := assertService.GetByID(ctx, data.newID) + if err == nil { + mut.UpdateLastEventPayload(*updated) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert delta must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type updateItem struct { + deltaID idwrap.IDWrap + existingAssert mgraphql.GraphQLAssert + workspaceID idwrap.IDWrap + item *graphqlv1.GraphQLAssertDeltaUpdate + } + updateData := make([]updateItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_assert_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingAssert.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL assert is not a delta")) + } + + // Get the GraphQL entry to check workspace access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + updateData = append(updateData, updateItem{ + deltaID: deltaID, + existingAssert: *existingAssert, + workspaceID: graphqlEntry.WorkspaceID, + item: item, + }) + } + + // ACT: Update using mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range updateData { + item := data.item + deltaValue := data.existingAssert.DeltaValue + deltaEnabled := data.existingAssert.DeltaEnabled + deltaOrder := data.existingAssert.DeltaDisplayOrder + var patchData patch.GraphQLAssertPatch + + if item.Value != nil { + switch item.Value.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_ValueUnion_KIND_UNSET: + deltaValue = nil + patchData.Value = patch.Unset[string]() + case graphqlv1.GraphQLAssertDeltaUpdate_ValueUnion_KIND_VALUE: + valueStr := item.Value.GetValue() + deltaValue = &valueStr + patchData.Value = patch.NewOptional(valueStr) + } + } + if item.Enabled != nil { + switch item.Enabled.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_EnabledUnion_KIND_UNSET: + 
deltaEnabled = nil + patchData.Enabled = patch.Unset[bool]() + case graphqlv1.GraphQLAssertDeltaUpdate_EnabledUnion_KIND_VALUE: + enabledBool := item.Enabled.GetValue() + deltaEnabled = &enabledBool + patchData.Enabled = patch.NewOptional(enabledBool) + } + } + if item.Order != nil { + switch item.Order.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_OrderUnion_KIND_UNSET: + deltaOrder = nil + patchData.Order = patch.Unset[float32]() + case graphqlv1.GraphQLAssertDeltaUpdate_OrderUnion_KIND_VALUE: + orderFloat := item.Order.GetValue() + deltaOrder = &orderFloat + patchData.Order = patch.NewOptional(orderFloat) + } + } + + assertService := s.graphqlAssertService.TX(mut.TX()) + if err := mut.UpdateGraphQLAssertDelta(ctx, mutation.GraphQLAssertDeltaUpdateItem{ + ID: data.deltaID, + GraphQLID: data.existingAssert.GraphQLID, + WorkspaceID: data.workspaceID, + Params: gen.UpdateGraphQLAssertDeltaParams{ + ID: data.deltaID.Bytes(), + DeltaValue: stringPtrToNullString(deltaValue), + DeltaEnabled: boolPtrToNullBool(deltaEnabled), + DeltaDisplayOrder: float32PtrToNullFloat64(deltaOrder), + UpdatedAt: time.Now().UnixMilli(), + }, + Patch: patchData, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Update payload in tracked event + updated, err := assertService.GetByID(ctx, data.deltaID) + if err == nil { + mut.UpdateLastEventPayload(*updated) + } + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert delta must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type deleteItem struct { + deltaID idwrap.IDWrap + graphqlID idwrap.IDWrap + workspaceID idwrap.IDWrap + assert mgraphql.GraphQLAssert + } + deleteData := make([]deleteItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_assert_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta assert + existingAssert, err := s.graphqlAssertService.GetByID(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingAssert.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL assert is not a delta")) + } + + // Get the GraphQL entry to check workspace access + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Check delete access to the workspace + if err := s.checkWorkspaceDeleteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + deleteData = append(deleteData, deleteItem{ + deltaID: deltaID, + graphqlID: existingAssert.GraphQLID, + workspaceID: 
graphqlEntry.WorkspaceID, + assert: *existingAssert, + }) + } + + // ACT: Execute deletes in transaction + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range deleteData { + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpDelete, + ID: data.deltaID, + ParentID: data.graphqlID, + WorkspaceID: data.workspaceID, + IsDelta: true, + Payload: data.assert, + }) + if err := mut.Queries().DeleteGraphQLAssert(ctx, data.deltaID.Bytes()); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLAssertDeltaSyncResponse]) error { + // TODO: Implement streaming delta sync + return nil +} + +// Helper functions for null conversions +func stringPtrToNullString(s *string) sql.NullString { + if s == nil { + return sql.NullString{Valid: false} + } + return sql.NullString{String: *s, Valid: true} +} + +func boolPtrToNullBool(b *bool) sql.NullBool { + if b == nil { + return sql.NullBool{Valid: false} + } + return sql.NullBool{Bool: *b, Valid: true} +} + +func float32PtrToNullFloat64(f *float32) sql.NullFloat64 { + if f == nil { + return sql.NullFloat64{Valid: false} + } + return sql.NullFloat64{Float64: float64(*f), Valid: true} +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go b/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go new file mode 100644 index 000000000..d2b7e4ab9 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go @@ -0,0 +1,355 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLDeltaCollection fetches all delta GraphQL entries for the user's workspaces +func (s *GraphQLServiceRPC) GraphQLDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLDeltaCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLDelta + for _, workspace := range workspaces { + // Get GraphQL delta entries for this workspace + graphqlList, err := s.graphqlService.Reader().GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) 
+ } + + // Convert to delta format + for _, gql := range graphqlList { + delta := &graphqlv1.GraphQLDelta{ + DeltaGraphqlId: gql.ID.Bytes(), + } + + if gql.ParentGraphQLID != nil { + delta.GraphqlId = gql.ParentGraphQLID.Bytes() + } + + // Only include delta fields if they exist + if gql.DeltaName != nil { + delta.Name = gql.DeltaName + } + if gql.DeltaUrl != nil { + delta.Url = gql.DeltaUrl + } + if gql.DeltaQuery != nil { + delta.Query = gql.DeltaQuery + } + if gql.DeltaVariables != nil { + delta.Variables = gql.DeltaVariables + } + + allDeltas = append(allDeltas, delta) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +// GraphQLDeltaInsert creates new delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is required")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Check workspace write access + graphqlEntry, err := s.graphqlService.Reader().Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + var deltaID idwrap.IDWrap + if len(item.DeltaGraphqlId) > 0 { + var err error + deltaID, err = idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } else { + deltaID = idwrap.NewNow() + } + + // Create delta GraphQL entry + deltaGraphQL := &mgraphql.GraphQL{ + ID: deltaID, + WorkspaceID: graphqlEntry.WorkspaceID, + FolderID: graphqlEntry.FolderID, + Name: graphqlEntry.Name, + Url: graphqlEntry.Url, + Query: graphqlEntry.Query, + Variables: graphqlEntry.Variables, + Description: graphqlEntry.Description, + ParentGraphQLID: &graphqlID, + IsDelta: true, + DeltaName: item.Name, + DeltaUrl: item.Url, + DeltaQuery: item.Query, + DeltaVariables: item.Variables, + CreatedAt: 0, // Will be set by service + UpdatedAt: 0, // Will be set by service + } + + // Use mutation pattern for create with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.graphqlService.TX(mut.TX()).Create(ctx, deltaGraphQL) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaUpdate updates existing delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, 
connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL entry + existingDelta, err := s.graphqlService.Reader().Get(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + // Check write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, existingDelta.WorkspaceID); err != nil { + return nil, err + } + + // Apply updates + if item.Name != nil { + switch item.Name.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_NameUnion_KIND_UNSET: + existingDelta.DeltaName = nil + case graphqlv1.GraphQLDeltaUpdate_NameUnion_KIND_VALUE: + nameStr := item.Name.GetValue() + existingDelta.DeltaName = &nameStr + } + } + if item.Url != nil { + switch item.Url.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_UrlUnion_KIND_UNSET: + existingDelta.DeltaUrl = nil + case graphqlv1.GraphQLDeltaUpdate_UrlUnion_KIND_VALUE: + urlStr := item.Url.GetValue() + existingDelta.DeltaUrl = &urlStr + } + } + if item.Query != nil { + switch item.Query.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_QueryUnion_KIND_UNSET: + existingDelta.DeltaQuery = nil + case graphqlv1.GraphQLDeltaUpdate_QueryUnion_KIND_VALUE: + queryStr := item.Query.GetValue() + existingDelta.DeltaQuery = &queryStr + } + } + if item.Variables != nil { + switch item.Variables.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_VariablesUnion_KIND_UNSET: + existingDelta.DeltaVariables = nil + case graphqlv1.GraphQLDeltaUpdate_VariablesUnion_KIND_VALUE: + variablesStr := item.Variables.GetValue() + existingDelta.DeltaVariables = &variablesStr + } + } + + // Use mutation pattern for update with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.graphqlService.TX(mut.TX()).Update(ctx, existingDelta); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaDelete deletes delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL delta must be provided")) + } + + // Step 1: Gather data and check permissions OUTSIDE transaction + var deleteData []struct { + deltaID idwrap.IDWrap + existingDelta *mgraphql.GraphQL + } + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, 
errors.New("delta_graphql_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL entry + existingDelta, err := s.graphqlService.Reader().Get(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + // Check write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, existingDelta.WorkspaceID); err != nil { + return nil, err + } + + deleteData = append(deleteData, struct { + deltaID idwrap.IDWrap + existingDelta *mgraphql.GraphQL + }{ + deltaID: deltaID, + existingDelta: existingDelta, + }) + } + + // Step 2: Execute deletes in transaction using mutation pattern + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + for _, data := range deleteData { + if err := s.graphqlService.TX(mut.TX()).Delete(ctx, data.deltaID); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaSync streams delta GraphQL changes in real-time +func (s *GraphQLServiceRPC) GraphQLDeltaSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLDeltaSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLDeltaSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLDeltaSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLDeltaSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLEvent) *graphqlv1.GraphQLDeltaSyncResponse { + var items []*graphqlv1.GraphQLDeltaSync + for _, event := range events { + if resp := graphqlDeltaSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) 
+ } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLDeltaSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQL, filter, converter, send, nil) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_header.go b/packages/server/internal/api/rgraphql/rgraphql_crud_header.go new file mode 100644 index 000000000..557a3f47e --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_header.go @@ -0,0 +1,212 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s *GraphQLServiceRPC) GraphQLHeaderCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLHeaderCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLHeader + for _, ws := range workspaces { + gqlList, err := s.graphqlService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, gql := range gqlList { + headers, err := s.headerService.GetByGraphQLID(ctx, gql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, h := range headers { + allItems = append(allItems, ToAPIGraphQLHeader(h)) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLHeaderCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + header := &mgraphql.GraphQLHeader{ + ID: headerID, + GraphQLID: gqlID, + Key: item.Key, + Value: item.Value, + Enabled: item.Enabled, + 
Description: item.Description, + DisplayOrder: item.Order, + } + + if err := s.headerService.Create(ctx, header); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, GraphQLHeaderEvent{ + Type: eventTypeInsert, + GraphQLHeader: ToAPIGraphQLHeader(*header), + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existingHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(existingHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("header not found")) + } + existing := existingHeaders[0] + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, existing.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + if item.Key != nil { + existing.Key = *item.Key + } + if item.Value != nil { + existing.Value = *item.Value + } + if item.Enabled != nil { + existing.Enabled = *item.Enabled + } + if item.Description != nil { + existing.Description = *item.Description + } + if item.Order != nil { + existing.DisplayOrder = *item.Order + } + + if err := s.headerService.Update(ctx, &existing); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, GraphQLHeaderEvent{ + Type: eventTypeUpdate, + GraphQLHeader: ToAPIGraphQLHeader(existing), + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existingHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(existingHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("header not found")) + } + existing := existingHeaders[0] + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, existing.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceDeleteAccess(ctx, workspaceID); err != nil { + return 
nil, err + } + + if err := s.headerService.Delete(ctx, headerID); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, GraphQLHeaderEvent{ + Type: eventTypeDelete, + GraphQLHeader: &graphqlv1.GraphQLHeader{GraphqlHeaderId: headerID.Bytes(), GraphqlId: existing.GraphQLID.Bytes()}, + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go b/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go new file mode 100644 index 000000000..5789f81c3 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go @@ -0,0 +1,343 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLHeaderDeltaCollection fetches all delta GraphQL headers for the user's workspaces +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLHeaderDeltaCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLHeaderDelta + for _, workspace := range workspaces { + // Get GraphQL header delta entries for this workspace + headerList, err := s.headerService.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to delta format + for _, header := range headerList { + delta := &graphqlv1.GraphQLHeaderDelta{ + DeltaGraphqlHeaderId: header.ID.Bytes(), + GraphqlId: header.GraphQLID.Bytes(), + } + + if header.ParentGraphQLHeaderID != nil { + delta.GraphqlHeaderId = header.ParentGraphQLHeaderID.Bytes() + } + + // Only include delta fields if they exist + if header.DeltaKey != nil { + delta.Key = header.DeltaKey + } + if header.DeltaValue != nil { + delta.Value = header.DeltaValue + } + if header.DeltaEnabled != nil { + delta.Enabled = header.DeltaEnabled + } + if header.DeltaDescription != nil { + delta.Description = header.DeltaDescription + } + if header.DeltaDisplayOrder != nil { + delta.Order = header.DeltaDisplayOrder + } + + allDeltas = append(allDeltas, delta) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLHeaderDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +// GraphQLHeaderDeltaInsert creates new delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is 
required")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required for each delta item")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get parent header to copy base values + parentHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(parentHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("parent header not found")) + } + parentHeader := parentHeaders[0] + + // Check workspace write access through the GraphQL entry + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + var deltaID idwrap.IDWrap + if len(item.DeltaGraphqlHeaderId) > 0 { + var err error + deltaID, err = idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } else { + deltaID = idwrap.NewNow() + } + + // Create delta GraphQL header entry + deltaHeader := &mgraphql.GraphQLHeader{ + ID: deltaID, + GraphQLID: graphqlID, + Key: parentHeader.Key, + Value: parentHeader.Value, + Enabled: parentHeader.Enabled, + Description: parentHeader.Description, + DisplayOrder: parentHeader.DisplayOrder, + ParentGraphQLHeaderID: &headerID, + IsDelta: true, + DeltaKey: item.Key, + DeltaValue: item.Value, + DeltaEnabled: item.Enabled, + DeltaDescription: item.Description, + DeltaDisplayOrder: item.Order, + CreatedAt: 0, // Will be set by service + UpdatedAt: 0, // Will be set by service + } + + // Use mutation pattern for create with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Create(ctx, deltaHeader) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaUpdate updates existing delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL header delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_header_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get 
existing delta GraphQL header entry + existingDeltas, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{deltaID}) + if err != nil || len(existingDeltas) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("delta header not found")) + } + existingDelta := existingDeltas[0] + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL header entry is not a delta")) + } + + // Check write access to the workspace + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, existingDelta.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + // Apply updates to delta fields + if item.Key != nil { + switch item.Key.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_KeyUnion_KIND_UNSET: + existingDelta.DeltaKey = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_KeyUnion_KIND_VALUE: + keyStr := item.Key.GetValue() + existingDelta.DeltaKey = &keyStr + } + } + + if item.Value != nil { + switch item.Value.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_ValueUnion_KIND_UNSET: + existingDelta.DeltaValue = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_ValueUnion_KIND_VALUE: + valueStr := item.Value.GetValue() + existingDelta.DeltaValue = &valueStr + } + } + + if item.Enabled != nil { + switch item.Enabled.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_EnabledUnion_KIND_UNSET: + existingDelta.DeltaEnabled = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_EnabledUnion_KIND_VALUE: + enabledVal := item.Enabled.GetValue() + existingDelta.DeltaEnabled = &enabledVal + } + } + + if item.Description != nil { + switch item.Description.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_DescriptionUnion_KIND_UNSET: + existingDelta.DeltaDescription = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_DescriptionUnion_KIND_VALUE: + descStr := item.Description.GetValue() + existingDelta.DeltaDescription = &descStr + } + } + + if item.Order != nil { + switch item.Order.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_OrderUnion_KIND_UNSET: + existingDelta.DeltaDisplayOrder = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_OrderUnion_KIND_VALUE: + orderVal := item.Order.GetValue() + existingDelta.DeltaDisplayOrder = &orderVal + } + } + + // Use mutation pattern for update with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Update(ctx, &existingDelta) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaDelete deletes delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL header delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlHeaderId) == 0 { + return nil, 
connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_header_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL header entry + existingDeltas, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{deltaID}) + if err != nil || len(existingDeltas) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("delta header not found")) + } + existingDelta := existingDeltas[0] + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL header entry is not a delta")) + } + + // Check delete access to the workspace + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, existingDelta.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceDeleteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + // Use mutation pattern for delete with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Delete(ctx, deltaID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaSync streams delta header changes to the client +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLHeaderDeltaSyncResponse]) error { + // TODO: Implement streaming delta sync with proper event filtering + // Similar to GraphQLDeltaSync, this requires a delta-specific event stream + // that only publishes delta-related changes to prevent flooding clients + // with non-delta header updates. 
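+ // One possible wiring, mirroring streamGraphQLResponseAssertSync in rgraphql_crud_response_assert.go
+ // (sketch only; it assumes a dedicated streamers.GraphQLHeaderDelta streamer plus matching
+ // GraphQLHeaderDeltaTopic/GraphQLHeaderDeltaEvent types, none of which exist yet):
+ //
+ //   filter := func(topic GraphQLHeaderDeltaTopic) bool { /* check workspace membership via s.us */ return true }
+ //   converter := func(events []GraphQLHeaderDeltaEvent) *graphqlv1.GraphQLHeaderDeltaSyncResponse { /* build Items */ return nil }
+ //   return eventstream.StreamToClient(ctx, s.streamers.GraphQLHeaderDelta, filter, converter, stream.Send, nil)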
+ return nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_response.go b/packages/server/internal/api/rgraphql/rgraphql_crud_response.go new file mode 100644 index 000000000..1ea2978c4 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_response.go @@ -0,0 +1,62 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s *GraphQLServiceRPC) GraphQLResponseCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLResponse + for _, ws := range workspaces { + responses, err := s.responseService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, r := range responses { + allItems = append(allItems, ToAPIGraphQLResponse(r)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLResponseHeaderCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseHeaderCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLResponseHeader + for _, ws := range workspaces { + headers, err := s.responseService.GetHeadersByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, h := range headers { + allItems = append(allItems, ToAPIGraphQLResponseHeader(h)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseHeaderCollectionResponse{Items: allItems}), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go b/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go new file mode 100644 index 000000000..9bb0c0c3e --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go @@ -0,0 +1,91 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLResponseAssert operations + +func (s *GraphQLServiceRPC) GraphQLResponseAssertCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseAssertCollectionResponse], error) { + userID, err := 
mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Collect all response asserts across user's workspaces + var allAsserts []*graphqlv1.GraphQLResponseAssert + for _, workspace := range workspaces { + asserts, err := s.responseService.GetAssertsByWorkspaceID(ctx, workspace.ID) + if err != nil { + continue + } + for _, assert := range asserts { + allAsserts = append(allAsserts, ToAPIGraphQLResponseAssert(assert)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseAssertCollectionResponse{Items: allAsserts}), nil +} + +func (s *GraphQLServiceRPC) GraphQLResponseAssertSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseAssertSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLResponseAssertSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseAssertSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseAssertSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseAssertTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseAssertEvent) *graphqlv1.GraphQLResponseAssertSyncResponse { + var items []*graphqlv1.GraphQLResponseAssertSync + for _, event := range events { + if resp := graphqlResponseAssertSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) 
+ } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseAssertSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLResponseAssert, + filter, + converter, + send, + nil, + ) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_version.go b/packages/server/internal/api/rgraphql/rgraphql_crud_version.go new file mode 100644 index 000000000..969967e54 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_version.go @@ -0,0 +1,170 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLVersion operations + +func (s *GraphQLServiceRPC) GraphQLVersionCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLVersionCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allVersions []*graphqlv1.GraphQLVersion + for _, workspace := range workspaces { + // Get base GraphQL entries for this workspace + graphqlList, err := s.graphqlReader.GetByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Also get delta GraphQL entries (versions can be stored against delta IDs) + deltaList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Combine base and delta entries + allGraphQLs := make([]mgraphql.GraphQL, 0, len(graphqlList)+len(deltaList)) + allGraphQLs = append(allGraphQLs, graphqlList...) + allGraphQLs = append(allGraphQLs, deltaList...) 
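+ // Note: GraphQLRun stores each auto-created snapshot under its version ID (the snapshot
+ // GraphQL row reuses version.ID), so the version rows gathered below also act as handles
+ // to those snapshot entries.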
+ + // Get versions for each GraphQL entry + for _, graphql := range allGraphQLs { + versions, err := s.graphqlReader.GetGraphQLVersionsByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to API format + for _, version := range versions { + apiVersion := ToAPIGraphQLVersion(version) + allVersions = append(allVersions, apiVersion) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLVersionCollectionResponse{Items: allVersions}), nil +} + +func (s *GraphQLServiceRPC) GraphQLVersionSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLVersionSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLVersionSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLVersionSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLVersionSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLVersionTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLVersionEvent) *graphqlv1.GraphQLVersionSyncResponse { + var items []*graphqlv1.GraphQLVersionSync + for _, event := range events { + if resp := graphqlVersionSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLVersionSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLVersion, + filter, + converter, + send, + nil, + ) +} + +// ToAPIGraphQLVersion converts model to API type +func ToAPIGraphQLVersion(version mgraphql.GraphQLVersion) *graphqlv1.GraphQLVersion{ + return &graphqlv1.GraphQLVersion{ + GraphqlVersionId: version.ID.Bytes(), + GraphqlId: version.GraphQLID.Bytes(), + Name: version.VersionName, + Description: version.VersionDescription, + CreatedAt: version.CreatedAt, + } +} + +// graphqlVersionSyncResponseFrom converts GraphQL version events to sync responses +func graphqlVersionSyncResponseFrom(event GraphQLVersionEvent) *graphqlv1.GraphQLVersionSyncResponse { + var value *graphqlv1.GraphQLVersionSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLVersionSyncInsert{ + GraphqlVersionId: event.GraphQLVersion.GetGraphqlVersionId(), + GraphqlId: event.GraphQLVersion.GetGraphqlId(), + Name: event.GraphQLVersion.GetName(), + Description: event.GraphQLVersion.GetDescription(), + CreatedAt: event.GraphQLVersion.GetCreatedAt(), + }, + } + case eventTypeUpdate: + name := event.GraphQLVersion.GetName() + description := event.GraphQLVersion.GetDescription() + createdAt := event.GraphQLVersion.GetCreatedAt() + value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLVersionSyncUpdate{ + GraphqlVersionId: event.GraphQLVersion.GetGraphqlVersionId(), + Name: &name, + Description: &description, + CreatedAt: &createdAt, + }, + } + case eventTypeDelete: + 
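// Delete events carry only the version ID; name and description are not needed for removal. +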
value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLVersionSyncDelete{ + GraphqlVersionId: event.GraphQLVersion.GetGraphqlVersionId(), + }, + } + default: + return nil + } + + return &graphqlv1.GraphQLVersionSyncResponse{ + Items: []*graphqlv1.GraphQLVersionSync{{Value: value}}, + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go b/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go new file mode 100644 index 000000000..fc23dccd1 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go @@ -0,0 +1,4 @@ +//nolint:revive // exported +package rgraphql + +// This file is deprecated. The graphqlDeltaSyncResponseFrom function is now in rgraphql_converter.go diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec.go b/packages/server/internal/api/rgraphql/rgraphql_exec.go new file mode 100644 index 000000000..3c050ae6a --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec.go @@ -0,0 +1,788 @@ +//nolint:revive // exported +package rgraphql + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "strings" + "time" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +const introspectionQuery = `query IntrospectionQuery { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + description + locations + args { + ...InputValue + } + } + } +} + +fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } +} + +fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue +} + +fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } +}` + +func (s *GraphQLServiceRPC) GraphQLRun(ctx context.Context, req *connect.Request[graphqlv1.GraphQLRunRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, 
connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceReadAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + // Get user ID for version creation + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Build variable map from workspace env + varMap, err := s.buildWorkspaceVarMap(ctx, gqlEntry.WorkspaceID) + if err != nil { + varMap = make(map[string]any) + } + + // Resolve GraphQL request (handles both delta and non-delta) + var resolvedGraphQL mgraphql.GraphQL + var headers []mgraphql.GraphQLHeader + var asserts []mgraphql.GraphQLAssert + + if gqlEntry.IsDelta && gqlEntry.ParentGraphQLID != nil { + // Delta request: use resolver to merge base + delta + resolved, err := s.resolver.Resolve(ctx, *gqlEntry.ParentGraphQLID, &gqlEntry.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to resolve delta request: %w", err)) + } + resolvedGraphQL = resolved.Resolved + headers = resolved.ResolvedHeaders + asserts = resolved.ResolvedAsserts + + // Use workspace ID from original entry + resolvedGraphQL.WorkspaceID = gqlEntry.WorkspaceID + } else { + // Non-delta request: load components directly + resolvedGraphQL = *gqlEntry + + hdrs, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + hdrs = []mgraphql.GraphQLHeader{} + } + headers = hdrs + + assrts, err := s.graphqlAssertService.GetByGraphQLID(ctx, gqlID) + if err != nil { + assrts = []mgraphql.GraphQLAssert{} + } + asserts = assrts + } + + // Build and execute GraphQL request + httpReq, err := prepareGraphQLRequest(&resolvedGraphQL, headers, varMap) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("failed to prepare request: %w", err)) + } + + client := httpclient.New() + startTime := time.Now() + + resp, err := client.Do(httpReq.WithContext(ctx)) + if err != nil { + return nil, connect.NewError(connect.CodeUnavailable, fmt.Errorf("request failed: %w", err)) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to read response: %w", err)) + } + + duration := time.Since(startTime).Milliseconds() + + // Store response + responseID := idwrap.NewNow() + nowUnix := time.Now().Unix() + + gqlResponse := mgraphql.GraphQLResponse{ + ID: responseID, + GraphQLID: gqlID, + Status: int32(resp.StatusCode), //nolint:gosec + Body: body, + Time: startTime.Unix(), + Duration: int32(duration), //nolint:gosec + Size: int32(len(body)), //nolint:gosec + CreatedAt: nowUnix, + } + + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to begin transaction: %w", err)) + } + defer mut.Rollback() + + tx := mut.TX() + txResponseService := s.responseService.TX(tx) + + if err := txResponseService.Create(ctx, gqlResponse); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Store response headers + var respHeaderEvents []GraphQLResponseHeaderEvent + responseHeaders := make(map[string]string) + for key, values := range resp.Header { + for _, 
val := range values { + headerID := idwrap.NewNow() + respHeader := mgraphql.GraphQLResponseHeader{ + ID: headerID, + ResponseID: responseID, + HeaderKey: key, + HeaderValue: val, + CreatedAt: nowUnix, + } + if err := txResponseService.CreateHeader(ctx, respHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + respHeaderEvents = append(respHeaderEvents, GraphQLResponseHeaderEvent{ + Type: eventTypeInsert, + GraphQLResponseHeader: ToAPIGraphQLResponseHeader(respHeader), + }) + // Store first value for each header key for assertion context + if _, exists := responseHeaders[key]; !exists { + responseHeaders[key] = val + } + } + } + + // Update last_run_at + now := time.Now().Unix() + gqlEntry.LastRunAt = &now + txGraphqlService := s.graphqlService.TX(tx) + if err := txGraphqlService.Update(ctx, gqlEntry); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Create version with snapshot + versionName := fmt.Sprintf("v%d", time.Now().UnixNano()) + versionDesc := "Auto-saved version (Run)" + txGraphqlWriter := sgraphql.NewWriterFromQueries(gen.New(tx)) + + version, err := txGraphqlWriter.CreateGraphQLVersion(ctx, gqlID, userID, versionName, versionDesc) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create version: %w", err)) + } + + // Create snapshot GraphQL entry (using version ID as GraphQL ID) + snapshotGraphQL := &mgraphql.GraphQL{ + ID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + FolderID: gqlEntry.FolderID, + Name: gqlEntry.Name, + Url: gqlEntry.Url, + Query: gqlEntry.Query, + Variables: gqlEntry.Variables, + Description: gqlEntry.Description, + IsSnapshot: true, + IsDelta: false, + } + if err := txGraphqlWriter.Create(ctx, snapshotGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot GraphQL: %w", err)) + } + + // Track snapshot GraphQL insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQL, + Op: mutation.OpInsert, + ID: version.ID, + ParentID: gqlEntry.WorkspaceID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotGraphQL, + }) + + // Clone headers into snapshot + txHeaderService := s.headerService.TX(tx) + for _, header := range headers { + snapshotHeader := &mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Key: header.Key, + Value: header.Value, + Enabled: header.Enabled, + Description: header.Description, + DisplayOrder: header.DisplayOrder, + } + if err := txHeaderService.Create(ctx, snapshotHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to clone header: %w", err)) + } + + // Track snapshot header insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLHeader, + Op: mutation.OpInsert, + ID: snapshotHeader.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotHeader, + }) + } + + // Clone request assertions into snapshot (matches HTTP pattern) + txAssertService := s.graphqlAssertService.TX(tx) + for _, assert := range asserts { + snapshotAssert := &mgraphql.GraphQLAssert{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: assert.DisplayOrder, + } + if err := txAssertService.Create(ctx, snapshotAssert); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to clone assertion: %w", err)) + } + + // Track snapshot assertion insertion 
event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpInsert, + ID: snapshotAssert.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotAssert, + }) + } + + // Clone response into snapshot + snapshotResponse := mgraphql.GraphQLResponse{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Status: gqlResponse.Status, + Body: gqlResponse.Body, + Time: gqlResponse.Time, + Duration: gqlResponse.Duration, + Size: gqlResponse.Size, + CreatedAt: gqlResponse.CreatedAt, + } + txResponseSvc := s.responseService.TX(tx) + if err := txResponseSvc.Create(ctx, snapshotResponse); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot response: %w", err)) + } + + // Track snapshot response insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponse, + Op: mutation.OpInsert, + ID: snapshotResponse.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotResponse, + }) + + // Clone response headers into snapshot + for key, values := range resp.Header { + for _, val := range values { + snapshotRespHeader := mgraphql.GraphQLResponseHeader{ + ID: idwrap.NewNow(), + ResponseID: snapshotResponse.ID, + HeaderKey: key, + HeaderValue: val, + CreatedAt: nowUnix, + } + if err := txResponseSvc.CreateHeader(ctx, snapshotRespHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot response header: %w", err)) + } + + // Track snapshot response header insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponseHeader, + Op: mutation.OpInsert, + ID: snapshotRespHeader.ID, + ParentID: snapshotResponse.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotRespHeader, + }) + } + } + + // Evaluate assertions BEFORE commit (matches HTTP pattern) + // This ensures response assertions exist in DB before we clone them into snapshot + var responseAssertions []mgraphql.GraphQLResponseAssert + if len(asserts) > 0 { + // Prepare response data for assertion evaluation + respData := GraphQLResponseData{ + StatusCode: int(resp.StatusCode), + Body: body, + Headers: responseHeaders, + } + + // Evaluate and store assertions within the same transaction + responseAssertions, err = s.evaluateAndStoreAssertions(ctx, tx, gqlID, responseID, gqlEntry.WorkspaceID, respData, asserts) + if err != nil { + slog.WarnContext(ctx, "Failed to evaluate assertions", + "error", err, + "graphql_id", gqlID.String(), + "response_id", responseID.String()) + // Don't fail the request, assertions are supplementary + responseAssertions = []mgraphql.GraphQLResponseAssert{} + } + } + + // Clone response assertions into snapshot (matches HTTP pattern) + for _, responseAssert := range responseAssertions { + snapshotResponseAssert := mgraphql.GraphQLResponseAssert{ + ID: idwrap.NewNow(), + ResponseID: snapshotResponse.ID, + Value: responseAssert.Value, + Success: responseAssert.Success, + CreatedAt: nowUnix, + } + if err := txResponseSvc.CreateAssert(ctx, snapshotResponseAssert); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to clone response assertion: %w", err)) + } + + // Track snapshot response assertion insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponseAssert, + Op: mutation.OpInsert, + ID: snapshotResponseAssert.ID, + ParentID: snapshotResponse.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotResponseAssert, + }) + } + + // Collect 
events before commit for manual publishing of snapshot entities + snapshotEvents := mut.Events() + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to commit transaction: %w", err)) + } + + // Publish events + if s.streamers.GraphQLResponse != nil { + s.streamers.GraphQLResponse.Publish(GraphQLResponseTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLResponseEvent{ + Type: eventTypeInsert, + GraphQLResponse: ToAPIGraphQLResponse(gqlResponse), + }) + } + if s.streamers.GraphQLResponseHeader != nil { + topic := GraphQLResponseHeaderTopic{WorkspaceID: gqlEntry.WorkspaceID} + for _, evt := range respHeaderEvents { + s.streamers.GraphQLResponseHeader.Publish(topic, evt) + } + } + if s.streamers.GraphQL != nil { + s.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLEvent{ + Type: eventTypeUpdate, + GraphQL: ToAPIGraphQL(*gqlEntry), + }) + } + + // Publish version insert event + if s.streamers.GraphQLVersion != nil { + s.streamers.GraphQLVersion.Publish(GraphQLVersionTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLVersionEvent{ + Type: eventTypeInsert, + GraphQLVersion: ToAPIGraphQLVersion(*version), + }) + } + + // Publish response assertion events (now that they're committed) + if len(responseAssertions) > 0 && s.streamers.GraphQLResponseAssert != nil { + topic := GraphQLResponseAssertTopic{WorkspaceID: gqlEntry.WorkspaceID} + for _, assert := range responseAssertions { + s.streamers.GraphQLResponseAssert.Publish(topic, GraphQLResponseAssertEvent{ + Type: eventTypeInsert, + GraphQLResponseAssert: ToAPIGraphQLResponseAssert(assert), + }) + } + } + + // Publish snapshot sync events for snapshot response/headers/assertions + // so the frontend receives real-time updates for the newly created snapshot data + s.publishSnapshotSyncEvents(snapshotEvents, gqlEntry.WorkspaceID) + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLDuplicate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDuplicateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + // Read headers outside TX + headers, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + headers = []mgraphql.GraphQLHeader{} + } + + newGQLID := idwrap.NewNow() + + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer devtoolsdb.TxnRollback(tx) + + txGraphqlService := s.graphqlService.TX(tx) + txHeaderService := s.headerService.TX(tx) + + newEntry := &mgraphql.GraphQL{ + ID: newGQLID, + WorkspaceID: gqlEntry.WorkspaceID, + FolderID: gqlEntry.FolderID, + Name: fmt.Sprintf("Copy of %s", gqlEntry.Name), + Url: gqlEntry.Url, + Query: gqlEntry.Query, + Variables: gqlEntry.Variables, + Description: gqlEntry.Description, + } + + if err := txGraphqlService.Create(ctx, newEntry); err != nil { + return nil, 
connect.NewError(connect.CodeInternal, err) + } + + for _, h := range headers { + newHeader := &mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: newGQLID, + Key: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + DisplayOrder: h.DisplayOrder, + } + if err := txHeaderService.Create(ctx, newHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := tx.Commit(); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Publish GraphQL insert event + if s.streamers.GraphQL != nil { + s.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLEvent{ + Type: eventTypeInsert, + GraphQL: ToAPIGraphQL(*newEntry), + }) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLIntrospect(ctx context.Context, req *connect.Request[graphqlv1.GraphQLIntrospectRequest]) (*connect.Response[graphqlv1.GraphQLIntrospectResponse], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceReadAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + varMap, err := s.buildWorkspaceVarMap(ctx, gqlEntry.WorkspaceID) + if err != nil { + varMap = make(map[string]any) + } + + headers, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + headers = []mgraphql.GraphQLHeader{} + } + + // Build introspection request + body, _ := json.Marshal(map[string]any{ + "query": introspectionQuery, + }) + + url := interpolateString(gqlEntry.Url, varMap) + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("failed to create request: %w", err)) + } + httpReq.Header.Set("Content-Type", "application/json") + + for _, h := range headers { + if h.Enabled && h.Key != "" { + httpReq.Header.Set(interpolateString(h.Key, varMap), interpolateString(h.Value, varMap)) + } + } + + client := httpclient.New() + resp, err := client.Do(httpReq) + if err != nil { + return nil, connect.NewError(connect.CodeUnavailable, fmt.Errorf("introspection request failed: %w", err)) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to read response: %w", err)) + } + + return connect.NewResponse(&graphqlv1.GraphQLIntrospectResponse{ + IntrospectionJson: string(respBody), + Sdl: "", // SDL conversion would need a graphql library - return empty for now + }), nil +} + +// Helper functions + +func (s *GraphQLServiceRPC) buildWorkspaceVarMap(ctx context.Context, workspaceID idwrap.IDWrap) (map[string]any, error) { + workspace, err := s.ws.Get(ctx, workspaceID) + if err != nil { + return nil, fmt.Errorf("failed to get workspace: %w", err) + } + + var globalVars []menv.Variable + if workspace.GlobalEnv != (idwrap.IDWrap{}) { + globalVars, err = s.vs.GetVariableByEnvID(ctx, workspace.GlobalEnv) + if err != nil && !errors.Is(err, 
senv.ErrNoVarFound) { + return nil, fmt.Errorf("failed to get global environment variables: %w", err) + } + } + + varMap := make(map[string]any) + for _, envVar := range globalVars { + if envVar.IsEnabled() { + varMap[envVar.VarKey] = envVar.Value + } + } + + return varMap, nil +} + +func prepareGraphQLRequest(gql *mgraphql.GraphQL, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error) { + url := interpolateString(gql.Url, varMap) + query := interpolateString(gql.Query, varMap) + variables := interpolateString(gql.Variables, varMap) + + var varsMap map[string]any + if variables != "" { + if err := json.Unmarshal([]byte(variables), &varsMap); err != nil { + varsMap = nil + } + } + + bodyMap := map[string]any{"query": query} + if varsMap != nil { + bodyMap["variables"] = varsMap + } + + bodyBytes, err := json.Marshal(bodyMap) + if err != nil { + return nil, fmt.Errorf("failed to marshal body: %w", err) + } + + req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(bodyBytes)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + for _, h := range headers { + if h.Enabled && h.Key != "" { + req.Header.Set(interpolateString(h.Key, varMap), interpolateString(h.Value, varMap)) + } + } + + return req, nil +} + +func interpolateString(s string, varMap map[string]any) string { + result := s + for key, val := range varMap { + placeholder := "{{" + key + "}}" + valStr := fmt.Sprintf("%v", val) + result = strings.ReplaceAll(result, placeholder, valStr) + // Also support {{ key }} (with spaces) + placeholder = "{{ " + key + " }}" + result = strings.ReplaceAll(result, placeholder, valStr) + } + return result +} + +// publishSnapshotSyncEvents publishes sync events for snapshot entities +// so the frontend receives real-time updates for the newly created snapshot data. +// This function follows the same pattern as HTTP's publishSnapshotSyncEvents. 
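+// Callers capture the tracked events with mut.Events() before committing and replay them
+// here afterwards; GraphQLRun does this for the snapshot response, response headers and
+// response assertions it creates.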
+func (s *GraphQLServiceRPC) publishSnapshotSyncEvents(events []mutation.Event, workspaceID idwrap.IDWrap) { + for _, evt := range events { + //nolint:exhaustive + switch evt.Entity { + case mutation.EntityGraphQLResponse: + if s.streamers.GraphQLResponse != nil { + if resp, ok := evt.Payload.(mgraphql.GraphQLResponse); ok { + s.streamers.GraphQLResponse.Publish( + GraphQLResponseTopic{WorkspaceID: workspaceID}, + GraphQLResponseEvent{ + Type: eventTypeInsert, + GraphQLResponse: ToAPIGraphQLResponse(resp), + }, + ) + } + } + case mutation.EntityGraphQLResponseHeader: + if s.streamers.GraphQLResponseHeader != nil { + if rh, ok := evt.Payload.(mgraphql.GraphQLResponseHeader); ok { + s.streamers.GraphQLResponseHeader.Publish( + GraphQLResponseHeaderTopic{WorkspaceID: workspaceID}, + GraphQLResponseHeaderEvent{ + Type: eventTypeInsert, + GraphQLResponseHeader: ToAPIGraphQLResponseHeader(rh), + }, + ) + } + } + case mutation.EntityGraphQLResponseAssert: + if s.streamers.GraphQLResponseAssert != nil { + if ra, ok := evt.Payload.(mgraphql.GraphQLResponseAssert); ok { + s.streamers.GraphQLResponseAssert.Publish( + GraphQLResponseAssertTopic{WorkspaceID: workspaceID}, + GraphQLResponseAssertEvent{ + Type: eventTypeInsert, + GraphQLResponseAssert: ToAPIGraphQLResponseAssert(ra), + }, + ) + } + } + } + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go b/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go new file mode 100644 index 000000000..9b5628f8e --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go @@ -0,0 +1,521 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "log/slog" + "strings" + "sync" + "time" + + devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +// AssertionResult represents the result of evaluating a single assertion +type AssertionResult struct { + AssertionID idwrap.IDWrap + Expression string + Success bool + Error error + EvaluatedAt time.Time +} + +// GraphQLResponseData wraps the response for assertion evaluation +type GraphQLResponseData struct { + StatusCode int + Body []byte + Headers map[string]string +} + +// evaluateAndStoreAssertions evaluates assertions and stores them within a transaction, returning the created assertions +// This is used by GraphQLRun to evaluate assertions before commit so they can be cloned into snapshots +func (s *GraphQLServiceRPC) evaluateAndStoreAssertions(ctx context.Context, tx *sql.Tx, graphqlID idwrap.IDWrap, responseID idwrap.IDWrap, workspaceID idwrap.IDWrap, resp GraphQLResponseData, asserts []mgraphql.GraphQLAssert) ([]mgraphql.GraphQLResponseAssert, error) { + if len(asserts) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + enabledAsserts := make([]mgraphql.GraphQLAssert, 0, len(asserts)) + for _, assert := range asserts { + if assert.IsEnabled() { + enabledAsserts = append(enabledAsserts, assert) + } + } + + if len(enabledAsserts) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + evalContext := s.createAssertionEvalContext(resp) + results := s.evaluateAssertionsParallel(ctx, enabledAsserts, evalContext) + + // Store results within the provided transaction + responseAsserts, err := s.storeAssertionResultsInTx(ctx, tx, responseID, results) + if err != 
nil { + return nil, fmt.Errorf("failed to store assertion results for GraphQL %s: %w", graphqlID.String(), err) + } + + return responseAsserts, nil +} + +// evaluateResolvedAssertions evaluates pre-resolved assertions against the response and stores the results +// This is the original function used for standalone assertion evaluation (kept for compatibility) +func (s *GraphQLServiceRPC) evaluateResolvedAssertions(ctx context.Context, graphqlID idwrap.IDWrap, responseID idwrap.IDWrap, workspaceID idwrap.IDWrap, resp GraphQLResponseData, asserts []mgraphql.GraphQLAssert) error { + if len(asserts) == 0 { + return nil + } + + enabledAsserts := make([]mgraphql.GraphQLAssert, 0, len(asserts)) + for _, assert := range asserts { + if assert.IsEnabled() { + enabledAsserts = append(enabledAsserts, assert) + } + } + + if len(enabledAsserts) == 0 { + return nil + } + + evalContext := s.createAssertionEvalContext(resp) + results := s.evaluateAssertionsParallel(ctx, enabledAsserts, evalContext) + + if err := s.storeAssertionResultsBatch(ctx, graphqlID, responseID, workspaceID, results); err != nil { + return fmt.Errorf("failed to store assertion results for GraphQL %s: %w", graphqlID.String(), err) + } + + return nil +} + +// evaluateAssertionsParallel evaluates multiple assertions in parallel with timeout and error handling +func (s *GraphQLServiceRPC) evaluateAssertionsParallel(ctx context.Context, asserts []mgraphql.GraphQLAssert, evalContext map[string]any) []AssertionResult { + results := make([]AssertionResult, len(asserts)) + resultChan := make(chan AssertionResult, len(asserts)) + + var wg sync.WaitGroup + + // Create a context with timeout for assertion evaluation (30 seconds per assertion batch) + evalCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + // Evaluate each assertion in a separate goroutine + for i, assert := range asserts { + wg.Add(1) + go func(idx int, assertion mgraphql.GraphQLAssert) { + defer wg.Done() + startTime := time.Now() + result := AssertionResult{ + AssertionID: assertion.ID, + EvaluatedAt: startTime, + } + + // Recover from panics in assertion evaluation + defer func() { + if r := recover(); r != nil { + result.Error = fmt.Errorf("panic during assertion evaluation: %v", r) + result.Success = false + resultChan <- result + } + }() + + // Use the assertion value directly as the expression + expression := assertion.Value + result.Expression = expression + + // Evaluate the assertion expression with context + success, err := s.evaluateAssertion(evalCtx, expression, evalContext) + if err != nil { + // Check for context timeout + if evalCtx.Err() == context.DeadlineExceeded { + result.Error = fmt.Errorf("assertion evaluation timed out: %w", err) + } else { + result.Error = fmt.Errorf("evaluation failed: %w", err) + } + result.Success = false + } else { + result.Success = success + } + + // Add evaluation duration for monitoring + duration := time.Since(startTime) + if duration > 5*time.Second { + slog.WarnContext(ctx, "Slow assertion evaluation", + "assertion_id", assertion.ID.String(), + "duration", duration) + } + + resultChan <- result + }(i, assert) + } + + // Close the result channel when all goroutines complete + go func() { + wg.Wait() + close(resultChan) + }() + + // Collect results preserving order with timeout + collectCtx, collectCancel := context.WithTimeout(ctx, 35*time.Second) + defer collectCancel() + + collectedCount := 0 + for { + select { + case result, ok := <-resultChan: + if !ok { + // Channel closed, all results collected + 
goto done + } + // Find the original index for this result + for j, assert := range asserts { + if assert.ID == result.AssertionID { + results[j] = result + collectedCount++ + break + } + } + + case <-collectCtx.Done(): + // Collection timeout - fill missing results with timeout error + slog.WarnContext(ctx, "Assertion result collection timed out after 35 seconds") + for j, assert := range asserts { + if results[j].AssertionID.String() == "" { + results[j] = AssertionResult{ + AssertionID: assert.ID, + Expression: assert.Value, + Success: false, + Error: fmt.Errorf("collection timeout"), + EvaluatedAt: time.Now(), + } + } + } + goto done + + case <-evalCtx.Done(): + // Evaluation context cancelled + slog.WarnContext(ctx, "Assertion evaluation context cancelled", "error", evalCtx.Err()) + for j, assert := range asserts { + if results[j].AssertionID.String() == "" { + results[j] = AssertionResult{ + AssertionID: assert.ID, + Expression: assert.Value, + Success: false, + Error: fmt.Errorf("evaluation cancelled: %w", evalCtx.Err()), + EvaluatedAt: time.Now(), + } + } + } + goto done + } + } + +done: + if collectedCount != len(asserts) { + slog.WarnContext(ctx, "Incomplete assertion result collection", + "collected", collectedCount, + "total", len(asserts)) + } + + return results +} + +// storeAssertionResultsInTx stores assertion results within an existing transaction and returns the created assertions +func (s *GraphQLServiceRPC) storeAssertionResultsInTx(ctx context.Context, tx *sql.Tx, responseID idwrap.IDWrap, results []AssertionResult) ([]mgraphql.GraphQLResponseAssert, error) { + if len(results) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + txResponseService := s.responseService.TX(tx) + now := time.Now().Unix() + responseAsserts := make([]mgraphql.GraphQLResponseAssert, 0, len(results)) + + for _, result := range results { + var value string + var success bool + + if result.Error != nil { + // Store error information in the value field + value = fmt.Sprintf("ERROR: %s", result.Error.Error()) + success = false + } else { + // Store successful assertion result + value = result.Expression + success = result.Success + } + + assertID := idwrap.NewNow() + assert := mgraphql.GraphQLResponseAssert{ + ID: assertID, + ResponseID: responseID, + Value: value, + Success: success, + CreatedAt: now, + } + + if err := txResponseService.CreateAssert(ctx, assert); err != nil { + return nil, fmt.Errorf("failed to insert assertion result for %s: %w", result.AssertionID.String(), err) + } + + responseAsserts = append(responseAsserts, assert) + } + + slog.InfoContext(ctx, "Stored assertion results in transaction", + "count", len(results), + "response_id", responseID.String()) + + return responseAsserts, nil +} + +// storeAssertionResultsBatch stores multiple assertion results in a single database transaction +func (s *GraphQLServiceRPC) storeAssertionResultsBatch(ctx context.Context, graphqlID idwrap.IDWrap, responseID idwrap.IDWrap, workspaceID idwrap.IDWrap, results []AssertionResult) error { + if len(results) == 0 { + return nil + } + + // Start transaction for batch insertion + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return fmt.Errorf("failed to begin transaction: %w", err) + } + defer devtoolsdb.TxnRollback(tx) + + txResponseService := s.responseService.TX(tx) + + // Insert all results in batch + now := time.Now().Unix() + var events []GraphQLResponseAssertEvent + + for _, result := range results { + var value string + var success bool + + if result.Error != nil { + // 
Store error information in the value field + value = fmt.Sprintf("ERROR: %s", result.Error.Error()) + success = false + } else { + // Store successful assertion result + value = result.Expression + success = result.Success + } + + assertID := idwrap.NewNow() + assert := mgraphql.GraphQLResponseAssert{ + ID: assertID, + ResponseID: responseID, + Value: value, + Success: success, + CreatedAt: now, + } + + if err := txResponseService.CreateAssert(ctx, assert); err != nil { + return fmt.Errorf("failed to insert assertion result for %s: %w", result.AssertionID.String(), err) + } + + events = append(events, GraphQLResponseAssertEvent{ + Type: eventTypeInsert, + GraphQLResponseAssert: ToAPIGraphQLResponseAssert(assert), + }) + } + + slog.InfoContext(ctx, "Stored assertion results", + "count", len(results), + "graphql_id", graphqlID.String(), + "response_id", responseID.String()) + + // Commit transaction + if err := tx.Commit(); err != nil { + return fmt.Errorf("failed to commit transaction: %w", err) + } + + // Publish events + if s.streamers.GraphQLResponseAssert != nil { + topic := GraphQLResponseAssertTopic{WorkspaceID: workspaceID} + for _, evt := range events { + s.streamers.GraphQLResponseAssert.Publish(topic, evt) + } + } + + return nil +} + +// createAssertionEvalContext creates the evaluation context with response data +func (s *GraphQLServiceRPC) createAssertionEvalContext(resp GraphQLResponseData) map[string]any { + // Parse response body as JSON if possible + var body any + var bodyMap map[string]any + bodyString := string(resp.Body) + + if err := json.Unmarshal(resp.Body, &body); err != nil { + // If JSON parsing fails, use as string + body = bodyString + } else { + // Also try to parse as map for easier access + if mapBody, ok := body.(map[string]any); ok { + bodyMap = mapBody + } + } + + // Convert headers to map + headers := make(map[string]string) + headersLower := make(map[string]string) + contentType := "" + + for key, value := range resp.Headers { + lowerKey := strings.ToLower(key) + headers[key] = value + headersLower[lowerKey] = value + + if lowerKey == "content-type" { + contentType = value + } + } + + // Extract GraphQL-specific fields from response + var data any + var errors any + if bodyMap != nil { + if d, ok := bodyMap["data"]; ok { + data = d + } + if e, ok := bodyMap["errors"]; ok { + errors = e + } + } + + // Extract JSON path helpers (for full body navigation) + jsonPathHelpers := s.createJSONPathHelpers(bodyMap) + + // Extract JSON path helpers for data field specifically + var dataMap map[string]any + if data != nil { + if dm, ok := data.(map[string]any); ok { + dataMap = dm + } + } + dataPathHelpers := s.createJSONPathHelpers(dataMap) + + // Create comprehensive evaluation context + context := map[string]any{ + // Main response object + "response": map[string]any{ + "status": resp.StatusCode, + "body": body, + "headers": headers, + "data": data, + "errors": errors, + }, + + // Direct access to commonly used fields + "status": resp.StatusCode, + "body": body, + "body_string": bodyString, + "headers": headers, + "content_type": contentType, + + // GraphQL-specific fields (top-level for convenience) + "data": data, + "errors": errors, + + // Convenience variables + "success": resp.StatusCode >= 200 && resp.StatusCode < 300, + "client_error": resp.StatusCode >= 400 && resp.StatusCode < 500, + "server_error": resp.StatusCode >= 500 && resp.StatusCode < 600, + "is_json": strings.HasPrefix(contentType, "application/json"), + "has_body": len(resp.Body) > 0, + 
"has_data": data != nil, + "has_errors": errors != nil, + + // JSON path helpers (for full body) + "json": jsonPathHelpers, + // JSON path helpers specifically for data field + "dataJson": dataPathHelpers, + } + + return context +} + +// createJSONPathHelpers creates helper functions for JSON path navigation +func (s *GraphQLServiceRPC) createJSONPathHelpers(bodyMap map[string]any) map[string]any { + helpers := make(map[string]any) + + if bodyMap == nil { + return helpers + } + + // Helper function to get nested value by path + getPath := func(path string) any { + parts := strings.Split(path, ".") + current := bodyMap + + for _, part := range parts { + if next, ok := current[part]; ok { + if nextMap, ok := next.(map[string]any); ok { + current = nextMap + } else { + return next + } + } else { + return nil + } + } + return current + } + + // Helper to check if path exists + hasPath := func(path string) bool { + return getPath(path) != nil + } + + // Helper to get string value + getString := func(path string) string { + val := getPath(path) + if val == nil { + return "" + } + if str, ok := val.(string); ok { + return str + } + return fmt.Sprintf("%v", val) + } + + // Helper to get numeric value + getNumber := func(path string) float64 { + val := getPath(path) + if val == nil { + return 0 + } + switch num := val.(type) { + case float64: + return num + case int: + return float64(num) + case int64: + return float64(num) + default: + if str, ok := val.(string); ok { + var f float64 + fmt.Sscanf(str, "%f", &f) + return f + } + } + return 0 + } + + helpers["path"] = getPath + helpers["has"] = hasPath + helpers["string"] = getString + helpers["number"] = getNumber + + return helpers +} + +// evaluateAssertion evaluates an assertion expression against the provided context +func (s *GraphQLServiceRPC) evaluateAssertion(ctx context.Context, expressionStr string, context map[string]any) (bool, error) { + env := expression.NewEnv(context) + return expression.ExpressionEvaluteAsBool(ctx, env, expressionStr) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go b/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go new file mode 100644 index 000000000..a9567c1f1 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go @@ -0,0 +1,442 @@ +//nolint:revive // test file +package rgraphql + +import ( + "context" + "testing" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +func TestCreateAssertionEvalContext(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + + tests := []struct { + name string + response GraphQLResponseData + validate func(t *testing.T, ctx map[string]any) + }{ + { + name: "basic JSON response", + response: GraphQLResponseData{ + StatusCode: 200, + Body: []byte(`{"data": {"user": {"name": "Alice"}}}`), + Headers: map[string]string{ + "Content-Type": "application/json", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + // Check status code + if status, ok := ctx["status"].(int); !ok || status != 200 { + t.Errorf("expected status 200, got %v", ctx["status"]) + } + + // Check success flag + if success, ok := ctx["success"].(bool); !ok || !success { + t.Errorf("expected success=true for 2xx status, got %v", ctx["success"]) + } + + // Check is_json flag + if isJSON, ok := ctx["is_json"].(bool); !ok || !isJSON { + t.Errorf("expected is_json=true for JSON content-type, got %v", ctx["is_json"]) + } + + // 
Check body parsing + if body, ok := ctx["body"].(map[string]any); !ok { + t.Errorf("expected body to be parsed as map, got %T", ctx["body"]) + } else { + if data, ok := body["data"].(map[string]any); !ok { + t.Errorf("expected body.data to exist") + } else { + if user, ok := data["user"].(map[string]any); !ok { + t.Errorf("expected body.data.user to exist") + } else { + if name, ok := user["name"].(string); !ok || name != "Alice" { + t.Errorf("expected body.data.user.name='Alice', got %v", name) + } + } + } + } + + // Check JSON path helpers + if jsonHelpers, ok := ctx["json"].(map[string]any); !ok { + t.Errorf("expected json helpers to exist") + } else { + // Test path helper + if pathFn, ok := jsonHelpers["path"].(func(string) any); ok { + result := pathFn("data.user.name") + if name, ok := result.(string); !ok || name != "Alice" { + t.Errorf("json.path('data.user.name') expected 'Alice', got %v", result) + } + } else { + t.Errorf("expected json.path function to exist") + } + + // Test has helper + if hasFn, ok := jsonHelpers["has"].(func(string) bool); ok { + if !hasFn("data.user.name") { + t.Errorf("json.has('data.user.name') should return true") + } + if hasFn("data.missing") { + t.Errorf("json.has('data.missing') should return false") + } + } else { + t.Errorf("expected json.has function to exist") + } + } + }, + }, + { + name: "client error response", + response: GraphQLResponseData{ + StatusCode: 404, + Body: []byte(`{"error": "Not found"}`), + Headers: map[string]string{ + "Content-Type": "application/json", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + if status, ok := ctx["status"].(int); !ok || status != 404 { + t.Errorf("expected status 404, got %v", ctx["status"]) + } + + if success, ok := ctx["success"].(bool); !ok || success { + t.Errorf("expected success=false for 4xx status, got %v", ctx["success"]) + } + + if clientError, ok := ctx["client_error"].(bool); !ok || !clientError { + t.Errorf("expected client_error=true for 4xx status, got %v", ctx["client_error"]) + } + + if serverError, ok := ctx["server_error"].(bool); !ok || serverError { + t.Errorf("expected server_error=false for 4xx status, got %v", ctx["server_error"]) + } + }, + }, + { + name: "server error response", + response: GraphQLResponseData{ + StatusCode: 500, + Body: []byte(`Internal Server Error`), + Headers: map[string]string{ + "Content-Type": "text/plain", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + if status, ok := ctx["status"].(int); !ok || status != 500 { + t.Errorf("expected status 500, got %v", ctx["status"]) + } + + if success, ok := ctx["success"].(bool); !ok || success { + t.Errorf("expected success=false for 5xx status, got %v", ctx["success"]) + } + + if serverError, ok := ctx["server_error"].(bool); !ok || !serverError { + t.Errorf("expected server_error=true for 5xx status, got %v", ctx["server_error"]) + } + + if isJSON, ok := ctx["is_json"].(bool); !ok || isJSON { + t.Errorf("expected is_json=false for text/plain, got %v", ctx["is_json"]) + } + + // Body should be string since JSON parsing fails + if bodyStr, ok := ctx["body_string"].(string); !ok || bodyStr != "Internal Server Error" { + t.Errorf("expected body_string='Internal Server Error', got %v", ctx["body_string"]) + } + }, + }, + { + name: "empty response", + response: GraphQLResponseData{ + StatusCode: 204, + Body: []byte{}, + Headers: map[string]string{}, + }, + validate: func(t *testing.T, ctx map[string]any) { + if hasBody, ok := ctx["has_body"].(bool); !ok || hasBody { + 
t.Errorf("expected has_body=false for empty body, got %v", ctx["has_body"]) + } + + if success, ok := ctx["success"].(bool); !ok || !success { + t.Errorf("expected success=true for 204 status, got %v", ctx["success"]) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := srv.createAssertionEvalContext(tt.response) + tt.validate(t, ctx) + }) + } +} + +func TestEvaluateAssertionsParallel(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + ctx := context.Background() + + tests := []struct { + name string + asserts []mgraphql.GraphQLAssert + evalContext map[string]any + validateCount int + checkResults func(t *testing.T, results []AssertionResult) + }{ + { + name: "empty assertions list", + asserts: []mgraphql.GraphQLAssert{}, + evalContext: map[string]any{}, + validateCount: 0, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 0 { + t.Errorf("expected 0 results for empty assertions, got %d", len(results)) + } + }, + }, + { + name: "single successful assertion", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 200", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Error != nil { + t.Errorf("expected no error, got %v", results[0].Error) + } + if !results[0].Success { + t.Errorf("expected success=true for status == 200") + } + }, + }, + { + name: "single failing assertion", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 404", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Error != nil { + t.Errorf("expected no error, got %v", results[0].Error) + } + if results[0].Success { + t.Errorf("expected success=false for status == 404 when status is 200") + } + }, + }, + { + name: "multiple assertions", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 200", + Enabled: true, + }, + { + ID: idwrap.NewNow(), + Value: "success == true", + Enabled: true, + }, + { + ID: idwrap.NewNow(), + Value: "is_json == true", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + "success": true, + "is_json": true, + }, + validateCount: 3, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 3 { + t.Fatalf("expected 3 results, got %d", len(results)) + } + for i, result := range results { + if result.Error != nil { + t.Errorf("result[%d]: expected no error, got %v", i, result.Error) + } + if !result.Success { + t.Errorf("result[%d]: expected success=true, expression=%s", i, result.Expression) + } + } + }, + }, + { + name: "invalid expression", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "invalid syntax %%%", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + // Should have an error for invalid syntax + if results[0].Error == nil { + t.Errorf("expected error for invalid expression syntax") + } + if results[0].Success { 
+ t.Errorf("expected success=false for invalid expression") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + results := srv.evaluateAssertionsParallel(ctx, tt.asserts, tt.evalContext) + + if len(results) != tt.validateCount { + t.Fatalf("expected %d results, got %d", tt.validateCount, len(results)) + } + + tt.checkResults(t, results) + + // Verify all results have timestamps + for i, result := range results { + if result.EvaluatedAt.IsZero() { + t.Errorf("result[%d]: expected non-zero EvaluatedAt timestamp", i) + } + } + }) + } +} + +func TestCreateJSONPathHelpers(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + + tests := []struct { + name string + bodyMap map[string]any + checks func(t *testing.T, helpers map[string]any) + }{ + { + name: "nil body map", + bodyMap: nil, + checks: func(t *testing.T, helpers map[string]any) { + if helpers == nil { + t.Errorf("expected non-nil helpers map") + } + if len(helpers) != 0 { + t.Errorf("expected empty helpers for nil body, got %d", len(helpers)) + } + }, + }, + { + name: "simple nested object", + bodyMap: map[string]any{ + "data": map[string]any{ + "user": map[string]any{ + "name": "Bob", + "age": 30, + }, + }, + }, + checks: func(t *testing.T, helpers map[string]any) { + // Test path function + if pathFn, ok := helpers["path"].(func(string) any); ok { + // Test valid path + if result := pathFn("data.user.name"); result != "Bob" { + t.Errorf("path('data.user.name') expected 'Bob', got %v", result) + } + + // Test nested path + if result := pathFn("data.user.age"); result != 30 { + t.Errorf("path('data.user.age') expected 30, got %v", result) + } + + // Test invalid path + if result := pathFn("data.missing"); result != nil { + t.Errorf("path('data.missing') expected nil, got %v", result) + } + } else { + t.Errorf("expected path function to exist") + } + + // Test has function + if hasFn, ok := helpers["has"].(func(string) bool); ok { + if !hasFn("data.user.name") { + t.Errorf("has('data.user.name') should return true") + } + if hasFn("data.missing") { + t.Errorf("has('data.missing') should return false") + } + } else { + t.Errorf("expected has function to exist") + } + + // Test string function + if strFn, ok := helpers["string"].(func(string) string); ok { + if result := strFn("data.user.name"); result != "Bob" { + t.Errorf("string('data.user.name') expected 'Bob', got %v", result) + } + // Non-string value should be converted + if result := strFn("data.user.age"); result != "30" { + t.Errorf("string('data.user.age') expected '30', got %v", result) + } + } else { + t.Errorf("expected string function to exist") + } + + // Test number function + if numFn, ok := helpers["number"].(func(string) float64); ok { + if result := numFn("data.user.age"); result != 30.0 { + t.Errorf("number('data.user.age') expected 30.0, got %v", result) + } + // Missing path should return 0 + if result := numFn("data.missing"); result != 0 { + t.Errorf("number('data.missing') expected 0, got %v", result) + } + } else { + t.Errorf("expected number function to exist") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + helpers := srv.createJSONPathHelpers(tt.bodyMap) + tt.checks(t, helpers) + }) + } +} diff --git a/packages/server/internal/api/rreference/rreference.go b/packages/server/internal/api/rreference/rreference.go index fb77f3f41..4f0ad9fe0 100644 --- a/packages/server/internal/api/rreference/rreference.go +++ 
b/packages/server/internal/api/rreference/rreference.go @@ -23,6 +23,7 @@ import ( referencev1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/reference/v1" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/reference/v1/referencev1connect" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "connectrpc.com/connect" @@ -48,6 +49,9 @@ type ReferenceServiceRPC struct { // http httpResponseReader *shttp.HttpResponseReader + + // graphql + graphqlResponseReader *sgraphql.GraphQLResponseService } type ReferenceServiceRPCReaders struct { @@ -60,8 +64,9 @@ type ReferenceServiceRPCReaders struct { NodeRequest *sflow.NodeRequestReader FlowVariable *sflow.FlowVariableReader FlowEdge *sflow.EdgeReader - NodeExecution *sflow.NodeExecutionReader - HttpResponse *shttp.HttpResponseReader + NodeExecution *sflow.NodeExecutionReader + HttpResponse *shttp.HttpResponseReader + GraphQLResponse *sgraphql.GraphQLResponseService } func (r *ReferenceServiceRPCReaders) Validate() error { @@ -98,6 +103,9 @@ func (r *ReferenceServiceRPCReaders) Validate() error { if r.HttpResponse == nil { return fmt.Errorf("http response reader is required") } + if r.GraphQLResponse == nil { + return fmt.Errorf("graphql response reader is required") + } return nil } @@ -137,6 +145,7 @@ func NewReferenceServiceRPC(deps ReferenceServiceRPCDeps) *ReferenceServiceRPC { flowEdgeReader: deps.Readers.FlowEdge, nodeExecutionReader: deps.Readers.NodeExecution, httpResponseReader: deps.Readers.HttpResponse, + graphqlResponseReader: deps.Readers.GraphQLResponse, } } @@ -233,6 +242,59 @@ func (c *ReferenceServiceRPC) getLatestResponse(ctx context.Context, httpID idwr }, nil } +func (c *ReferenceServiceRPC) getLatestGraphQLResponse(ctx context.Context, graphqlID idwrap.IDWrap) (map[string]interface{}, error) { + responses, err := c.graphqlResponseReader.GetByGraphQLID(ctx, graphqlID) + if err != nil { + return nil, err + } + if len(responses) == 0 { + return nil, nil + } + + // Find latest response + latest := responses[0] + for _, r := range responses { + if r.Time > latest.Time { + latest = r + } + } + + // Parse body + var body interface{} = string(latest.Body) + var bodyMap map[string]interface{} + if len(latest.Body) > 0 { + var jsonBody interface{} + if err := json.Unmarshal(latest.Body, &jsonBody); err == nil { + body = jsonBody + if m, ok := jsonBody.(map[string]interface{}); ok { + bodyMap = m + } + } + } + + // Extract GraphQL-specific fields (data and errors) + var data interface{} + var errors interface{} + if bodyMap != nil { + if d, ok := bodyMap["data"]; ok { + data = d + } + if e, ok := bodyMap["errors"]; ok { + errors = e + } + } + + return map[string]interface{}{ + "status": latest.Status, + "body": body, + "data": data, + "errors": errors, + "headers": map[string]string{}, // Headers not currently linkable to specific response + "duration": latest.Duration, + "size": latest.Size, + }, nil +} + func (c *ReferenceServiceRPC) ReferenceTree(ctx context.Context, req *connect.Request[referencev1.ReferenceTreeRequest]) (*connect.Response[referencev1.ReferenceTreeResponse], error) { var Items []*referencev1.ReferenceTreeItem @@ -510,7 +572,7 @@ func (c *ReferenceServiceRPC) HandleNode(ctx context.Context, nodeID idwrap.IDWr // ReferenceCompletion calls reference.v1.ReferenceService.ReferenceCompletion. 
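// Illustrative sketch: how the assertion pieces in rgraphql above fit together. Only
// createAssertionEvalContext, evaluateAssertion, GraphQLResponseData and the context keys
// come from the code in this diff; the literals, the assertion strings, and the surrounding
// scope (package rgraphql, srv *GraphQLServiceRPC, ctx context.Context) are assumed.
resp := GraphQLResponseData{
	StatusCode: 200,
	Body:       []byte(`{"data": {"user": {"name": "Alice"}}}`),
	Headers:    map[string]string{"Content-Type": "application/json"},
}
evalCtx := srv.createAssertionEvalContext(resp)

// Plain keys (status, success, is_json, has_data, has_errors, body, data) are always
// available to an assertion string:
ok, err := srv.evaluateAssertion(ctx, `status == 200 && success && has_data`, evalCtx)

// Whether the stored helper funcs (json.path, json.has, dataJson.string, ...) can be called
// from an expression depends on the expression package's support for map-held functions;
// treat this second example as an assumption rather than a guarantee:
ok2, err2 := srv.evaluateAssertion(ctx, `json.has("data.user") && json.string("data.user.name") == "Alice"`, evalCtx)
_, _, _, _ = ok, err, ok2, err2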
func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *connect.Request[referencev1.ReferenceCompletionRequest]) (*connect.Response[referencev1.ReferenceCompletionResponse], error) { - var workspaceID, httpID, flowNodeID *idwrap.IDWrap + var workspaceID, httpID, graphqlID, flowNodeID *idwrap.IDWrap msg := req.Msg if msg.WorkspaceId != nil { tempID, err := idwrap.NewFromBytes(msg.WorkspaceId) @@ -526,6 +588,13 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn } httpID = &tempID } + if msg.GraphqlId != nil { + tempID, err := idwrap.NewFromBytes(msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + graphqlID = &tempID + } if msg.FlowNodeId != nil { tempID, err := idwrap.NewFromBytes(msg.FlowNodeId) if err != nil { @@ -592,6 +661,51 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn }) } + if graphqlID != nil { + resp, err := c.getLatestGraphQLResponse(ctx, *graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if resp != nil { + // Add full response object + creator.AddWithKey("response", resp) + + // Add GraphQL-specific top-level fields for convenience + if data, ok := resp["data"]; ok && data != nil { + creator.AddWithKey("data", data) + } + if errors, ok := resp["errors"]; ok && errors != nil { + creator.AddWithKey("errors", errors) + } + + // Add convenience variables + status := int(0) + if s, ok := resp["status"].(int32); ok { + status = int(s) + } + creator.AddWithKey("status", status) + creator.AddWithKey("success", status >= 200 && status < 300) + creator.AddWithKey("has_data", resp["data"] != nil) + creator.AddWithKey("has_errors", resp["errors"] != nil) + } else { + // Fallback schema for GraphQL + creator.AddWithKey("response", map[string]interface{}{ + "status": 200, + "body": map[string]interface{}{}, + "data": map[string]interface{}{}, + "errors": nil, + "headers": map[string]string{}, + "duration": 0, + }) + creator.AddWithKey("data", map[string]interface{}{}) + creator.AddWithKey("status", 200) + creator.AddWithKey("success", true) + creator.AddWithKey("has_data", false) + creator.AddWithKey("has_errors", false) + } + } + if flowNodeID != nil { nodeID := *flowNodeID nodeInst, err := c.nodeReader.GetNode(ctx, nodeID) @@ -927,7 +1041,7 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn // ReferenceValue calls reference.v1.ReferenceService.ReferenceValue. 
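// Illustrative sketch: the completion keys a GraphQL target contributes, mirroring the
// AddWithKey calls above (and the symmetric ReferenceValue branch below). The literal values
// shown are the fallback schema; the live path substitutes the latest stored response, and
// "data"/"errors" are only registered when the response body actually carries them.
gqlCompletionKeys := map[string]interface{}{
	"response": map[string]interface{}{
		"status":   200,
		"body":     map[string]interface{}{},
		"data":     map[string]interface{}{},
		"errors":   nil,
		"headers":  map[string]string{},
		"duration": 0,
	},
	"data":       map[string]interface{}{},
	"status":     200,
	"success":    true, // status >= 200 && status < 300
	"has_data":   false,
	"has_errors": false,
}
_ = gqlCompletionKeys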
func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.Request[referencev1.ReferenceValueRequest]) (*connect.Response[referencev1.ReferenceValueResponse], error) { - var workspaceID, httpID, flowNodeID *idwrap.IDWrap + var workspaceID, httpID, graphqlID, flowNodeID *idwrap.IDWrap msg := req.Msg if msg.WorkspaceId != nil { tempID, err := idwrap.NewFromBytes(msg.WorkspaceId) @@ -943,6 +1057,13 @@ func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.R } httpID = &tempID } + if msg.GraphqlId != nil { + tempID, err := idwrap.NewFromBytes(msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + graphqlID = &tempID + } if msg.FlowNodeId != nil { tempID, err := idwrap.NewFromBytes(msg.FlowNodeId) if err != nil { @@ -1009,6 +1130,51 @@ func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.R }) } + if graphqlID != nil { + resp, err := c.getLatestGraphQLResponse(ctx, *graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if resp != nil { + // Add full response object + lookup.AddWithKey("response", resp) + + // Add GraphQL-specific top-level fields for convenience + if data, ok := resp["data"]; ok && data != nil { + lookup.AddWithKey("data", data) + } + if errors, ok := resp["errors"]; ok && errors != nil { + lookup.AddWithKey("errors", errors) + } + + // Add convenience variables + status := int(0) + if s, ok := resp["status"].(int32); ok { + status = int(s) + } + lookup.AddWithKey("status", status) + lookup.AddWithKey("success", status >= 200 && status < 300) + lookup.AddWithKey("has_data", resp["data"] != nil) + lookup.AddWithKey("has_errors", resp["errors"] != nil) + } else { + // Fallback schema for GraphQL + lookup.AddWithKey("response", map[string]interface{}{ + "status": 200, + "body": map[string]interface{}{}, + "data": map[string]interface{}{}, + "errors": nil, + "headers": map[string]string{}, + "duration": 0, + }) + lookup.AddWithKey("data", map[string]interface{}{}) + lookup.AddWithKey("status", 200) + lookup.AddWithKey("success", true) + lookup.AddWithKey("has_data", false) + lookup.AddWithKey("has_errors", false) + } + } + if flowNodeID != nil { nodeID := *flowNodeID nodeInst, err := c.nodeReader.GetNode(ctx, nodeID) diff --git a/packages/server/internal/converter/converter.go b/packages/server/internal/converter/converter.go index f2aab59ee..1bb61310a 100644 --- a/packages/server/internal/converter/converter.go +++ b/packages/server/internal/converter/converter.go @@ -8,16 +8,18 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" - "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" credentialv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/credential/v1" environmentv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/environment/v1" filev1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/file_system/v1" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" + graphqlv1 
"github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" httpv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/http/v1" ) @@ -375,6 +377,8 @@ func ToAPINodeKind(kind mflow.NodeKind) flowv1.NodeKind { return flowv1.NodeKind_NODE_KIND_AI_PROVIDER case mflow.NODE_KIND_AI_MEMORY: return flowv1.NodeKind_NODE_KIND_AI_MEMORY + case mflow.NODE_KIND_GRAPHQL: + return flowv1.NodeKind_NODE_KIND_GRAPH_Q_L default: return flowv1.NodeKind_NODE_KIND_UNSPECIFIED } @@ -466,3 +470,14 @@ func ToAPIErrorHandling(eh mflow.ErrorHandling) flowv1.ErrorHandling { return flowv1.ErrorHandling_ERROR_HANDLING_UNSPECIFIED } } + +// ToAPIGraphQLAssert converts model GraphQLAssert to API GraphQLAssert +func ToAPIGraphQLAssert(assert mgraphql.GraphQLAssert) *graphqlv1.GraphQLAssert { + return &graphqlv1.GraphQLAssert{ + GraphqlAssertId: assert.ID.Bytes(), + GraphqlId: assert.GraphQLID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Order: assert.DisplayOrder, + } +} diff --git a/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go b/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go new file mode 100644 index 000000000..227aa61b3 --- /dev/null +++ b/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go @@ -0,0 +1,316 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + "strings" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/migrate" +) + +// MigrationAddGraphQLTablesID is the ULID for the GraphQL tables migration. +const MigrationAddGraphQLTablesID = "01KHDYWX1KV5MX8H9MNTPCWDV9" + +// MigrationAddGraphQLTablesChecksum is a stable hash of this migration. +const MigrationAddGraphQLTablesChecksum = "sha256:add-graphql-tables-v1" + +func init() { + if err := migrate.Register(migrate.Migration{ + ID: MigrationAddGraphQLTablesID, + Checksum: MigrationAddGraphQLTablesChecksum, + Description: "Add GraphQL request, header, response, and response header tables", + Apply: applyGraphQLTables, + Validate: validateGraphQLTables, + RequiresBackup: true, + }); err != nil { + panic("failed to register GraphQL tables migration: " + err.Error()) + } +} + +// applyGraphQLTables creates all GraphQL-related tables: +// - graphql (core request) +// - graphql_header (request headers) +// - graphql_response (cached response) +// - graphql_response_header (response headers) +func applyGraphQLTables(ctx context.Context, tx *sql.Tx) error { + // 1. Create graphql table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + folder_id BLOB, + name TEXT NOT NULL, + url TEXT NOT NULL, + query TEXT NOT NULL DEFAULT '', + variables TEXT NOT NULL DEFAULT '', + description TEXT NOT NULL DEFAULT '', + last_run_at BIGINT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (folder_id) REFERENCES files (id) ON DELETE SET NULL + ) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_workspace_idx ON graphql (workspace_id) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_folder_idx ON graphql (folder_id) WHERE folder_id IS NOT NULL + `); err != nil { + return err + } + + // 2. 
Create graphql_header table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_header ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + header_key TEXT NOT NULL, + header_value TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + enabled BOOLEAN NOT NULL DEFAULT TRUE, + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_header_graphql_idx ON graphql_header (graphql_id) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_header_order_idx ON graphql_header (graphql_id, display_order) + `); err != nil { + return err + } + + // 3. Create graphql_response table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_response ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + status INT32 NOT NULL, + body BLOB, + time DATETIME NOT NULL, + duration INT32 NOT NULL, + size INT32 NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_response_graphql_idx ON graphql_response (graphql_id) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_response_time_idx ON graphql_response (graphql_id, time DESC) + `); err != nil { + return err + } + + // 4. Create graphql_response_header table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_response_header ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + key TEXT NOT NULL, + value TEXT NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_response_header_response_idx ON graphql_response_header (response_id) + `); err != nil { + return err + } + + // 5. Create flow_node_graphql table (links flow nodes to GraphQL requests) + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS flow_node_graphql ( + flow_node_id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + // 6. Add graphql_response_id column to node_execution table + // Check if column already exists before adding + var colCount int + err := tx.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('node_execution') + WHERE name = 'graphql_response_id' + `).Scan(&colCount) + if err != nil { + return fmt.Errorf("check node_execution column: %w", err) + } + if colCount == 0 { + if _, err := tx.ExecContext(ctx, ` + ALTER TABLE node_execution ADD COLUMN graphql_response_id BLOB + REFERENCES graphql_response (id) ON DELETE SET NULL + `); err != nil { + return err + } + } + + // 7. 
Update files table CHECK constraint to allow content_kind = 5 (graphql) + // SQLite requires table recreation to modify CHECK constraints + if err := updateFilesCheckConstraint(ctx, tx); err != nil { + return fmt.Errorf("update files check constraint: %w", err) + } + + return nil +} + +// updateFilesCheckConstraint recreates the files table with GraphQL content_kind support. +func updateFilesCheckConstraint(ctx context.Context, tx *sql.Tx) error { + // Check if already updated (content_kind = 5 works) + // We detect by checking the table SQL for "5" + var tableSql string + err := tx.QueryRowContext(ctx, ` + SELECT sql FROM sqlite_master WHERE type='table' AND name='files' + `).Scan(&tableSql) + if err != nil { + return fmt.Errorf("read files table schema: %w", err) + } + // If the constraint already includes 5, skip + if strings.Contains(tableSql, "4, 5)") { + return nil + } + + // Recreate table with updated CHECK constraints + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE files_new ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + parent_id BLOB, + content_id BLOB, + content_kind INT8 NOT NULL DEFAULT 0, + name TEXT NOT NULL, + display_order REAL NOT NULL DEFAULT 0, + path_hash TEXT, + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + CHECK (length (id) == 16), + CHECK (content_kind IN (0, 1, 2, 3, 4, 5)), + CHECK ( + (content_kind = 0 AND content_id IS NOT NULL) OR + (content_kind = 1 AND content_id IS NOT NULL) OR + (content_kind = 2 AND content_id IS NOT NULL) OR + (content_kind = 3 AND content_id IS NOT NULL) OR + (content_kind = 4 AND content_id IS NOT NULL) OR + (content_kind = 5 AND content_id IS NOT NULL) OR + (content_id IS NULL) + ), + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (parent_id) REFERENCES files (id) ON DELETE SET NULL + ) + `); err != nil { + return fmt.Errorf("create files_new: %w", err) + } + + if _, err := tx.ExecContext(ctx, ` + INSERT INTO files_new SELECT * FROM files + `); err != nil { + return fmt.Errorf("copy files data: %w", err) + } + + if _, err := tx.ExecContext(ctx, `DROP TABLE files`); err != nil { + return fmt.Errorf("drop old files: %w", err) + } + + if _, err := tx.ExecContext(ctx, `ALTER TABLE files_new RENAME TO files`); err != nil { + return fmt.Errorf("rename files_new: %w", err) + } + + // Recreate indexes + indexes := []string{ + `CREATE INDEX files_workspace_idx ON files (workspace_id)`, + `CREATE UNIQUE INDEX files_path_hash_idx ON files (workspace_id, path_hash) WHERE path_hash IS NOT NULL`, + `CREATE INDEX files_hierarchy_idx ON files (workspace_id, parent_id, display_order)`, + `CREATE INDEX files_content_lookup_idx ON files (content_kind, content_id) WHERE content_id IS NOT NULL`, + `CREATE INDEX files_parent_lookup_idx ON files (parent_id, display_order) WHERE parent_id IS NOT NULL`, + `CREATE INDEX files_name_search_idx ON files (workspace_id, name)`, + `CREATE INDEX files_kind_filter_idx ON files (workspace_id, content_kind)`, + `CREATE INDEX files_workspace_hierarchy_idx ON files (workspace_id, parent_id, content_kind, display_order)`, + } + for _, idx := range indexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("recreate index: %w", err) + } + } + + return nil +} + + +// validateGraphQLTables verifies all tables and indexes were created successfully. 
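// Illustrative sketch: what the relaxed CHECK constraint enables. A files row can now use
// content_kind = 5, assuming kind 5 follows the same convention as the existing kinds
// (content_id pointing at the graphql row). The IDs and the surrounding transaction are
// assumed to be in scope; this snippet is not part of the migration itself.
_, err := tx.ExecContext(ctx, `
	INSERT INTO files (id, workspace_id, content_id, content_kind, name)
	VALUES (?, ?, ?, 5, ?)
`, fileID, workspaceID, graphqlID, "GetUser query")
if err != nil {
	return fmt.Errorf("link graphql file entry: %w", err)
}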
+func validateGraphQLTables(ctx context.Context, db *sql.DB) error { + tables := []string{ + "graphql", + "graphql_header", + "graphql_response", + "graphql_response_header", + "flow_node_graphql", + } + + for _, table := range tables { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='table' AND name=? + `, table).Scan(&name) + if err != nil { + return fmt.Errorf("table %s not found: %w", table, err) + } + } + + indexes := []string{ + "graphql_workspace_idx", + "graphql_folder_idx", + "graphql_header_graphql_idx", + "graphql_header_order_idx", + "graphql_response_graphql_idx", + "graphql_response_time_idx", + "graphql_response_header_response_idx", + } + + for _, idx := range indexes { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='index' AND name=? + `, idx).Scan(&name) + if err != nil { + return fmt.Errorf("index %s not found: %w", idx, err) + } + } + + return nil +} diff --git a/packages/server/internal/migrations/01KHEX5H_add_graphql_delta.go b/packages/server/internal/migrations/01KHEX5H_add_graphql_delta.go new file mode 100644 index 000000000..e2382ab51 --- /dev/null +++ b/packages/server/internal/migrations/01KHEX5H_add_graphql_delta.go @@ -0,0 +1,245 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/migrate" +) + +// MigrationAddGraphQLDeltaID is the ULID for the GraphQL delta system migration. +const MigrationAddGraphQLDeltaID = "01KHEX5HB7REY2NXDPCYFS6S02" + +// MigrationAddGraphQLDeltaChecksum is a stable hash of this migration. +const MigrationAddGraphQLDeltaChecksum = "sha256:add-graphql-delta-v1" + +func init() { + if err := migrate.Register(migrate.Migration{ + ID: MigrationAddGraphQLDeltaID, + Checksum: MigrationAddGraphQLDeltaChecksum, + Description: "Add delta/variant support to GraphQL tables for flow node overrides", + Apply: applyGraphQLDelta, + Validate: validateGraphQLDelta, + RequiresBackup: true, + }); err != nil { + panic("failed to register GraphQL delta migration: " + err.Error()) + } +} + +// applyGraphQLDelta adds delta system fields to GraphQL tables. +func applyGraphQLDelta(ctx context.Context, tx *sql.Tx) error { + // 1. Add delta system fields to graphql table + graphqlColumns := []struct { + name string + sqlType string + }{ + {"parent_graphql_id", "BLOB DEFAULT NULL"}, + {"is_delta", "BOOLEAN NOT NULL DEFAULT FALSE"}, + {"is_snapshot", "BOOLEAN NOT NULL DEFAULT FALSE"}, + {"delta_name", "TEXT NULL"}, + {"delta_url", "TEXT NULL"}, + {"delta_query", "TEXT NULL"}, + {"delta_variables", "TEXT NULL"}, + {"delta_description", "TEXT NULL"}, + } + + for _, col := range graphqlColumns { + if err := addColumnIfNotExists(ctx, tx, "graphql", col.name, col.sqlType); err != nil { + return fmt.Errorf("add graphql.%s: %w", col.name, err) + } + } + + // 2. Add indexes for graphql delta resolution and performance + graphqlIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_parent_delta_idx ON graphql (parent_graphql_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_delta_resolution_idx ON graphql (parent_graphql_id, is_delta, updated_at DESC)`, + `CREATE INDEX IF NOT EXISTS graphql_active_streaming_idx ON graphql (workspace_id, updated_at DESC) WHERE is_delta = FALSE`, + } + + for _, idx := range graphqlIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql index: %w", err) + } + } + + // 3. 
Add delta system fields to graphql_header table + headerColumns := []struct { + name string + sqlType string + }{ + {"parent_graphql_header_id", "BLOB DEFAULT NULL"}, + {"is_delta", "BOOLEAN NOT NULL DEFAULT FALSE"}, + {"delta_header_key", "TEXT NULL"}, + {"delta_header_value", "TEXT NULL"}, + {"delta_description", "TEXT NULL"}, + {"delta_enabled", "BOOLEAN NULL"}, + {"delta_display_order", "REAL NULL"}, + } + + for _, col := range headerColumns { + if err := addColumnIfNotExists(ctx, tx, "graphql_header", col.name, col.sqlType); err != nil { + return fmt.Errorf("add graphql_header.%s: %w", col.name, err) + } + } + + // 4. Add indexes for graphql_header delta support + headerIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_header_parent_delta_idx ON graphql_header (parent_graphql_header_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_header_delta_streaming_idx ON graphql_header (parent_graphql_header_id, is_delta, updated_at DESC)`, + } + + for _, idx := range headerIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_header index: %w", err) + } + } + + // 5. Add delta system fields to graphql_assert table + assertColumns := []struct { + name string + sqlType string + }{ + {"parent_graphql_assert_id", "BLOB DEFAULT NULL"}, + {"is_delta", "BOOLEAN NOT NULL DEFAULT FALSE"}, + {"delta_value", "TEXT NULL"}, + {"delta_enabled", "BOOLEAN NULL"}, + {"delta_description", "TEXT NULL"}, + {"delta_display_order", "REAL NULL"}, + } + + for _, col := range assertColumns { + if err := addColumnIfNotExists(ctx, tx, "graphql_assert", col.name, col.sqlType); err != nil { + return fmt.Errorf("add graphql_assert.%s: %w", col.name, err) + } + } + + // 6. Add indexes for graphql_assert delta support + assertIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_assert_parent_delta_idx ON graphql_assert (parent_graphql_assert_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_assert_delta_streaming_idx ON graphql_assert (parent_graphql_assert_id, is_delta, updated_at DESC)`, + } + + for _, idx := range assertIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_assert index: %w", err) + } + } + + return nil +} + +// addColumnIfNotExists adds a column to a table if it doesn't already exist. +func addColumnIfNotExists(ctx context.Context, tx *sql.Tx, table, column, sqlType string) error { + var colCount int + err := tx.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info(?) + WHERE name = ? + `, table, column).Scan(&colCount) + if err != nil { + return fmt.Errorf("check column existence: %w", err) + } + if colCount > 0 { + return nil // Column already exists + } + + query := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s", table, column, sqlType) + if _, err := tx.ExecContext(ctx, query); err != nil { + return fmt.Errorf("alter table: %w", err) + } + return nil +} + +// validateGraphQLDelta verifies all delta columns and indexes were created successfully. 
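// Illustrative sketch: addColumnIfNotExists (defined just above) is what makes this
// migration safe to re-run, since a bare ALTER TABLE ... ADD COLUMN fails when the column
// already exists. The wrapper function below is assumed, not part of the migration; the
// table and column names are the real ones used above.
func applyOneDeltaColumn(ctx context.Context, tx *sql.Tx) error {
	// No-op when graphql.is_snapshot already exists; otherwise runs
	// ALTER TABLE graphql ADD COLUMN is_snapshot BOOLEAN NOT NULL DEFAULT FALSE.
	if err := addColumnIfNotExists(ctx, tx, "graphql", "is_snapshot", "BOOLEAN NOT NULL DEFAULT FALSE"); err != nil {
		return fmt.Errorf("add graphql.is_snapshot: %w", err)
	}
	return nil
}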
+func validateGraphQLDelta(ctx context.Context, db *sql.DB) error { + // Verify graphql table columns + graphqlColumns := []string{ + "parent_graphql_id", + "is_delta", + "is_snapshot", + "delta_name", + "delta_url", + "delta_query", + "delta_variables", + "delta_description", + } + + for _, col := range graphqlColumns { + if err := verifyColumnExists(ctx, db, "graphql", col); err != nil { + return err + } + } + + // Verify graphql_header table columns + headerColumns := []string{ + "parent_graphql_header_id", + "is_delta", + "delta_header_key", + "delta_header_value", + "delta_description", + "delta_enabled", + "delta_display_order", + } + + for _, col := range headerColumns { + if err := verifyColumnExists(ctx, db, "graphql_header", col); err != nil { + return err + } + } + + // Verify graphql_assert table columns + assertColumns := []string{ + "parent_graphql_assert_id", + "is_delta", + "delta_value", + "delta_enabled", + "delta_description", + "delta_display_order", + } + + for _, col := range assertColumns { + if err := verifyColumnExists(ctx, db, "graphql_assert", col); err != nil { + return err + } + } + + // Verify indexes + indexes := []string{ + "graphql_parent_delta_idx", + "graphql_delta_resolution_idx", + "graphql_active_streaming_idx", + "graphql_header_parent_delta_idx", + "graphql_header_delta_streaming_idx", + "graphql_assert_parent_delta_idx", + "graphql_assert_delta_streaming_idx", + } + + for _, idx := range indexes { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='index' AND name=? + `, idx).Scan(&name) + if err != nil { + return fmt.Errorf("index %s not found: %w", idx, err) + } + } + + return nil +} + +// verifyColumnExists checks if a column exists in a table. +func verifyColumnExists(ctx context.Context, db *sql.DB, table, column string) error { + var colCount int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info(?) + WHERE name = ? 
+ `, table, column).Scan(&colCount) + if err != nil { + return fmt.Errorf("check %s.%s: %w", table, column, err) + } + if colCount == 0 { + return fmt.Errorf("column %s.%s not found", table, column) + } + return nil +} diff --git a/packages/server/internal/migrations/migrations_test.go b/packages/server/internal/migrations/migrations_test.go index c9948a0cd..00932679b 100644 --- a/packages/server/internal/migrations/migrations_test.go +++ b/packages/server/internal/migrations/migrations_test.go @@ -210,7 +210,7 @@ func TestFilesTableConstraintUpdated(t *testing.T) { t.Fatalf("failed to run migrations: %v", err) } - // Verify files table supports content_kind=4 + // Verify files table supports content_kind=5 (graphql) var tableDef string err = db.QueryRowContext(ctx, ` SELECT sql FROM sqlite_master @@ -220,9 +220,9 @@ func TestFilesTableConstraintUpdated(t *testing.T) { t.Fatalf("failed to get files table definition: %v", err) } - // Check that the constraint includes content_kind=4 - if !contains(tableDef, "content_kind IN (0, 1, 2, 3, 4)") { - t.Errorf("files table CHECK constraint doesn't include content_kind=4: %s", tableDef) + // Check that the constraint includes content_kind=5 + if !contains(tableDef, "content_kind IN (0, 1, 2, 3, 4, 5)") { + t.Errorf("files table CHECK constraint doesn't include content_kind=5: %s", tableDef) } } @@ -238,3 +238,118 @@ func containsHelper(s, substr string) bool { } return false } + +func TestGraphQLDeltaColumnsCreated(t *testing.T) { + ctx := context.Background() + + db, cleanup, err := sqlitemem.NewSQLiteMem(ctx) + if err != nil { + t.Fatalf("failed to create test db: %v", err) + } + t.Cleanup(cleanup) + + cfg := Config{ + DatabasePath: ":memory:", + DataDir: t.TempDir(), + } + if err := Run(ctx, db, cfg); err != nil { + t.Fatalf("failed to run migrations: %v", err) + } + + // Verify graphql table delta columns + graphqlColumns := []string{ + "parent_graphql_id", + "is_delta", + "is_snapshot", + "delta_name", + "delta_url", + "delta_query", + "delta_variables", + "delta_description", + } + + for _, col := range graphqlColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql') + WHERE name = ? + `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql table missing column: %s", col) + } + } + + // Verify graphql_header table delta columns + headerColumns := []string{ + "parent_graphql_header_id", + "is_delta", + "delta_header_key", + "delta_header_value", + "delta_description", + "delta_enabled", + "delta_display_order", + } + + for _, col := range headerColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql_header') + WHERE name = ? + `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql_header.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql_header table missing column: %s", col) + } + } + + // Verify graphql_assert table delta columns + assertColumns := []string{ + "parent_graphql_assert_id", + "is_delta", + "delta_value", + "delta_enabled", + "delta_description", + "delta_display_order", + } + + for _, col := range assertColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql_assert') + WHERE name = ? 
+ `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql_assert.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql_assert table missing column: %s", col) + } + } + + // Verify delta indexes were created + indexes := []string{ + "graphql_parent_delta_idx", + "graphql_delta_resolution_idx", + "graphql_active_streaming_idx", + "graphql_header_parent_delta_idx", + "graphql_header_delta_streaming_idx", + "graphql_assert_parent_delta_idx", + "graphql_assert_delta_streaming_idx", + } + + for _, idx := range indexes { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='index' AND name=? + `, idx).Scan(&name) + if err != nil { + t.Errorf("index %s not found: %v", idx, err) + } + } +} diff --git a/packages/server/pkg/delta/delta.go b/packages/server/pkg/delta/delta.go index 0bed30ba9..76c0e5528 100644 --- a/packages/server/pkg/delta/delta.go +++ b/packages/server/pkg/delta/delta.go @@ -5,6 +5,7 @@ import ( "sort" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" ) @@ -428,3 +429,206 @@ func orderAsserts(asserts []mhttp.HTTPAssert) []mhttp.HTTPAssert { return ordered } + +// GraphQL Delta Resolution + +// ResolveGraphQLInput holds the base and delta information required for GraphQL resolution. +type ResolveGraphQLInput struct { + Base, Delta mgraphql.GraphQL + BaseHeaders, DeltaHeaders []mgraphql.GraphQLHeader + BaseAsserts, DeltaAsserts []mgraphql.GraphQLAssert +} + +// ResolveGraphQLOutput holds the fully resolved GraphQL request. +type ResolveGraphQLOutput struct { + Resolved mgraphql.GraphQL + ResolvedHeaders []mgraphql.GraphQLHeader + ResolvedAsserts []mgraphql.GraphQLAssert +} + +// ResolveGraphQL merges a base GraphQL request with a delta, applying overrides +// based on the Delta System architecture (Overlay Pattern). +func ResolveGraphQL(input ResolveGraphQLInput) ResolveGraphQLOutput { + output := ResolveGraphQLOutput{} + + // 1. Resolve Root GraphQL Entity + output.Resolved = resolveGraphQLScalar(input.Base, input.Delta) + + // 2. Resolve Collections + output.ResolvedHeaders = resolveGraphQLHeaders(input.BaseHeaders, input.DeltaHeaders) + output.ResolvedAsserts = resolveGraphQLAsserts(input.BaseAsserts, input.DeltaAsserts) + + return output +} + +// resolveGraphQLScalar applies delta scalar overrides to the base entity. +func resolveGraphQLScalar(base, delta mgraphql.GraphQL) mgraphql.GraphQL { + resolved := base + + // Explicitly set ID to Base ID (The "Identity" remains the Base) + resolved.ID = base.ID + resolved.IsDelta = false // The resolved object is a "Live" representation + + // Apply Overrides if Delta* fields are present (non-nil) + if delta.DeltaName != nil { + resolved.Name = *delta.DeltaName + } + if delta.DeltaUrl != nil { + resolved.Url = *delta.DeltaUrl + } + if delta.DeltaQuery != nil { + resolved.Query = *delta.DeltaQuery + } + if delta.DeltaVariables != nil { + resolved.Variables = *delta.DeltaVariables + } + if delta.DeltaDescription != nil { + resolved.Description = *delta.DeltaDescription + } + + // Clear delta fields in the resolved object to avoid ambiguity + resolved.DeltaName = nil + resolved.DeltaUrl = nil + resolved.DeltaQuery = nil + resolved.DeltaVariables = nil + resolved.DeltaDescription = nil + + return resolved +} + +// resolveGraphQLHeaders resolves GraphQL Headers. 
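// Illustrative sketch of the overlay semantics implemented by ResolveGraphQL and
// resolveGraphQLScalar (assumed imports: pkg/delta, pkg/model/mgraphql, pkg/idwrap; all
// field values are made up). A delta that only overrides the query: the resolved request
// keeps the base identity and URL, takes the delta's query, and comes back with
// IsDelta=false and the Delta* pointers cleared.
base := mgraphql.GraphQL{
	ID:    idwrap.NewNow(),
	Name:  "GetUser",
	Url:   "https://api.example.com/graphql",
	Query: "query { user { name } }",
}
newQuery := "query { user { id name email } }"
override := mgraphql.GraphQL{
	ID:         idwrap.NewNow(),
	IsDelta:    true,
	DeltaQuery: &newQuery,
}
out := delta.ResolveGraphQL(delta.ResolveGraphQLInput{Base: base, Delta: override})
// out.Resolved.ID == base.ID, out.Resolved.Query == newQuery,
// out.Resolved.Url == base.Url (no DeltaUrl set, so the base value survives).
_ = out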
+func resolveGraphQLHeaders(base []mgraphql.GraphQLHeader, delta []mgraphql.GraphQLHeader) []mgraphql.GraphQLHeader { + overrideMap := make(map[idwrap.IDWrap]mgraphql.GraphQLHeader) + additions := make([]mgraphql.GraphQLHeader, 0) + + for _, d := range delta { + if d.ParentGraphQLHeaderID != nil { + overrideMap[*d.ParentGraphQLHeaderID] = d + } else { + additions = append(additions, d) + } + } + + resolved := make([]mgraphql.GraphQLHeader, 0, len(base)+len(additions)) + + for _, b := range base { + if override, ok := overrideMap[b.ID]; ok { + merged := b + if override.DeltaKey != nil { + merged.Key = *override.DeltaKey + } + if override.DeltaValue != nil { + merged.Value = *override.DeltaValue + } + if override.DeltaDescription != nil { + merged.Description = *override.DeltaDescription + } + if override.DeltaEnabled != nil { + merged.Enabled = *override.DeltaEnabled + } + + merged.IsDelta = false + merged.ParentGraphQLHeaderID = nil + merged.DeltaKey = nil + merged.DeltaValue = nil + merged.DeltaDescription = nil + merged.DeltaEnabled = nil + + resolved = append(resolved, merged) + } else { + resolved = append(resolved, b) + } + } + + for _, a := range additions { + item := a + item.IsDelta = false + resolved = append(resolved, item) + } + + return resolved +} + +// resolveGraphQLAsserts resolves GraphQL Asserts using specific ordering logic. +func resolveGraphQLAsserts(base, delta []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + // 1. Order the inputs first to ensure we process them in the correct logical order + orderedBase := orderGraphQLAsserts(base) + if len(delta) == 0 { + return orderedBase + } + orderedDelta := orderGraphQLAsserts(delta) + + // 2. Map Base items + baseMap := make(map[idwrap.IDWrap]mgraphql.GraphQLAssert, len(orderedBase)) + baseOrder := make([]idwrap.IDWrap, 0, len(orderedBase)) + for _, assert := range orderedBase { + baseMap[assert.ID] = assert + baseOrder = append(baseOrder, assert.ID) + } + + // 3. Process Deltas (Overrides and Additions) + additions := make([]mgraphql.GraphQLAssert, 0) + for _, d := range orderedDelta { + if d.ParentGraphQLAssertID != nil { + if b, exists := baseMap[*d.ParentGraphQLAssertID]; exists { + // Apply Overrides + merged := b + if d.DeltaValue != nil { + merged.Value = *d.DeltaValue + } + if d.DeltaDescription != nil { + merged.Description = *d.DeltaDescription + } + if d.DeltaEnabled != nil { + merged.Enabled = *d.DeltaEnabled + } + + merged.IsDelta = false + merged.ParentGraphQLAssertID = nil + merged.DeltaValue = nil + merged.DeltaDescription = nil + merged.DeltaEnabled = nil + + baseMap[*d.ParentGraphQLAssertID] = merged + } + } else { + // New Addition + item := d + item.IsDelta = false + additions = append(additions, item) + } + } + + // 4. Reconstruct the list + merged := make([]mgraphql.GraphQLAssert, 0, len(baseMap)+len(additions)) + + // Add base items (which may be merged/updated) in original order + for _, id := range baseOrder { + if assert, exists := baseMap[id]; exists { + merged = append(merged, assert) + } + } + + // Append additions (ensure they are also ordered relative to each other if possible) + if len(additions) > 0 { + merged = append(merged, orderGraphQLAsserts(additions)...) + } + + return merged +} + +// orderGraphQLAsserts orders asserts by DisplayOrder field. +func orderGraphQLAsserts(asserts []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + if len(asserts) <= 1 { + return append([]mgraphql.GraphQLAssert(nil), asserts...) 
+ } + + // Create a copy and sort by DisplayOrder field + ordered := make([]mgraphql.GraphQLAssert, len(asserts)) + copy(ordered, asserts) + sort.Slice(ordered, func(i, j int) bool { + return ordered[i].DisplayOrder < ordered[j].DisplayOrder + }) + + return ordered +} diff --git a/packages/server/pkg/flow/flowbuilder/builder.go b/packages/server/pkg/flow/flowbuilder/builder.go index 49389b52c..e7dda7f9d 100644 --- a/packages/server/pkg/flow/flowbuilder/builder.go +++ b/packages/server/pkg/flow/flowbuilder/builder.go @@ -13,12 +13,14 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nai" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nfor" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nforeach" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nif" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/njs" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nmemory" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/naiprovider" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nstart" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" @@ -27,6 +29,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/scredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/node_js_executor/v1/node_js_executorv1connect" ) @@ -41,12 +44,16 @@ type Builder struct { NodeAI *sflow.NodeAIService NodeAiProvider *sflow.NodeAiProviderService NodeMemory *sflow.NodeMemoryService + NodeGraphQL *sflow.NodeGraphQLService + GraphQL *sgraphql.GraphQLService + GraphQLHeader *sgraphql.GraphQLHeaderService Workspace *sworkspace.WorkspaceService Variable *senv.VariableService FlowVariable *sflow.FlowVariableService Resolver resolver.RequestResolver + GraphQLResolver gqlresolver.GraphQLResolver Logger *slog.Logger LLMProviderFactory *scredential.LLMProviderFactory } @@ -61,10 +68,14 @@ func New( nais *sflow.NodeAIService, naps *sflow.NodeAiProviderService, nmems *sflow.NodeMemoryService, + ngqs *sflow.NodeGraphQLService, + gqls *sgraphql.GraphQLService, + gqlhs *sgraphql.GraphQLHeaderService, ws *sworkspace.WorkspaceService, vs *senv.VariableService, fvs *sflow.FlowVariableService, resolver resolver.RequestResolver, + graphQLResolver gqlresolver.GraphQLResolver, logger *slog.Logger, llmFactory *scredential.LLMProviderFactory, ) *Builder { @@ -78,10 +89,14 @@ func New( NodeAI: nais, NodeAiProvider: naps, NodeMemory: nmems, + NodeGraphQL: ngqs, + GraphQL: gqls, + GraphQLHeader: gqlhs, Workspace: ws, Variable: vs, FlowVariable: fvs, Resolver: resolver, + GraphQLResolver: graphQLResolver, Logger: logger, LLMProviderFactory: llmFactory, } @@ -94,6 +109,7 @@ func (b *Builder) BuildNodes( timeout time.Duration, httpClient httpclient.HttpClient, respChan chan 
nrequest.NodeRequestSideResp, + gqlRespChan chan ngraphql.NodeGraphQLSideResp, jsClient node_js_executorv1connect.NodeJsExecutorServiceClient, ) (map[idwrap.IDWrap]node.FlowNode, idwrap.IDWrap, error) { flowNodeMap := make(map[idwrap.IDWrap]node.FlowNode, len(nodes)) @@ -264,6 +280,31 @@ func (b *Builder) BuildNodes( memoryCfg.WindowSize, ) } + case mflow.NODE_KIND_GRAPHQL: + gqlCfg, err := b.NodeGraphQL.GetNodeGraphQL(ctx, nodeModel.ID) + if err != nil { + return nil, idwrap.IDWrap{}, err + } + if gqlCfg == nil || gqlCfg.GraphQLID == nil || isZeroID(*gqlCfg.GraphQLID) { + return nil, idwrap.IDWrap{}, fmt.Errorf("graphql node %s missing graphql configuration", nodeModel.ID.String()) + } + + // Resolve GraphQL entity with delta + resolved, err := b.GraphQLResolver.Resolve(ctx, *gqlCfg.GraphQLID, gqlCfg.DeltaGraphQLID) + if err != nil { + return nil, idwrap.IDWrap{}, fmt.Errorf("resolve graphql %s: %w", gqlCfg.GraphQLID.String(), err) + } + + flowNodeMap[nodeModel.ID] = ngraphql.New( + nodeModel.ID, + nodeModel.Name, + resolved.Resolved, + resolved.ResolvedHeaders, + resolved.ResolvedAsserts, + httpClient, + gqlRespChan, + b.Logger, + ) default: return nil, idwrap.IDWrap{}, fmt.Errorf("node kind %d not supported", nodeModel.NodeKind) } diff --git a/packages/server/pkg/flow/node/ngraphql/ngraphql.go b/packages/server/pkg/flow/node/ngraphql/ngraphql.go new file mode 100644 index 000000000..27e03ae3b --- /dev/null +++ b/packages/server/pkg/flow/node/ngraphql/ngraphql.go @@ -0,0 +1,379 @@ +//nolint:revive // exported +package ngraphql + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log/slog" + "net/http" + "time" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node" + graphqlresponse "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/response" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type NodeGraphQL struct { + FlowNodeID idwrap.IDWrap + Name string + + GraphQL mgraphql.GraphQL + Headers []mgraphql.GraphQLHeader + Asserts []mgraphql.GraphQLAssert + HttpClient httpclient.HttpClient + SideRespChan chan NodeGraphQLSideResp + logger *slog.Logger +} + +type NodeGraphQLSideResp struct { + ExecutionID idwrap.IDWrap + GraphQL mgraphql.GraphQL + Headers []mgraphql.GraphQLHeader + Response mgraphql.GraphQLResponse + RespHeaders []mgraphql.GraphQLResponseHeader + RespAsserts []mgraphql.GraphQLResponseAssert + Done chan struct{} +} + +const ( + outputResponseName = "response" + outputRequestName = "request" +) + +type graphqlRequestBody struct { + Query string `json:"query"` + Variables json.RawMessage `json:"variables,omitempty"` +} + +func New( + id idwrap.IDWrap, + name string, + gql mgraphql.GraphQL, + headers []mgraphql.GraphQLHeader, + asserts []mgraphql.GraphQLAssert, + httpClient httpclient.HttpClient, + sideRespChan chan NodeGraphQLSideResp, + logger *slog.Logger, +) *NodeGraphQL { + return &NodeGraphQL{ + FlowNodeID: id, + Name: name, + GraphQL: gql, + Headers: headers, + Asserts: asserts, + HttpClient: httpClient, + SideRespChan: sideRespChan, + logger: logger, + } +} + +func (n *NodeGraphQL) GetID() idwrap.IDWrap { + return n.FlowNodeID +} + +func (n *NodeGraphQL) SetID(id idwrap.IDWrap) { + n.FlowNodeID = id +} + +func (n 
*NodeGraphQL) GetName() string { + return n.Name +} + +func (n *NodeGraphQL) RunSync(ctx context.Context, req *node.FlowNodeRequest) node.FlowNodeResult { + nextID := mflow.GetNextNodeID(req.EdgeSourceMap, n.GetID(), mflow.HandleUnspecified) + result := node.FlowNodeResult{ + NextNodeID: nextID, + Err: nil, + } + + varMapCopy := node.DeepCopyVarMap(req) + + // Build unified environment for interpolation + env := expression.NewUnifiedEnv(varMapCopy) + + // Track input variable reads if tracker is available + readVars := make(map[string]any) + + // Helper to interpolate and collect reads (same pattern as HTTP REQUEST nodes) + interpolate := func(raw string) (string, error) { + if !expression.HasVars(raw) { + return raw, nil + } + result, err := env.InterpolateWithResult(raw) + if err != nil { + return "", err + } + // Collect tracked reads + for k, v := range result.ReadVars { + readVars[k] = v + } + return result.Value, nil + } + + // Interpolate URL, query, variables, and headers + var err error + url, err := interpolate(n.GraphQL.Url) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate url: %w", err) + return result + } + + query, err := interpolate(n.GraphQL.Query) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate query: %w", err) + return result + } + + variables, err := interpolate(n.GraphQL.Variables) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate variables: %w", err) + return result + } + + // Build request body + var varsJSON json.RawMessage + if variables != "" { + // Try to parse as JSON; if invalid, use as string + if json.Valid([]byte(variables)) { + varsJSON = json.RawMessage(variables) + } else { + // Wrap as JSON string + b, _ := json.Marshal(variables) + varsJSON = b + } + } + + body := graphqlRequestBody{ + Query: query, + Variables: varsJSON, + } + bodyBytes, err := json.Marshal(body) + if err != nil { + result.Err = fmt.Errorf("failed to marshal graphql request body: %w", err) + return result + } + + // Build HTTP request + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(bodyBytes)) + if err != nil { + result.Err = fmt.Errorf("failed to create graphql http request: %w", err) + return result + } + httpReq.Header.Set("Content-Type", "application/json") + + // Apply headers with tracking + for _, h := range n.Headers { + if h.Enabled && h.Key != "" { + key, err := interpolate(h.Key) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate header key: %w", err) + return result + } + value, err := interpolate(h.Value) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate header value: %w", err) + return result + } + httpReq.Header.Set(key, value) + } + } + + // Track variable reads if tracker is available (before HTTP execution) + if req.VariableTracker != nil { + for varKey, varValue := range readVars { + req.VariableTracker.TrackRead(varKey, varValue) + } + } + + if ctx.Err() != nil { + return result + } + + // Execute request + startTime := time.Now() + httpResp, err := n.HttpClient.Do(httpReq) + duration := time.Since(startTime) + if err != nil { + result.Err = fmt.Errorf("graphql request failed: %w", err) + return result + } + defer httpResp.Body.Close() + + // Read response body + respBody, err := io.ReadAll(httpResp.Body) + if err != nil { + result.Err = fmt.Errorf("failed to read graphql response body: %w", err) + return result + } + + if ctx.Err() != nil { + return result + } + + // Build response headers + respHeaderModels := 
make([]mgraphql.GraphQLResponseHeader, 0) + for key, values := range httpResp.Header { + for _, value := range values { + respHeaderModels = append(respHeaderModels, mgraphql.GraphQLResponseHeader{ + ID: idwrap.NewNow(), + HeaderKey: key, + HeaderValue: value, + }) + } + } + + // Build output map + var respBodyParsed any + if err := json.Unmarshal(respBody, &respBodyParsed); err != nil { + // If not valid JSON, use as string + respBodyParsed = string(respBody) + } + + requestHeaders := make(map[string]any) + for _, h := range n.Headers { + if h.Enabled && h.Key != "" { + requestHeaders[h.Key] = h.Value + } + } + + respHeaders := make(map[string]any) + for key, values := range httpResp.Header { + if len(values) == 1 { + respHeaders[key] = values[0] + } else { + anyValues := make([]any, len(values)) + for i, v := range values { + anyValues[i] = v + } + respHeaders[key] = anyValues + } + } + + outputMap := map[string]any{ + outputRequestName: map[string]any{ + "url": url, + "query": query, + "variables": variables, + "headers": requestHeaders, + }, + outputResponseName: map[string]any{ + "status": float64(httpResp.StatusCode), + "body": respBodyParsed, + "headers": respHeaders, + "duration": float64(duration.Milliseconds()), + }, + } + + // Use tracking version if tracker is available (same pattern as HTTP REQUEST nodes) + if req.VariableTracker != nil { + if err := node.WriteNodeVarBulkWithTracking(req, n.Name, outputMap, req.VariableTracker); err != nil { + result.Err = err + return result + } + } else { + if err := node.WriteNodeVarBulk(req, n.Name, outputMap); err != nil { + result.Err = err + return result + } + } + + // Create response with assertions evaluated using UnifiedEnv (same pattern as HTTP) + respCreate, err := graphqlresponse.ResponseCreateGraphQL( + ctx, + respBody, + httpResp.StatusCode, + duration, + respHeaderModels, + n.GraphQL.ID, + n.Asserts, + varMapCopy, + ) + if err != nil { + result.Err = err + return result + } + + result.AuxiliaryID = &respCreate.GraphQLResponse.ID + + // Check if any assertions failed (same pattern as HTTP) + done := make(chan struct{}) + for _, assertRes := range respCreate.ResponseAsserts { + if !assertRes.Success { + result.Err = fmt.Errorf("assertion failed: %s", assertRes.Value) + + // Still send the response data even though we're failing + n.SideRespChan <- NodeGraphQLSideResp{ + ExecutionID: req.ExecutionID, + GraphQL: n.GraphQL, + Headers: n.Headers, + Response: respCreate.GraphQLResponse, + RespHeaders: respCreate.ResponseHeaders, + RespAsserts: respCreate.ResponseAsserts, + Done: done, + } + select { + case <-done: + case <-ctx.Done(): + } + return result + } + } + + // Send through side channel for persistence + n.SideRespChan <- NodeGraphQLSideResp{ + ExecutionID: req.ExecutionID, + GraphQL: n.GraphQL, + Headers: n.Headers, + Response: respCreate.GraphQLResponse, + RespHeaders: respCreate.ResponseHeaders, + RespAsserts: respCreate.ResponseAsserts, + Done: done, + } + select { + case <-done: + case <-ctx.Done(): + } + + return result +} + +func (n *NodeGraphQL) RunAsync(ctx context.Context, req *node.FlowNodeRequest, resultChan chan node.FlowNodeResult) { + result := n.RunSync(ctx, req) + if ctx.Err() != nil { + return + } + resultChan <- result +} + +// GetRequiredVariables implements node.VariableIntrospector. 
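For reviewers tracing how downstream flow nodes see a GraphQL node's result, here is a minimal, self-contained sketch of the variable shape RunSync writes under the node name (the nested keys mirror outputRequestName/outputResponseName above; the node name, URL, query, and body values are hypothetical):

```go
package main

import "fmt"

func main() {
	// Hypothetical flow variable map after a node named "gql_users" has run.
	// The nested keys mirror the outputMap built in RunSync above.
	vars := map[string]any{
		"gql_users": map[string]any{
			"request": map[string]any{
				"url":       "https://example.test/graphql",
				"query":     "query { users { id } }",
				"variables": "",
				"headers":   map[string]any{"Authorization": "Bearer <token>"},
			},
			"response": map[string]any{
				"status":   float64(200),
				"duration": float64(37),
				"headers":  map[string]any{"Content-Type": "application/json"},
				"body": map[string]any{
					"data": map[string]any{"users": []any{map[string]any{"id": "1"}}},
				},
			},
		},
	}

	// A later node or assertion addresses these values by path,
	// e.g. gql_users.response.status == 200.
	node := vars["gql_users"].(map[string]any)
	resp := node["response"].(map[string]any)
	fmt.Println(resp["status"], resp["duration"]) // 200 37
}
```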
+func (n *NodeGraphQL) GetRequiredVariables() []string { + var sources []string + sources = append(sources, n.GraphQL.Url, n.GraphQL.Query, n.GraphQL.Variables) + for _, h := range n.Headers { + if h.Enabled { + sources = append(sources, h.Key, h.Value) + } + } + return expression.ExtractVarKeysFromMultiple(sources...) +} + +// GetOutputVariables implements node.VariableIntrospector. +func (n *NodeGraphQL) GetOutputVariables() []string { + return []string{ + "response.status", + "response.body", + "response.headers", + "response.duration", + "request.url", + "request.query", + "request.variables", + "request.headers", + } +} diff --git a/packages/server/pkg/graphql/resolver/resolver.go b/packages/server/pkg/graphql/resolver/resolver.go new file mode 100644 index 000000000..14400be92 --- /dev/null +++ b/packages/server/pkg/graphql/resolver/resolver.go @@ -0,0 +1,127 @@ +//nolint:revive // exported +package resolver + +import ( + "context" + "sort" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/delta" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLResolver defines the interface for resolving GraphQL requests with their delta overlays. +type GraphQLResolver interface { + Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) +} + +// StandardResolver implements GraphQLResolver using standard DB services. +type StandardResolver struct { + graphqlService *sgraphql.Reader + graphqlHeaderService *sgraphql.GraphQLHeaderService + graphqlAssertService *sgraphql.GraphQLAssertService +} + +// NewStandardResolver creates a new instance of StandardResolver. +func NewStandardResolver( + graphqlService *sgraphql.Reader, + graphqlHeaderService *sgraphql.GraphQLHeaderService, + graphqlAssertService *sgraphql.GraphQLAssertService, +) *StandardResolver { + return &StandardResolver{ + graphqlService: graphqlService, + graphqlHeaderService: graphqlHeaderService, + graphqlAssertService: graphqlAssertService, + } +} + +// Resolve fetches base and delta components and resolves them into a final GraphQL request. +func (r *StandardResolver) Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) { + // 1. Fetch Base Components + baseGraphQL, err := r.graphqlService.Get(ctx, baseID) + if err != nil { + return nil, err + } + + baseHeaders, _ := r.graphqlHeaderService.GetByGraphQLID(ctx, baseID) + baseAsserts, _ := r.graphqlAssertService.GetByGraphQLID(ctx, baseID) + + // 2. Fetch Delta Components (if present) + var deltaGraphQL *mgraphql.GraphQL + var deltaHeaders []mgraphql.GraphQLHeader + var deltaAsserts []mgraphql.GraphQLAssert + + if deltaID != nil { + d, err := r.graphqlService.Get(ctx, *deltaID) + if err != nil { + return nil, err + } + deltaGraphQL = d + + deltaHeaders, _ = r.graphqlHeaderService.GetByGraphQLID(ctx, *deltaID) + deltaAsserts, _ = r.graphqlAssertService.GetByGraphQLID(ctx, *deltaID) + } + + // 3. Prepare Input for Delta Resolution + input := delta.ResolveGraphQLInput{ + Base: *baseGraphQL, + BaseHeaders: convertGraphQLHeaders(baseHeaders), + BaseAsserts: convertGraphQLAsserts(baseAsserts), + } + + if deltaGraphQL != nil { + input.Delta = *deltaGraphQL + input.DeltaHeaders = convertGraphQLHeaders(deltaHeaders) + input.DeltaAsserts = convertGraphQLAsserts(deltaAsserts) + } + + // 4. 
Resolve + output := delta.ResolveGraphQL(input) + return &output, nil +} + +// Helper functions for type conversion + +func convertGraphQLHeaders(in []mgraphql.GraphQLHeader) []mgraphql.GraphQLHeader { + if in == nil { + return []mgraphql.GraphQLHeader{} + } + out := make([]mgraphql.GraphQLHeader, len(in)) + for i, v := range in { + out[i] = mgraphql.GraphQLHeader{ + ID: v.ID, + GraphQLID: v.GraphQLID, + Key: v.Key, + Value: v.Value, + Description: v.Description, + Enabled: v.Enabled, + ParentGraphQLHeaderID: v.ParentGraphQLHeaderID, + IsDelta: v.IsDelta, + DeltaKey: v.DeltaKey, + DeltaValue: v.DeltaValue, + DeltaDescription: v.DeltaDescription, + DeltaEnabled: v.DeltaEnabled, + DisplayOrder: v.DisplayOrder, + CreatedAt: v.CreatedAt, + UpdatedAt: v.UpdatedAt, + } + } + return out +} + +// convertGraphQLAsserts converts DB model asserts (ordered by float) to mgraphql model asserts. +func convertGraphQLAsserts(in []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + if len(in) == 0 { + return []mgraphql.GraphQLAssert{} + } + + // Sort by DisplayOrder (DB model uses float ordering) + sorted := make([]mgraphql.GraphQLAssert, len(in)) + copy(sorted, in) + sort.Slice(sorted, func(i, j int) bool { + return sorted[i].DisplayOrder < sorted[j].DisplayOrder + }) + + return sorted +} diff --git a/packages/server/pkg/graphql/response/response.go b/packages/server/pkg/graphql/response/response.go new file mode 100644 index 000000000..3bf69bc84 --- /dev/null +++ b/packages/server/pkg/graphql/response/response.go @@ -0,0 +1,194 @@ +//nolint:revive // exported +package response + +import ( + "context" + "encoding/json" + "fmt" + "sort" + "strings" + "time" + + "connectrpc.com/connect" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type ResponseCreateGraphQLOutput struct { + GraphQLResponse mgraphql.GraphQLResponse + ResponseHeaders []mgraphql.GraphQLResponseHeader + ResponseAsserts []mgraphql.GraphQLResponseAssert +} + +func ResponseCreateGraphQL( + ctx context.Context, + respBody []byte, + statusCode int, + duration time.Duration, + headers []mgraphql.GraphQLResponseHeader, + graphqlID idwrap.IDWrap, + assertions []mgraphql.GraphQLAssert, + flowVars map[string]any, +) (*ResponseCreateGraphQLOutput, error) { + responseID := idwrap.NewNow() + now := time.Now().Unix() + + // Create response model + graphqlResponse := mgraphql.GraphQLResponse{ + ID: responseID, + GraphQLID: graphqlID, + Status: int32(statusCode), + Body: respBody, + Time: now, + Duration: int32(duration.Milliseconds()), + Size: int32(len(respBody)), + CreatedAt: now, + } + + // Set response ID on headers + responseHeaders := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + responseHeaders[i] = h + responseHeaders[i].ResponseID = responseID + responseHeaders[i].CreatedAt = now + } + + // Parse response body as JSON (similar to HTTP) + var respBodyParsed any + if err := json.Unmarshal(respBody, &respBodyParsed); err != nil { + respBodyParsed = string(respBody) + } + + // Build response variable (similar to HTTP's ConvertResponseToVar) + responseVar := map[string]any{ + "status": float64(statusCode), + "body": respBodyParsed, + "headers": convertHeadersToMap(headers), + "duration": float64(duration.Milliseconds()), + } + + // Build unified environment with flowVars and response binding + // For GraphQL, also extract "data" and "errors" fields 
to top level for easier access + evalEnvMap := buildAssertionEnv(flowVars, responseVar, respBodyParsed) + env := expression.NewUnifiedEnv(evalEnvMap) + + responseAsserts := make([]mgraphql.GraphQLResponseAssert, 0) + + // Evaluate assertions (SAME pattern as HTTP) + for _, assertion := range assertions { + if assertion.Enabled { + expr := assertion.Value + + // Skip assertions with empty expressions + if strings.TrimSpace(expr) == "" { + continue + } + + // If expression contains {{ }}, interpolate first + evaluatedExpr := expr + if expression.HasVars(expr) { + interpolated, err := env.Interpolate(expr) + if err != nil { + return nil, err + } + evaluatedExpr = interpolated + } + + // Evaluate as boolean expression + ok, err := env.EvalBool(ctx, evaluatedExpr) + if err != nil { + annotatedErr := annotateUnknownNameError(err, evalEnvMap) + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("expression %q failed: %w", evaluatedExpr, annotatedErr)) + } + + responseAsserts = append(responseAsserts, mgraphql.GraphQLResponseAssert{ + ID: idwrap.NewNow(), + ResponseID: responseID, + Value: evaluatedExpr, + Success: ok, + CreatedAt: now, + }) + } + } + + return &ResponseCreateGraphQLOutput{ + GraphQLResponse: graphqlResponse, + ResponseHeaders: responseHeaders, + ResponseAsserts: responseAsserts, + }, nil +} + +func buildAssertionEnv(flowVars map[string]any, responseBinding map[string]any, respBodyParsed any) map[string]any { + env := make(map[string]any) + + // Add flow variables first + for k, v := range flowVars { + env[k] = v + } + + // Add response binding for backward compatibility + env["response"] = responseBinding + + // Extract GraphQL-specific fields from response body (matching GraphQL tab behavior) + var data any + var errors any + if bodyMap, ok := respBodyParsed.(map[string]any); ok { + if d, hasData := bodyMap["data"]; hasData { + data = d + } + if e, hasErrors := bodyMap["errors"]; hasErrors { + errors = e + } + } + + // Add GraphQL-specific fields at top level for easier access (matching GraphQL tab behavior) + // This allows assertions like: data.users[0].id == "1" + env["data"] = data + env["errors"] = errors + + return env +} + +func convertHeadersToMap(headers []mgraphql.GraphQLResponseHeader) map[string]any { + headersMap := make(map[string]any) + for _, h := range headers { + if existing, ok := headersMap[h.HeaderKey]; ok { + // Multiple values for same key - convert to array + if arr, isArr := existing.([]any); isArr { + headersMap[h.HeaderKey] = append(arr, h.HeaderValue) + } else { + headersMap[h.HeaderKey] = []any{existing, h.HeaderValue} + } + } else { + headersMap[h.HeaderKey] = h.HeaderValue + } + } + return headersMap +} + +func annotateUnknownNameError(err error, env map[string]any) error { + if err == nil { + return nil + } + lower := strings.ToLower(err.Error()) + if strings.Contains(lower, "unknown name") { + keys := collectEnvKeys(env) + if len(keys) > 0 { + return fmt.Errorf("%w (available variables: %s)", err, strings.Join(keys, ", ")) + } + } + return err +} + +func collectEnvKeys(env map[string]any) []string { + if len(env) == 0 { + return nil + } + keys := make([]string, 0, len(env)) + for k := range env { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/packages/server/pkg/ioworkspace/exporter.go b/packages/server/pkg/ioworkspace/exporter.go index 50fc8d8ac..56ca4c860 100644 --- a/packages/server/pkg/ioworkspace/exporter.go +++ b/packages/server/pkg/ioworkspace/exporter.go @@ -13,6 +13,7 @@ import ( 
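Stepping back to buildAssertionEnv in response.go just above: because the parsed body's `data` and `errors` fields are promoted to the top level of the evaluation environment, assertions can address GraphQL results directly. A rough sketch of that promotion, using an invented response body (only the promotion logic is taken from the diff):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical GraphQL response body.
	respBody := []byte(`{"data":{"users":[{"id":"1"}]},"errors":null}`)

	var parsed any
	_ = json.Unmarshal(respBody, &parsed) // error ignored in this sketch

	// Same promotion buildAssertionEnv performs: lift "data" and "errors"
	// next to the full "response" binding so expressions like
	//   data.users[0].id == "1"
	// resolve without digging through response.body first.
	env := map[string]any{"response": map[string]any{"body": parsed}}
	if m, ok := parsed.(map[string]any); ok {
		env["data"] = m["data"]
		env["errors"] = m["errors"]
	}

	users := env["data"].(map[string]any)["users"].([]any)
	fmt.Println(users[0].(map[string]any)["id"]) // 1
}
```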
"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" ) @@ -53,6 +54,10 @@ func (s *IOWorkspaceService) Export(ctx context.Context, opts ExportOptions) (*W if err := s.exportHTTP(ctx, opts, bundle); err != nil { return nil, fmt.Errorf("failed to export HTTP requests: %w", err) } + + if err := s.exportGraphQL(ctx, opts, bundle); err != nil { + return nil, fmt.Errorf("failed to export GraphQL requests: %w", err) + } } // Export flows if requested @@ -222,6 +227,7 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions nodeAIService := sflow.NewNodeAIService(s.queries) nodeAIProviderService := sflow.NewNodeAiProviderService(s.queries) nodeMemoryService := sflow.NewNodeMemoryService(s.queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(s.queries) var flowIDs []idwrap.IDWrap @@ -275,7 +281,7 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions // Export node implementations based on node types for _, node := range nodes { - if err := s.exportNodeImplementation(ctx, node, bundle, nodeRequestService, nodeIfService, nodeForService, nodeForEachService, nodeJSService, nodeAIService, nodeAIProviderService, nodeMemoryService); err != nil { + if err := s.exportNodeImplementation(ctx, node, bundle, nodeRequestService, nodeIfService, nodeForService, nodeForEachService, nodeJSService, nodeAIService, nodeAIProviderService, nodeMemoryService, nodeGraphQLService); err != nil { return fmt.Errorf("failed to export node implementation for node %s: %w", node.ID.String(), err) } } @@ -292,7 +298,34 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions "js_nodes", len(bundle.FlowJSNodes), "ai_nodes", len(bundle.FlowAINodes), "ai_provider_nodes", len(bundle.FlowAIProviderNodes), - "ai_memory_nodes", len(bundle.FlowAIMemoryNodes)) + "ai_memory_nodes", len(bundle.FlowAIMemoryNodes), + "graphql_nodes", len(bundle.FlowGraphQLNodes)) + + return nil +} + +// exportGraphQL exports GraphQL requests and their headers +func (s *IOWorkspaceService) exportGraphQL(ctx context.Context, opts ExportOptions, bundle *WorkspaceBundle) error { + graphqlService := sgraphql.New(s.queries, s.logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(s.queries) + + gqlRequests, err := graphqlService.GetByWorkspaceID(ctx, opts.WorkspaceID) + if err != nil { + return fmt.Errorf("failed to get GraphQL requests: %w", err) + } + bundle.GraphQLRequests = gqlRequests + + for _, gql := range gqlRequests { + headers, err := graphqlHeaderService.GetByGraphQLID(ctx, gql.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return fmt.Errorf("failed to get headers for GraphQL %s: %w", gql.ID.String(), err) + } + bundle.GraphQLHeaders = append(bundle.GraphQLHeaders, headers...) 
+ } + + s.logger.DebugContext(ctx, "Exported GraphQL requests", + "count", len(bundle.GraphQLRequests), + "headers", len(bundle.GraphQLHeaders)) return nil } @@ -310,6 +343,7 @@ func (s *IOWorkspaceService) exportNodeImplementation( nodeAIService sflow.NodeAIService, nodeAIProviderService sflow.NodeAiProviderService, nodeMemoryService sflow.NodeMemoryService, + nodeGraphQLService sflow.NodeGraphQLService, ) error { switch node.NodeKind { case mflow.NODE_KIND_REQUEST: @@ -383,6 +417,15 @@ func (s *IOWorkspaceService) exportNodeImplementation( if nodeMemory != nil { bundle.FlowAIMemoryNodes = append(bundle.FlowAIMemoryNodes, *nodeMemory) } + + case mflow.NODE_KIND_GRAPHQL: + nodeGraphQL, err := nodeGraphQLService.GetNodeGraphQL(ctx, node.ID) + if err != nil { + return fmt.Errorf("failed to get graphql node: %w", err) + } + if nodeGraphQL != nil { + bundle.FlowGraphQLNodes = append(bundle.FlowGraphQLNodes, *nodeGraphQL) + } } return nil diff --git a/packages/server/pkg/ioworkspace/importer.go b/packages/server/pkg/ioworkspace/importer.go index e8cba2e3a..0f2e94490 100644 --- a/packages/server/pkg/ioworkspace/importer.go +++ b/packages/server/pkg/ioworkspace/importer.go @@ -9,6 +9,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" ) @@ -35,6 +36,9 @@ type ImportResult struct { FlowAINodesCreated int FlowAIProviderNodesCreated int FlowAIMemoryNodesCreated int + FlowGraphQLNodesCreated int + GraphQLRequestsCreated int + GraphQLHeadersCreated int EnvironmentsCreated int EnvironmentVarsCreated int @@ -85,6 +89,10 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor nodeAIService := sflow.NewNodeAIService(s.queries).TX(tx) nodeAIProviderService := sflow.NewNodeAiProviderService(s.queries).TX(tx) nodeMemoryService := sflow.NewNodeMemoryService(s.queries).TX(tx) + nodeGraphQLService := sflow.NewNodeGraphQLService(s.queries).TX(tx) + + graphqlService := sgraphql.New(s.queries, nil).TX(tx) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(s.queries).TX(tx) fileService := sfile.New(s.queries, nil).TX(tx) envService := senv.NewEnvironmentService(s.queries, nil).TX(tx) @@ -104,6 +112,12 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor } } + if opts.ImportHTTP && len(bundle.GraphQLRequests) > 0 { + if err := s.importGraphQLRequests(ctx, graphqlService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import GraphQL requests: %w", err) + } + } + if opts.CreateFiles && len(bundle.Files) > 0 { if err := s.importFiles(ctx, fileService, bundle, opts, result); err != nil { return nil, fmt.Errorf("failed to import files: %w", err) @@ -131,6 +145,12 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor } } + if opts.ImportHTTP && len(bundle.GraphQLHeaders) > 0 { + if err := s.importGraphQLHeaders(ctx, graphqlHeaderService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import GraphQL headers: %w", err) + } + } + if opts.ImportHTTP { if len(bundle.HTTPHeaders) > 0 { if err := s.importHTTPHeaders(ctx, httpHeaderService, bundle, opts, result); err != nil { @@ -231,6 +251,12 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle 
*Wor return nil, fmt.Errorf("failed to import flow AI memory nodes: %w", err) } } + + if len(bundle.FlowGraphQLNodes) > 0 { + if err := s.importFlowGraphQLNodes(ctx, nodeGraphQLService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import flow GraphQL nodes: %w", err) + } + } } return result, nil diff --git a/packages/server/pkg/ioworkspace/importer_flow.go b/packages/server/pkg/ioworkspace/importer_flow.go index 12cec53d8..5cdf45834 100644 --- a/packages/server/pkg/ioworkspace/importer_flow.go +++ b/packages/server/pkg/ioworkspace/importer_flow.go @@ -7,6 +7,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" ) // importFlows imports flows from the bundle. @@ -279,3 +280,60 @@ func (s *IOWorkspaceService) importFlowAIMemoryNodes(ctx context.Context, nodeMe } return nil } + +// importGraphQLRequests imports GraphQL requests from the bundle. +func (s *IOWorkspaceService) importGraphQLRequests(ctx context.Context, graphqlService sgraphql.GraphQLService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, gql := range bundle.GraphQLRequests { + // Generate new ID if not preserving + if !opts.PreserveIDs { + gql.ID = idwrap.NewNow() + } + + // Update workspace ID + gql.WorkspaceID = opts.WorkspaceID + + // Create GraphQL request + if err := graphqlService.Create(ctx, &gql); err != nil { + return fmt.Errorf("failed to create GraphQL request %s: %w", gql.Name, err) + } + + result.GraphQLRequestsCreated++ + } + return nil +} + +// importGraphQLHeaders imports GraphQL headers from the bundle. +func (s *IOWorkspaceService) importGraphQLHeaders(ctx context.Context, graphqlHeaderService sgraphql.GraphQLHeaderService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, header := range bundle.GraphQLHeaders { + // Generate new ID if not preserving + if !opts.PreserveIDs { + header.ID = idwrap.NewNow() + } + + // Create header + if err := graphqlHeaderService.Create(ctx, &header); err != nil { + return fmt.Errorf("failed to create GraphQL header: %w", err) + } + + result.GraphQLHeadersCreated++ + } + return nil +} + +// importFlowGraphQLNodes imports flow GraphQL nodes from the bundle. 
+func (s *IOWorkspaceService) importFlowGraphQLNodes(ctx context.Context, nodeGraphQLService sflow.NodeGraphQLService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, gqlNode := range bundle.FlowGraphQLNodes { + // Remap flow node ID + if newNodeID, ok := result.NodeIDMap[gqlNode.FlowNodeID]; ok { + gqlNode.FlowNodeID = newNodeID + } + + // Create GraphQL node + if err := nodeGraphQLService.CreateNodeGraphQL(ctx, gqlNode); err != nil { + return fmt.Errorf("failed to create flow GraphQL node: %w", err) + } + + result.FlowGraphQLNodesCreated++ + } + return nil +} diff --git a/packages/server/pkg/ioworkspace/types.go b/packages/server/pkg/ioworkspace/types.go index 39190e661..fa2f8b8aa 100644 --- a/packages/server/pkg/ioworkspace/types.go +++ b/packages/server/pkg/ioworkspace/types.go @@ -6,6 +6,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" ) @@ -18,7 +19,7 @@ type WorkspaceBundle struct { Workspace mworkspace.Workspace // HTTP requests and associated data structures - HTTPRequests []mhttp.HTTP + HTTPRequests []mhttp.HTTP HTTPSearchParams []mhttp.HTTPSearchParam HTTPHeaders []mhttp.HTTPHeader HTTPBodyForms []mhttp.HTTPBodyForm @@ -26,6 +27,10 @@ type WorkspaceBundle struct { HTTPBodyRaw []mhttp.HTTPBodyRaw HTTPAsserts []mhttp.HTTPAssert + // GraphQL requests and associated data + GraphQLRequests []mgraphql.GraphQL + GraphQLHeaders []mgraphql.GraphQLHeader + // File organization Files []mfile.File @@ -44,6 +49,7 @@ type WorkspaceBundle struct { FlowAINodes []mflow.NodeAI FlowAIProviderNodes []mflow.NodeAiProvider FlowAIMemoryNodes []mflow.NodeMemory + FlowGraphQLNodes []mflow.NodeGraphQL // Environments and variables Environments []menv.Env @@ -64,6 +70,8 @@ func (wb *WorkspaceBundle) CountEntities() map[string]int { "http_body_urlencoded": len(wb.HTTPBodyUrlencoded), "http_body_raw": len(wb.HTTPBodyRaw), "http_asserts": len(wb.HTTPAsserts), + "graphql_requests": len(wb.GraphQLRequests), + "graphql_headers": len(wb.GraphQLHeaders), "files": len(wb.Files), "flows": len(wb.Flows), "flow_variables": len(wb.FlowVariables), @@ -76,8 +84,9 @@ func (wb *WorkspaceBundle) CountEntities() map[string]int { "flow_js_nodes": len(wb.FlowJSNodes), "flow_ai_nodes": len(wb.FlowAINodes), "flow_ai_provider_nodes": len(wb.FlowAIProviderNodes), - "flow_ai_memory_nodes": len(wb.FlowAIMemoryNodes), - "environments": len(wb.Environments), + "flow_ai_memory_nodes": len(wb.FlowAIMemoryNodes), + "flow_graphql_nodes": len(wb.FlowGraphQLNodes), + "environments": len(wb.Environments), "environment_vars": len(wb.EnvironmentVars), "credentials": len(wb.Credentials), } @@ -94,6 +103,17 @@ func (wb *WorkspaceBundle) GetHTTPByID(id idwrap.IDWrap) *mhttp.HTTP { return nil } +// GetGraphQLByID finds and returns a GraphQL request by its ID. +// Returns nil if the GraphQL request is not found. +func (wb *WorkspaceBundle) GetGraphQLByID(id idwrap.IDWrap) *mgraphql.GraphQL { + for i := range wb.GraphQLRequests { + if wb.GraphQLRequests[i].ID.Compare(id) == 0 { + return &wb.GraphQLRequests[i] + } + } + return nil +} + // GetFlowByID finds and returns a flow by its ID. // Returns nil if the flow is not found. 
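The flow-node import above only rewires FlowNodeID through result.NodeIDMap, so node-attached records follow the flow nodes created earlier in the same import. A toy sketch of that remapping step (the IDs are invented stand-ins for idwrap.IDWrap values):

```go
package main

import "fmt"

func main() {
	// result.NodeIDMap collects old -> new flow node IDs assigned during import.
	nodeIDMap := map[string]string{"node-old": "node-new"}

	// Same lookup importFlowGraphQLNodes performs before creating the record.
	gqlNodeFlowNodeID := "node-old"
	if remapped, ok := nodeIDMap[gqlNodeFlowNodeID]; ok {
		gqlNodeFlowNodeID = remapped
	}
	fmt.Println(gqlNodeFlowNodeID) // node-new
}
```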
func (wb *WorkspaceBundle) GetFlowByID(id idwrap.IDWrap) *mflow.Flow { diff --git a/packages/server/pkg/model/mfile/mfile.go b/packages/server/pkg/model/mfile/mfile.go index 6606de978..5d0920615 100644 --- a/packages/server/pkg/model/mfile/mfile.go +++ b/packages/server/pkg/model/mfile/mfile.go @@ -18,6 +18,7 @@ const ( ContentTypeHTTPDelta ContentType = 2 // http delta (draft/overlay) ContentTypeFlow ContentType = 3 // flow ContentTypeCredential ContentType = 4 // credential + ContentTypeGraphQL ContentType = 5 // graphql ) // String returns the string representation of ContentType @@ -33,6 +34,8 @@ func (ct ContentType) String() string { return "http_delta" case ContentTypeCredential: return "credential" + case ContentTypeGraphQL: + return "graphql" default: return "unknown" } @@ -87,6 +90,11 @@ func (f File) IsCredential() bool { return f.ContentType == ContentTypeCredential } +// IsGraphQL returns true if the file contains a GraphQL request +func (f File) IsGraphQL() bool { + return f.ContentType == ContentTypeGraphQL +} + // IsRoot returns true if the file has no parent folder func (f File) IsRoot() bool { return f.ParentID == nil @@ -130,6 +138,8 @@ func ContentTypeFromString(s string) ContentType { return ContentTypeHTTPDelta case "credential": return ContentTypeCredential + case "graphql": + return ContentTypeGraphQL default: return ContentTypeUnknown } @@ -137,7 +147,7 @@ func ContentTypeFromString(s string) ContentType { // IsValidContentType checks if the content type is valid func IsValidContentType(kind ContentType) bool { - return kind == ContentTypeFolder || kind == ContentTypeFlow || kind == ContentTypeHTTP || kind == ContentTypeHTTPDelta || kind == ContentTypeCredential + return kind == ContentTypeFolder || kind == ContentTypeFlow || kind == ContentTypeHTTP || kind == ContentTypeHTTPDelta || kind == ContentTypeCredential || kind == ContentTypeGraphQL } // IDEquals checks if two IDWrap values are equal diff --git a/packages/server/pkg/model/mflow/execution.go b/packages/server/pkg/model/mflow/execution.go index ac68768fc..0c52d567b 100644 --- a/packages/server/pkg/model/mflow/execution.go +++ b/packages/server/pkg/model/mflow/execution.go @@ -18,6 +18,7 @@ type NodeExecution struct { OutputData []byte `json:"output_data,omitempty"` OutputDataCompressType int8 `json:"output_data_compress_type"` ResponseID *idwrap.IDWrap `json:"response_id,omitempty"` + GraphQLResponseID *idwrap.IDWrap `json:"graphql_response_id,omitempty"` CompletedAt *int64 `json:"completed_at,omitempty"` } diff --git a/packages/server/pkg/model/mflow/node.go b/packages/server/pkg/model/mflow/node.go index b964bd8c1..8218e74ae 100644 --- a/packages/server/pkg/model/mflow/node.go +++ b/packages/server/pkg/model/mflow/node.go @@ -18,6 +18,7 @@ const ( NODE_KIND_AI NodeKind = 7 NODE_KIND_AI_PROVIDER NodeKind = 8 NODE_KIND_AI_MEMORY NodeKind = 9 + NODE_KIND_GRAPHQL NodeKind = 10 ) type NodeState = int8 diff --git a/packages/server/pkg/model/mflow/node_types.go b/packages/server/pkg/model/mflow/node_types.go index c5e3ac740..747306b76 100644 --- a/packages/server/pkg/model/mflow/node_types.go +++ b/packages/server/pkg/model/mflow/node_types.go @@ -252,3 +252,11 @@ type NodeMemory struct { MemoryType AiMemoryType WindowSize int32 } + +// --- GraphQL Node --- + +type NodeGraphQL struct { + FlowNodeID idwrap.IDWrap + GraphQLID *idwrap.IDWrap + DeltaGraphQLID *idwrap.IDWrap +} diff --git a/packages/server/pkg/model/mgraphql/mgraphql.go b/packages/server/pkg/model/mgraphql/mgraphql.go new file mode 100644 index 
000000000..400cea565 --- /dev/null +++ b/packages/server/pkg/model/mgraphql/mgraphql.go @@ -0,0 +1,104 @@ +package mgraphql + +import ( + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +type GraphQL struct { + ID idwrap.IDWrap `json:"id"` + WorkspaceID idwrap.IDWrap `json:"workspace_id"` + FolderID *idwrap.IDWrap `json:"folder_id,omitempty"` + Name string `json:"name"` + Url string `json:"url"` + Query string `json:"query"` + Variables string `json:"variables"` + Description string `json:"description"` + ParentGraphQLID *idwrap.IDWrap `json:"parent_graphql_id,omitempty"` + IsDelta bool `json:"is_delta"` + IsSnapshot bool `json:"is_snapshot"` + DeltaName *string `json:"delta_name,omitempty"` + DeltaUrl *string `json:"delta_url,omitempty"` + DeltaQuery *string `json:"delta_query,omitempty"` + DeltaVariables *string `json:"delta_variables,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + LastRunAt *int64 `json:"last_run_at,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +type GraphQLHeader struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Key string `json:"key"` + Value string `json:"value"` + Enabled bool `json:"enabled"` + Description string `json:"description"` + DisplayOrder float32 `json:"order"` + ParentGraphQLHeaderID *idwrap.IDWrap `json:"parent_graphql_header_id,omitempty"` + IsDelta bool `json:"is_delta"` + DeltaKey *string `json:"delta_key,omitempty"` + DeltaValue *string `json:"delta_value,omitempty"` + DeltaEnabled *bool `json:"delta_enabled,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + DeltaDisplayOrder *float32 `json:"delta_order,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +type GraphQLAssert struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Value string `json:"value"` + Enabled bool `json:"enabled"` + Description string `json:"description"` + DisplayOrder float32 `json:"order"` + ParentGraphQLAssertID *idwrap.IDWrap `json:"parent_graphql_assert_id,omitempty"` + IsDelta bool `json:"is_delta"` + DeltaValue *string `json:"delta_value,omitempty"` + DeltaEnabled *bool `json:"delta_enabled,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + DeltaDisplayOrder *float32 `json:"delta_order,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +func (a GraphQLAssert) IsEnabled() bool { + return a.Enabled +} + +type GraphQLResponse struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Status int32 `json:"status"` + Body []byte `json:"body"` + Time int64 `json:"time"` + Duration int32 `json:"duration"` + Size int32 `json:"size"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLResponseHeader struct { + ID idwrap.IDWrap `json:"id"` + ResponseID idwrap.IDWrap `json:"response_id"` + HeaderKey string `json:"header_key"` + HeaderValue string `json:"header_value"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLResponseAssert struct { + ID idwrap.IDWrap `json:"id"` + ResponseID idwrap.IDWrap `json:"response_id"` + Value string `json:"value"` + Success bool `json:"success"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLVersion struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + VersionName string `json:"version_name"` + VersionDescription string `json:"version_description"` + IsActive bool 
`json:"is_active"` + CreatedAt int64 `json:"created_at"` + CreatedBy *idwrap.IDWrap `json:"created_by,omitempty"` +} diff --git a/packages/server/pkg/mutation/delete_file.go b/packages/server/pkg/mutation/delete_file.go index e6a93351a..7d5c7819b 100644 --- a/packages/server/pkg/mutation/delete_file.go +++ b/packages/server/pkg/mutation/delete_file.go @@ -51,6 +51,14 @@ func (c *Context) DeleteFile(ctx context.Context, file FileDeleteItem) error { if err := c.q.DeleteCredential(ctx, *file.ContentID); err != nil { return err } + case mfile.ContentTypeGraphQL: + // GraphQL - cascade to headers + if err := c.DeleteGraphQL(ctx, GraphQLDeleteItem{ + ID: *file.ContentID, + WorkspaceID: file.WorkspaceID, + }); err != nil { + return err + } case mfile.ContentTypeFolder: // Content deletion handled by recursion above (folders don't have separate content tables) } @@ -87,6 +95,7 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e // Group by content type for efficient batch deletion of LEAF content var httpItems []HTTPDeleteItem var flowItems []FlowDeleteItem + var graphqlItems []GraphQLDeleteItem for _, item := range items { if item.ContentID != nil { @@ -114,6 +123,11 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e if err := c.q.DeleteCredential(ctx, *item.ContentID); err != nil { return err } + case mfile.ContentTypeGraphQL: + graphqlItems = append(graphqlItems, GraphQLDeleteItem{ + ID: *item.ContentID, + WorkspaceID: item.WorkspaceID, + }) } } } @@ -132,6 +146,13 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e } } + // Delete GraphQL content batch + if len(graphqlItems) > 0 { + if err := c.DeleteGraphQLBatch(ctx, graphqlItems); err != nil { + return err + } + } + // Track file deletes and delete file records for _, item := range items { c.track(Event{ diff --git a/packages/server/pkg/mutation/delete_graphql.go b/packages/server/pkg/mutation/delete_graphql.go new file mode 100644 index 000000000..5641cdae0 --- /dev/null +++ b/packages/server/pkg/mutation/delete_graphql.go @@ -0,0 +1,77 @@ +package mutation + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +// GraphQLDeleteItem represents a GraphQL entry to delete. +type GraphQLDeleteItem struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap +} + +// DeleteGraphQL deletes a GraphQL entry and tracks cascade events. +func (c *Context) DeleteGraphQL(ctx context.Context, item GraphQLDeleteItem) error { + // Collect children before delete + c.collectGraphQLChildren(ctx, item.ID, item.WorkspaceID) + + // Track parent delete + c.track(Event{ + Entity: EntityGraphQL, + Op: OpDelete, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + }) + + // Delete - DB CASCADE handles actual child deletion + err := c.q.DeleteGraphQL(ctx, item.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + return nil +} + +// DeleteGraphQLBatch deletes multiple GraphQL entries. +func (c *Context) DeleteGraphQLBatch(ctx context.Context, items []GraphQLDeleteItem) error { + for _, item := range items { + if err := c.DeleteGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// collectGraphQLChildren collects cascade events for a single GraphQL entry. 
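A small sketch of what DeleteGraphQL above ends up recording: child header and assert events are collected first, then one delete event for the request itself, and downstream consumers can filter by entity. The Event and Entity shapes here are simplified stand-ins for the package's own types, and the IDs are invented:

```go
package main

import "fmt"

type entity int

const (
	entityGraphQL entity = iota
	entityGraphQLHeader
	entityGraphQLAssert
)

type event struct {
	Entity entity
	ID     string
	Parent string
}

func main() {
	// Events a DeleteGraphQL call would track for a request with
	// two headers and one assert: children first, then the parent.
	events := []event{
		{entityGraphQLHeader, "hdr-1", "gql-1"},
		{entityGraphQLHeader, "hdr-2", "gql-1"},
		{entityGraphQLAssert, "assert-1", "gql-1"},
		{entityGraphQL, "gql-1", ""},
	}

	// A consumer (e.g. a change feed) can fan out per entity kind.
	var headers int
	for _, e := range events {
		if e.Entity == entityGraphQLHeader {
			headers++
		}
	}
	fmt.Println(headers) // 2
}
```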
+func (c *Context) collectGraphQLChildren(ctx context.Context, graphqlID, workspaceID idwrap.IDWrap) { + // Headers - cascaded by DB FK + if headers, err := c.q.GetGraphQLHeaders(ctx, graphqlID); err == nil { + for i := range headers { + c.track(Event{ + Entity: EntityGraphQLHeader, + Op: OpDelete, + ID: headers[i].ID, + ParentID: graphqlID, + WorkspaceID: workspaceID, + }) + } + } + + // Asserts - cascaded by DB FK + if asserts, err := c.q.GetGraphQLAssertsByGraphQLID(ctx, graphqlID.Bytes()); err == nil { + for i := range asserts { + id, _ := idwrap.NewFromBytes(asserts[i].ID) + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpDelete, + ID: id, + ParentID: graphqlID, + WorkspaceID: workspaceID, + IsDelta: asserts[i].IsDelta, + }) + } + } +} diff --git a/packages/server/pkg/mutation/event.go b/packages/server/pkg/mutation/event.go index 8c791db60..a89acfe1c 100644 --- a/packages/server/pkg/mutation/event.go +++ b/packages/server/pkg/mutation/event.go @@ -38,6 +38,7 @@ const ( EntityFlowNodeAI EntityFlowNodeAiProvider EntityFlowNodeMemory + EntityFlowNodeGraphQL EntityFlowEdge EntityFlowVariable EntityFlowTag @@ -47,6 +48,14 @@ const ( // Credential entities EntityCredential + + // GraphQL entities + EntityGraphQL + EntityGraphQLHeader + EntityGraphQLAssert + EntityGraphQLResponse + EntityGraphQLResponseHeader + EntityGraphQLResponseAssert ) // Operation identifies the type of mutation. diff --git a/packages/server/pkg/mutation/insert_graphql.go b/packages/server/pkg/mutation/insert_graphql.go new file mode 100644 index 000000000..ae6cb3a1c --- /dev/null +++ b/packages/server/pkg/mutation/insert_graphql.go @@ -0,0 +1,80 @@ +package mutation + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLInsertItem represents a GraphQL entry to insert. +type GraphQLInsertItem struct { + GraphQL *mgraphql.GraphQL + WorkspaceID idwrap.IDWrap +} + +// InsertGraphQL inserts a GraphQL entry and tracks the event. +func (c *Context) InsertGraphQL(ctx context.Context, item GraphQLInsertItem) error { + writer := sgraphql.NewWriterFromQueries(c.q) + + if err := writer.Create(ctx, item.GraphQL); err != nil { + return err + } + + c.track(Event{ + Entity: EntityGraphQL, + Op: OpInsert, + ID: item.GraphQL.ID, + WorkspaceID: item.WorkspaceID, + Payload: item.GraphQL, + }) + + return nil +} + +// InsertGraphQLBatch inserts multiple GraphQL entries. +func (c *Context) InsertGraphQLBatch(ctx context.Context, items []GraphQLInsertItem) error { + for _, item := range items { + if err := c.InsertGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertInsertItem represents a GraphQL assert to insert. +type GraphQLAssertInsertItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + Params gen.CreateGraphQLAssertParams +} + +// InsertGraphQLAssert inserts a GraphQL assert and tracks the event. 
+func (c *Context) InsertGraphQLAssert(ctx context.Context, item GraphQLAssertInsertItem) error { + if err := c.q.CreateGraphQLAssert(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpInsert, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + ParentID: item.GraphQLID, + IsDelta: item.IsDelta, + }) + return nil +} + +// InsertGraphQLAssertBatch inserts multiple GraphQL asserts. +func (c *Context) InsertGraphQLAssertBatch(ctx context.Context, items []GraphQLAssertInsertItem) error { + for _, item := range items { + if err := c.InsertGraphQLAssert(ctx, item); err != nil { + return err + } + } + return nil +} diff --git a/packages/server/pkg/mutation/update_graphql.go b/packages/server/pkg/mutation/update_graphql.go new file mode 100644 index 000000000..73aaf90c4 --- /dev/null +++ b/packages/server/pkg/mutation/update_graphql.go @@ -0,0 +1,120 @@ +package mutation + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/patch" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLUpdateItem represents a GraphQL entry to update. +type GraphQLUpdateItem struct { + GraphQL *mgraphql.GraphQL + WorkspaceID idwrap.IDWrap +} + +// UpdateGraphQL updates a GraphQL entry and tracks the event. +func (c *Context) UpdateGraphQL(ctx context.Context, item GraphQLUpdateItem) error { + writer := sgraphql.NewWriterFromQueries(c.q) + + if err := writer.Update(ctx, item.GraphQL); err != nil { + return err + } + + c.track(Event{ + Entity: EntityGraphQL, + Op: OpUpdate, + ID: item.GraphQL.ID, + WorkspaceID: item.WorkspaceID, + Payload: item.GraphQL, + }) + + return nil +} + +// UpdateGraphQLBatch updates multiple GraphQL entries. +func (c *Context) UpdateGraphQLBatch(ctx context.Context, items []GraphQLUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertUpdateItem represents a GraphQL assert to update. +type GraphQLAssertUpdateItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + Params gen.UpdateGraphQLAssertParams + Patch patch.GraphQLAssertPatch +} + +// UpdateGraphQLAssert updates a GraphQL assert and tracks the event. +func (c *Context) UpdateGraphQLAssert(ctx context.Context, item GraphQLAssertUpdateItem) error { + if err := c.q.UpdateGraphQLAssert(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpUpdate, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + ParentID: item.GraphQLID, + IsDelta: item.IsDelta, + Patch: item.Patch, + }) + return nil +} + +// UpdateGraphQLAssertBatch updates multiple GraphQL asserts. +func (c *Context) UpdateGraphQLAssertBatch(ctx context.Context, items []GraphQLAssertUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQLAssert(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertDeltaUpdateItem represents a GraphQL assert delta to update. 
+type GraphQLAssertDeltaUpdateItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + Params gen.UpdateGraphQLAssertDeltaParams + Patch any + Payload any +} + +// UpdateGraphQLAssertDelta updates a GraphQL assert delta and tracks the event. +func (c *Context) UpdateGraphQLAssertDelta(ctx context.Context, item GraphQLAssertDeltaUpdateItem) error { + if err := c.q.UpdateGraphQLAssertDelta(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpUpdate, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + IsDelta: true, + Patch: item.Patch, + Payload: item.Payload, + }) + return nil +} + +// UpdateGraphQLAssertDeltaBatch updates multiple GraphQL assert deltas. +func (c *Context) UpdateGraphQLAssertDeltaBatch(ctx context.Context, items []GraphQLAssertDeltaUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQLAssertDelta(ctx, item); err != nil { + return err + } + } + return nil +} diff --git a/packages/server/pkg/patch/patch.go b/packages/server/pkg/patch/patch.go index 5b9c42f6f..ca01a17eb 100644 --- a/packages/server/pkg/patch/patch.go +++ b/packages/server/pkg/patch/patch.go @@ -110,6 +110,36 @@ func (p HTTPDeltaPatch) HasChanges() bool { return p.Name.IsSet() || p.Method.IsSet() || p.Url.IsSet() } +// GraphQLDeltaPatch represents sparse updates to GraphQL delta fields. +// +// Semantics: +// - Field.IsSet() == false = field not changed (omitted from update) +// - Field.IsUnset() == true = field explicitly UNSET/cleared +// - Field.HasValue() == true = field set to that value +type GraphQLDeltaPatch struct { + Name Optional[string] + URL Optional[string] + Query Optional[string] + Variables Optional[string] +} + +// HasChanges returns true if any field in the patch has been set +func (p GraphQLDeltaPatch) HasChanges() bool { + return p.Name.IsSet() || p.URL.IsSet() || p.Query.IsSet() || p.Variables.IsSet() +} + +// GraphQLAssertPatch represents sparse updates to GraphQL assert delta fields. 
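To make the IsSet / IsUnset / HasValue semantics documented above concrete, here is a self-contained toy Optional. It is not the package's patch.Optional (whose constructors are not shown in this diff); it only illustrates the three states a sparse patch field can take:

```go
package main

import "fmt"

// toyOptional mimics the three states documented for patch fields:
// not set (field omitted), explicitly unset (cleared), or set to a value.
type toyOptional[T any] struct {
	set   bool
	unset bool
	value T
}

func (o toyOptional[T]) IsSet() bool    { return o.set }
func (o toyOptional[T]) IsUnset() bool  { return o.set && o.unset }
func (o toyOptional[T]) HasValue() bool { return o.set && !o.unset }

func main() {
	var untouched toyOptional[string]                      // field omitted from the patch
	cleared := toyOptional[string]{set: true, unset: true} // field explicitly cleared
	changed := toyOptional[string]{set: true, value: "query { ping }"}

	for _, o := range []toyOptional[string]{untouched, cleared, changed} {
		switch {
		case !o.IsSet():
			fmt.Println("leave column as-is")
		case o.IsUnset():
			fmt.Println("clear delta column")
		default:
			fmt.Println("write new value:", o.value)
		}
	}
}
```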
+type GraphQLAssertPatch struct { + Value Optional[string] + Enabled Optional[bool] + Order Optional[float32] +} + +// HasChanges returns true if any field in the patch has been set +func (p GraphQLAssertPatch) HasChanges() bool { + return p.Value.IsSet() || p.Enabled.IsSet() || p.Order.IsSet() +} + // EdgePatch represents partial updates to an Edge type EdgePatch struct { SourceID Optional[string] // ID stored as base64 string for JSON compatibility diff --git a/packages/server/pkg/service/sflow/node_execution_mapper.go b/packages/server/pkg/service/sflow/node_execution_mapper.go index 81beb09ab..0449b86fe 100644 --- a/packages/server/pkg/service/sflow/node_execution_mapper.go +++ b/packages/server/pkg/service/sflow/node_execution_mapper.go @@ -34,6 +34,7 @@ func ConvertNodeExecutionToDB(ne mflow.NodeExecution) *gen.NodeExecution { OutputDataCompressType: ne.OutputDataCompressType, Error: errorSQL, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, } } @@ -62,6 +63,7 @@ func ConvertNodeExecutionToModel(ne gen.NodeExecution) *mflow.NodeExecution { OutputDataCompressType: ne.OutputDataCompressType, Error: errorPtr, ResponseID: responseIDPtr, + GraphQLResponseID: ne.GraphqlResponseID, CompletedAt: completedAtPtr, } } diff --git a/packages/server/pkg/service/sflow/node_execution_writer.go b/packages/server/pkg/service/sflow/node_execution_writer.go index 80d26759c..6b59dfdba 100644 --- a/packages/server/pkg/service/sflow/node_execution_writer.go +++ b/packages/server/pkg/service/sflow/node_execution_writer.go @@ -48,6 +48,7 @@ func (w *NodeExecutionWriter) CreateNodeExecution(ctx context.Context, ne mflow. OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) @@ -78,6 +79,7 @@ func (w *NodeExecutionWriter) UpdateNodeExecution(ctx context.Context, ne mflow. OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) @@ -112,6 +114,7 @@ func (w *NodeExecutionWriter) UpsertNodeExecution(ctx context.Context, ne mflow. 
OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) diff --git a/packages/server/pkg/service/sflow/node_graphql.go b/packages/server/pkg/service/sflow/node_graphql.go new file mode 100644 index 000000000..843fdc6e5 --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql.go @@ -0,0 +1,60 @@ +//nolint:revive // exported +package sflow + +import ( + "context" + "database/sql" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLService struct { + reader *NodeGraphQLReader + queries *gen.Queries +} + +func NewNodeGraphQLService(queries *gen.Queries) NodeGraphQLService { + return NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(queries), + queries: queries, + } +} + +func (ngs NodeGraphQLService) TX(tx *sql.Tx) NodeGraphQLService { + newQueries := ngs.queries.WithTx(tx) + return NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(newQueries), + queries: newQueries, + } +} + +func NewNodeGraphQLServiceTX(ctx context.Context, tx *sql.Tx) (*NodeGraphQLService, error) { + queries, err := gen.Prepare(ctx, tx) + if err != nil { + return nil, err + } + return &NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(queries), + queries: queries, + }, nil +} + +func (ngs NodeGraphQLService) GetNodeGraphQL(ctx context.Context, id idwrap.IDWrap) (*mflow.NodeGraphQL, error) { + return ngs.reader.GetNodeGraphQL(ctx, id) +} + +func (ngs NodeGraphQLService) CreateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).CreateNodeGraphQL(ctx, ng) +} + +func (ngs NodeGraphQLService) UpdateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).UpdateNodeGraphQL(ctx, ng) +} + +func (ngs NodeGraphQLService) DeleteNodeGraphQL(ctx context.Context, id idwrap.IDWrap) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).DeleteNodeGraphQL(ctx, id) +} + +func (ngs NodeGraphQLService) Reader() *NodeGraphQLReader { return ngs.reader } diff --git a/packages/server/pkg/service/sflow/node_graphql_mapper.go b/packages/server/pkg/service/sflow/node_graphql_mapper.go new file mode 100644 index 000000000..114174aee --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_mapper.go @@ -0,0 +1,41 @@ +package sflow + +import ( + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +func ConvertToDBNodeGraphQL(ng mflow.NodeGraphQL) (gen.FlowNodeGraphql, bool) { + if ng.GraphQLID == nil || isZeroID(*ng.GraphQLID) { + return gen.FlowNodeGraphql{}, false + } + + dbNode := gen.FlowNodeGraphql{ + FlowNodeID: ng.FlowNodeID, + GraphqlID: *ng.GraphQLID, + } + + if ng.DeltaGraphQLID != nil { + dbNode.DeltaGraphqlID = ng.DeltaGraphQLID.Bytes() + } + + return dbNode, true +} + +func ConvertToModelNodeGraphQL(ng gen.FlowNodeGraphql) *mflow.NodeGraphQL { + graphqlID := ng.GraphqlID + modelNode := &mflow.NodeGraphQL{ + FlowNodeID: ng.FlowNodeID, + GraphQLID: &graphqlID, + } + + if len(ng.DeltaGraphqlID) > 0 { + deltaID, err := idwrap.NewFromBytes(ng.DeltaGraphqlID) + if err == nil { + modelNode.DeltaGraphQLID = &deltaID + } 
+ } + + return modelNode +} diff --git a/packages/server/pkg/service/sflow/node_graphql_reader.go b/packages/server/pkg/service/sflow/node_graphql_reader.go new file mode 100644 index 000000000..a299682bd --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_reader.go @@ -0,0 +1,34 @@ +package sflow + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLReader struct { + queries *gen.Queries +} + +func NewNodeGraphQLReader(db *sql.DB) *NodeGraphQLReader { + return &NodeGraphQLReader{queries: gen.New(db)} +} + +func NewNodeGraphQLReaderFromQueries(queries *gen.Queries) *NodeGraphQLReader { + return &NodeGraphQLReader{queries: queries} +} + +func (r *NodeGraphQLReader) GetNodeGraphQL(ctx context.Context, id idwrap.IDWrap) (*mflow.NodeGraphQL, error) { + nodeGQL, err := r.queries.GetFlowNodeGraphQL(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + return nil, err + } + return ConvertToModelNodeGraphQL(nodeGQL), nil +} diff --git a/packages/server/pkg/service/sflow/node_graphql_writer.go b/packages/server/pkg/service/sflow/node_graphql_writer.go new file mode 100644 index 000000000..859beff0a --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_writer.go @@ -0,0 +1,47 @@ +package sflow + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLWriter struct { + queries *gen.Queries +} + +func NewNodeGraphQLWriter(tx gen.DBTX) *NodeGraphQLWriter { + return &NodeGraphQLWriter{queries: gen.New(tx)} +} + +func NewNodeGraphQLWriterFromQueries(queries *gen.Queries) *NodeGraphQLWriter { + return &NodeGraphQLWriter{queries: queries} +} + +func (w *NodeGraphQLWriter) CreateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + dbModel, ok := ConvertToDBNodeGraphQL(ng) + if !ok { + return nil + } + return w.queries.CreateFlowNodeGraphQL(ctx, gen.CreateFlowNodeGraphQLParams(dbModel)) +} + +func (w *NodeGraphQLWriter) UpdateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + dbModel, ok := ConvertToDBNodeGraphQL(ng) + if !ok { + // Treat removal of GraphQLID as request to delete any existing binding. 
+ if err := w.queries.DeleteFlowNodeGraphQL(ctx, ng.FlowNodeID); err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + return nil + } + return w.queries.UpdateFlowNodeGraphQL(ctx, gen.UpdateFlowNodeGraphQLParams(dbModel)) +} + +func (w *NodeGraphQLWriter) DeleteNodeGraphQL(ctx context.Context, id idwrap.IDWrap) error { + return w.queries.DeleteFlowNodeGraphQL(ctx, id) +} diff --git a/packages/server/pkg/service/sgraphql/assert.go b/packages/server/pkg/service/sgraphql/assert.go new file mode 100644 index 000000000..1f526415f --- /dev/null +++ b/packages/server/pkg/service/sgraphql/assert.go @@ -0,0 +1,295 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLAssertFound = errors.New("no graphql assert found") + +type GraphQLAssertService struct { + queries *gen.Queries +} + +func NewGraphQLAssertService(queries *gen.Queries) GraphQLAssertService { + return GraphQLAssertService{queries: queries} +} + +func (s GraphQLAssertService) TX(tx *sql.Tx) GraphQLAssertService { + return GraphQLAssertService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLAssertService) GetByID(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQLAssert, error) { + assert, err := s.queries.GetGraphQLAssert(ctx, id.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ErrNoGraphQLAssertFound + } + return nil, err + } + + result := convertGetGraphQLAssertRowToModel(assert) + return &result, nil +} + +func (s GraphQLAssertService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertsByGraphQLID(ctx, graphqlID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertsByGraphQLIDRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) GetByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + // Convert IDWraps to [][]byte + idBytes := make([][]byte, len(ids)) + for i, id := range ids { + idBytes[i] = id.Bytes() + } + + asserts, err := s.queries.GetGraphQLAssertsByIDs(ctx, idBytes) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertsByIDsRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) Create(ctx context.Context, assert *mgraphql.GraphQLAssert) error { + now := dbtime.DBNow() + assert.CreatedAt = now.Unix() + assert.UpdatedAt = now.Unix() + + params := gen.CreateGraphQLAssertParams{ + ID: assert.ID.Bytes(), + GraphqlID: assert.GraphQLID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + CreatedAt: assert.CreatedAt, + UpdatedAt: assert.UpdatedAt, + } + + // Handle delta fields + if assert.ParentGraphQLAssertID != nil { + params.ParentGraphqlAssertID = assert.ParentGraphQLAssertID.Bytes() + } + params.IsDelta = assert.IsDelta + params.DeltaValue = stringPtrToNullString(assert.DeltaValue) + params.DeltaEnabled = 
boolPtrToNullBool(assert.DeltaEnabled) + params.DeltaDescription = stringPtrToNullString(assert.DeltaDescription) + params.DeltaDisplayOrder = float32PtrToNullFloat64(assert.DeltaDisplayOrder) + + return s.queries.CreateGraphQLAssert(ctx, params) +} + +func (s GraphQLAssertService) Update(ctx context.Context, assert *mgraphql.GraphQLAssert) error { + return s.queries.UpdateGraphQLAssert(ctx, gen.UpdateGraphQLAssertParams{ + ID: assert.ID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + UpdatedAt: dbtime.DBNow().Unix(), + }) +} + +func (s GraphQLAssertService) UpdateDelta(ctx context.Context, id idwrap.IDWrap, deltaValue *string, deltaEnabled *bool, deltaDescription *string, deltaDisplayOrder *float32) error { + return s.queries.UpdateGraphQLAssertDelta(ctx, gen.UpdateGraphQLAssertDeltaParams{ + ID: id.Bytes(), + DeltaValue: stringPtrToNullString(deltaValue), + DeltaEnabled: boolPtrToNullBool(deltaEnabled), + DeltaDescription: stringPtrToNullString(deltaDescription), + DeltaDisplayOrder: float32PtrToNullFloat64(deltaDisplayOrder), + UpdatedAt: dbtime.DBNow().Unix(), + }) +} + +func (s GraphQLAssertService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return s.queries.DeleteGraphQLAssert(ctx, id.Bytes()) +} + +// Delta methods +func (s GraphQLAssertService) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertDeltasByWorkspaceIDRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertDeltasByParentIDRowToModel(a) + } + return result, nil +} + +// Row conversion helpers - convert sqlc row types to model +func convertGetGraphQLAssertRowToModel(row gen.GetGraphQLAssertRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertsByGraphQLIDRowToModel(row gen.GetGraphQLAssertsByGraphQLIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: 
row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertsByIDsRowToModel(row gen.GetGraphQLAssertsByIDsRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertDeltasByWorkspaceIDRowToModel(row gen.GetGraphQLAssertDeltasByWorkspaceIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertDeltasByParentIDRowToModel(row gen.GetGraphQLAssertDeltasByParentIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +// Conversion helpers +func stringPtrToNullString(s *string) sql.NullString { + if s == nil { + return sql.NullString{Valid: false} + } + return sql.NullString{String: *s, Valid: true} +} + +func boolPtrToNullBool(b *bool) sql.NullBool { + if b == nil { + return sql.NullBool{Valid: false} + } + return sql.NullBool{Bool: *b, Valid: true} +} + +func float32PtrToNullFloat64(f *float32) sql.NullFloat64 { + if f == nil { + return sql.NullFloat64{Valid: false} + } + return sql.NullFloat64{Float64: float64(*f), Valid: true} +} diff --git a/packages/server/pkg/service/sgraphql/header.go b/packages/server/pkg/service/sgraphql/header.go new file mode 100644 index 
000000000..04798fc78 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/header.go @@ -0,0 +1,121 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLHeaderFound = errors.New("no graphql header found") + +type GraphQLHeaderService struct { + queries *gen.Queries +} + +func NewGraphQLHeaderService(queries *gen.Queries) GraphQLHeaderService { + return GraphQLHeaderService{queries: queries} +} + +func (s GraphQLHeaderService) TX(tx *sql.Tx) GraphQLHeaderService { + return GraphQLHeaderService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLHeaderService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaders(ctx, graphqlID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + +func (s GraphQLHeaderService) GetByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeadersByIDs(ctx, ids) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + +func (s GraphQLHeaderService) Create(ctx context.Context, header *mgraphql.GraphQLHeader) error { + now := dbtime.DBNow() + header.CreatedAt = now.Unix() + header.UpdatedAt = now.Unix() + + return s.queries.CreateGraphQLHeader(ctx, gen.CreateGraphQLHeaderParams{ + ID: header.ID, + GraphqlID: header.GraphQLID, + HeaderKey: header.Key, + HeaderValue: header.Value, + Description: header.Description, + Enabled: header.Enabled, + DisplayOrder: float64(header.DisplayOrder), + CreatedAt: header.CreatedAt, + UpdatedAt: header.UpdatedAt, + }) +} + +func (s GraphQLHeaderService) Update(ctx context.Context, header *mgraphql.GraphQLHeader) error { + return s.queries.UpdateGraphQLHeader(ctx, gen.UpdateGraphQLHeaderParams{ + ID: header.ID, + HeaderKey: header.Key, + HeaderValue: header.Value, + Description: header.Description, + Enabled: header.Enabled, + DisplayOrder: float64(header.DisplayOrder), + }) +} + +func (s GraphQLHeaderService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return s.queries.DeleteGraphQLHeader(ctx, id) +} + +// Delta methods +func (s GraphQLHeaderService) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaderDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + +func (s GraphQLHeaderService) GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaderDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + 
return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} diff --git a/packages/server/pkg/service/sgraphql/mapper.go b/packages/server/pkg/service/sgraphql/mapper.go new file mode 100644 index 000000000..683900fbe --- /dev/null +++ b/packages/server/pkg/service/sgraphql/mapper.go @@ -0,0 +1,231 @@ +package sgraphql + +import ( + "time" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +func interfaceToInt64Ptr(v interface{}) *int64 { + if v == nil { + return nil + } + switch val := v.(type) { + case int64: + return &val + case int: + i := int64(val) + return &i + default: + return nil + } +} + +func interfaceToInt32(v interface{}) int32 { + switch val := v.(type) { + case int32: + return val + case int64: + return int32(val) //nolint:gosec // G115 + default: + return 0 + } +} + +func interfaceToStringPtr(v interface{}) *string { + if v == nil { + return nil + } + if str, ok := v.(string); ok { + return &str + } + return nil +} + +func interfaceToBoolPtr(v interface{}) *bool { + if v == nil { + return nil + } + if b, ok := v.(bool); ok { + return &b + } + return nil +} + +func interfaceToFloat32Ptr(v interface{}) *float32 { + if v == nil { + return nil + } + switch val := v.(type) { + case float32: + return &val + case float64: + f32 := float32(val) + return &f32 + default: + return nil + } +} + +func bytesToIDWrapPtr(b []byte) *idwrap.IDWrap { + if b == nil || len(b) == 0 { + return nil + } + id, err := idwrap.NewFromBytes(b) + if err != nil { + return nil + } + return &id +} + +func idWrapPtrToBytes(id *idwrap.IDWrap) []byte { + if id == nil { + return nil + } + return id.Bytes() +} + +func stringPtrToInterface(s *string) interface{} { + if s == nil { + return nil + } + return *s +} + +func boolPtrToInterface(b *bool) interface{} { + if b == nil { + return nil + } + return *b +} + +func float32PtrToInterface(f *float32) interface{} { + if f == nil { + return nil + } + return float64(*f) +} + +func ConvertToDBGraphQL(gql mgraphql.GraphQL) gen.Graphql { + var lastRunAt interface{} + if gql.LastRunAt != nil { + lastRunAt = *gql.LastRunAt + } + + return gen.Graphql{ + ID: gql.ID, + WorkspaceID: gql.WorkspaceID, + FolderID: gql.FolderID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + ParentGraphqlID: idWrapPtrToBytes(gql.ParentGraphQLID), + IsDelta: gql.IsDelta, + IsSnapshot: gql.IsSnapshot, + DeltaName: stringPtrToInterface(gql.DeltaName), + DeltaUrl: stringPtrToInterface(gql.DeltaUrl), + DeltaQuery: stringPtrToInterface(gql.DeltaQuery), + DeltaVariables: stringPtrToInterface(gql.DeltaVariables), + DeltaDescription: stringPtrToInterface(gql.DeltaDescription), + LastRunAt: lastRunAt, + CreatedAt: gql.CreatedAt, + UpdatedAt: gql.UpdatedAt, + } +} + +func ConvertToModelGraphQL(gql gen.Graphql) *mgraphql.GraphQL { + return &mgraphql.GraphQL{ + ID: gql.ID, + WorkspaceID: gql.WorkspaceID, + FolderID: gql.FolderID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + ParentGraphQLID: bytesToIDWrapPtr(gql.ParentGraphqlID), + IsDelta: gql.IsDelta, + IsSnapshot: gql.IsSnapshot, + DeltaName: interfaceToStringPtr(gql.DeltaName), + DeltaUrl: 
interfaceToStringPtr(gql.DeltaUrl), + DeltaQuery: interfaceToStringPtr(gql.DeltaQuery), + DeltaVariables: interfaceToStringPtr(gql.DeltaVariables), + DeltaDescription: interfaceToStringPtr(gql.DeltaDescription), + LastRunAt: interfaceToInt64Ptr(gql.LastRunAt), + CreatedAt: gql.CreatedAt, + UpdatedAt: gql.UpdatedAt, + } +} + +func ConvertToDBGraphQLResponse(resp mgraphql.GraphQLResponse) gen.GraphqlResponse { + return gen.GraphqlResponse{ + ID: resp.ID, + GraphqlID: resp.GraphQLID, + Status: resp.Status, + Body: resp.Body, + Time: time.Unix(resp.Time, 0), + Duration: resp.Duration, + Size: resp.Size, + CreatedAt: resp.CreatedAt, + } +} + +func ConvertToModelGraphQLResponse(resp gen.GraphqlResponse) mgraphql.GraphQLResponse { + return mgraphql.GraphQLResponse{ + ID: resp.ID, + GraphQLID: resp.GraphqlID, + Status: interfaceToInt32(resp.Status), + Body: resp.Body, + Time: resp.Time.Unix(), + Duration: interfaceToInt32(resp.Duration), + Size: interfaceToInt32(resp.Size), + CreatedAt: resp.CreatedAt, + } +} + +func ConvertToModelGraphQLHeader(h gen.GraphqlHeader) mgraphql.GraphQLHeader { + return mgraphql.GraphQLHeader{ + ID: h.ID, + GraphQLID: h.GraphqlID, + Key: h.HeaderKey, + Value: h.HeaderValue, + Enabled: h.Enabled, + Description: h.Description, + DisplayOrder: float32(h.DisplayOrder), + ParentGraphQLHeaderID: bytesToIDWrapPtr(h.ParentGraphqlHeaderID), + IsDelta: h.IsDelta, + DeltaKey: interfaceToStringPtr(h.DeltaHeaderKey), + DeltaValue: interfaceToStringPtr(h.DeltaHeaderValue), + DeltaEnabled: interfaceToBoolPtr(h.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(h.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(h.DeltaDisplayOrder), + CreatedAt: h.CreatedAt, + UpdatedAt: h.UpdatedAt, + } +} + +func ConvertToModelGraphQLAssert(a gen.GraphqlAssert) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(a.ID) + graphqlID, _ := idwrap.NewFromBytes(a.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: a.Value, + Enabled: a.Enabled, + Description: a.Description, + DisplayOrder: float32(a.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(a.ParentGraphqlAssertID), + IsDelta: a.IsDelta, + DeltaValue: interfaceToStringPtr(a.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(a.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(a.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(a.DeltaDisplayOrder), + CreatedAt: a.CreatedAt, + UpdatedAt: a.UpdatedAt, + } +} diff --git a/packages/server/pkg/service/sgraphql/reader.go b/packages/server/pkg/service/sgraphql/reader.go new file mode 100644 index 000000000..6f9e16c1d --- /dev/null +++ b/packages/server/pkg/service/sgraphql/reader.go @@ -0,0 +1,140 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + "fmt" + "log/slog" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type Reader struct { + queries *gen.Queries + logger *slog.Logger +} + +func NewReader(db *sql.DB, logger *slog.Logger) *Reader { + return &Reader{ + queries: gen.New(db), + logger: logger, + } +} + +func NewReaderFromQueries(queries *gen.Queries, logger *slog.Logger) *Reader { + return &Reader{ + queries: queries, + logger: logger, + } +} + +func (r *Reader) Get(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQL, error) { + gql, err := r.queries.GetGraphQL(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + if 
r.logger != nil { + r.logger.DebugContext(ctx, fmt.Sprintf("GraphQL ID: %s not found", id.String())) + } + return nil, ErrNoGraphQLFound + } + return nil, err + } + return ConvertToModelGraphQL(gql), nil +} + +func (r *Reader) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLsByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + workspaceID, err := r.queries.GetGraphQLWorkspaceID(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return idwrap.IDWrap{}, ErrNoGraphQLFound + } + return idwrap.IDWrap{}, err + } + return workspaceID, nil +} + +func (r *Reader) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetGraphQLVersionsByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLVersion, error) { + versions, err := r.queries.GetGraphQLVersionsByGraphQLID(ctx, graphqlID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLVersion{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLVersion, len(versions)) + for i, v := range versions { + var createdBy *idwrap.IDWrap + if len(v.CreatedBy) > 0 { + id, err := idwrap.NewFromBytes(v.CreatedBy) + if err == nil { + createdBy = &id + } + } + + id, _ := idwrap.NewFromBytes(v.ID) + gqlID, _ := idwrap.NewFromBytes(v.GraphqlID) + + result[i] = mgraphql.GraphQLVersion{ + ID: id, + GraphQLID: gqlID, + VersionName: v.VersionName, + VersionDescription: v.VersionDescription, + IsActive: v.IsActive, + CreatedAt: v.CreatedAt, + CreatedBy: createdBy, + } + } + return result, nil +} diff --git a/packages/server/pkg/service/sgraphql/response.go b/packages/server/pkg/service/sgraphql/response.go new file mode 100644 index 000000000..4373c2797 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/response.go @@ -0,0 +1,184 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLResponseFound = errors.New("no graphql response found") + +type GraphQLResponseService struct { + queries *gen.Queries +} + +func NewGraphQLResponseService(queries *gen.Queries) 
GraphQLResponseService { + return GraphQLResponseService{queries: queries} +} + +func (s GraphQLResponseService) TX(tx *sql.Tx) GraphQLResponseService { + return GraphQLResponseService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLResponseService) Create(ctx context.Context, resp mgraphql.GraphQLResponse) error { + return s.queries.CreateGraphQLResponse(ctx, gen.CreateGraphQLResponseParams{ + ID: resp.ID, + GraphqlID: resp.GraphQLID, + Status: resp.Status, + Body: resp.Body, + Time: time.Unix(resp.Time, 0), + Duration: resp.Duration, + Size: resp.Size, + CreatedAt: resp.CreatedAt, + }) +} + +func (s GraphQLResponseService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLResponse, error) { + responses, err := s.queries.GetGraphQLResponsesByGraphQLID(ctx, graphqlID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponse{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponse, len(responses)) + for i, resp := range responses { + result[i] = ConvertToModelGraphQLResponse(resp) + } + return result, nil +} + +func (s GraphQLResponseService) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponse, error) { + responses, err := s.queries.GetGraphQLResponsesByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponse{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponse, len(responses)) + for i, resp := range responses { + result[i] = ConvertToModelGraphQLResponse(resp) + } + return result, nil +} + +func (s GraphQLResponseService) CreateHeader(ctx context.Context, header mgraphql.GraphQLResponseHeader) error { + return s.queries.CreateGraphQLResponseHeader(ctx, gen.CreateGraphQLResponseHeaderParams{ + ID: header.ID, + ResponseID: header.ResponseID, + Key: header.HeaderKey, + Value: header.HeaderValue, + CreatedAt: header.CreatedAt, + }) +} + +func (s GraphQLResponseService) GetHeadersByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponseHeader, error) { + headers, err := s.queries.GetGraphQLResponseHeadersByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + result[i] = mgraphql.GraphQLResponseHeader{ + ID: h.ID, + ResponseID: h.ResponseID, + HeaderKey: h.Key, + HeaderValue: h.Value, + CreatedAt: h.CreatedAt, + } + } + return result, nil +} + +func (s GraphQLResponseService) GetHeadersByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]mgraphql.GraphQLResponseHeader, error) { + headers, err := s.queries.GetGraphQLResponseHeadersByResponseID(ctx, responseID) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + result[i] = mgraphql.GraphQLResponseHeader{ + ID: h.ID, + ResponseID: h.ResponseID, + HeaderKey: h.Key, + HeaderValue: h.Value, + CreatedAt: h.CreatedAt, + } + } + return result, nil +} + + +func (s GraphQLResponseService) CreateAssert(ctx context.Context, assert mgraphql.GraphQLResponseAssert) error { + return s.queries.CreateGraphQLResponseAssert(ctx, gen.CreateGraphQLResponseAssertParams{ + ID: assert.ID.Bytes(), + ResponseID: assert.ResponseID.Bytes(), + Value: assert.Value, + Success: assert.Success, + CreatedAt: assert.CreatedAt, + }) +} 
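+// The helpers below read stored assertion outcomes back out, either per response or across a workspace.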
+ +func (s GraphQLResponseService) GetAssertsByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]mgraphql.GraphQLResponseAssert, error) { + asserts, err := s.queries.GetGraphQLResponseAssertsByResponseID(ctx, responseID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseAssert, len(asserts)) + for i, a := range asserts { + id, _ := idwrap.NewFromBytes(a.ID) + respID, _ := idwrap.NewFromBytes(a.ResponseID) + + result[i] = mgraphql.GraphQLResponseAssert{ + ID: id, + ResponseID: respID, + Value: a.Value, + Success: a.Success, + CreatedAt: a.CreatedAt, + } + } + return result, nil +} + +func (s GraphQLResponseService) GetAssertsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponseAssert, error) { + asserts, err := s.queries.GetGraphQLResponseAssertsByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseAssert, len(asserts)) + for i, a := range asserts { + id, _ := idwrap.NewFromBytes(a.ID) + respID, _ := idwrap.NewFromBytes(a.ResponseID) + + result[i] = mgraphql.GraphQLResponseAssert{ + ID: id, + ResponseID: respID, + Value: a.Value, + Success: a.Success, + CreatedAt: a.CreatedAt, + } + } + return result, nil +} + diff --git a/packages/server/pkg/service/sgraphql/sgraphql.go b/packages/server/pkg/service/sgraphql/sgraphql.go new file mode 100644 index 000000000..120f02f65 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/sgraphql.go @@ -0,0 +1,62 @@ +package sgraphql + +import ( + "context" + "database/sql" + "log/slog" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLFound = sql.ErrNoRows + +type GraphQLService struct { + reader *Reader + queries *gen.Queries + logger *slog.Logger +} + +func New(queries *gen.Queries, logger *slog.Logger) GraphQLService { + return GraphQLService{ + reader: NewReaderFromQueries(queries, logger), + queries: queries, + logger: logger, + } +} + +func (s GraphQLService) TX(tx *sql.Tx) GraphQLService { + newQueries := s.queries.WithTx(tx) + return GraphQLService{ + reader: NewReaderFromQueries(newQueries, s.logger), + queries: newQueries, + logger: s.logger, + } +} + +func (s GraphQLService) Create(ctx context.Context, gql *mgraphql.GraphQL) error { + return NewWriterFromQueries(s.queries).Create(ctx, gql) +} + +func (s GraphQLService) Get(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQL, error) { + return s.reader.Get(ctx, id) +} + +func (s GraphQLService) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + return s.reader.GetByWorkspaceID(ctx, workspaceID) +} + +func (s GraphQLService) GetWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + return s.reader.GetWorkspaceID(ctx, id) +} + +func (s GraphQLService) Update(ctx context.Context, gql *mgraphql.GraphQL) error { + return NewWriterFromQueries(s.queries).Update(ctx, gql) +} + +func (s GraphQLService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return NewWriterFromQueries(s.queries).Delete(ctx, id) +} + +func (s GraphQLService) Reader() *Reader { return s.reader } diff --git 
a/packages/server/pkg/service/sgraphql/writer.go b/packages/server/pkg/service/sgraphql/writer.go new file mode 100644 index 000000000..35ccc2552 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/writer.go @@ -0,0 +1,96 @@ +package sgraphql + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type Writer struct { + queries *gen.Queries +} + +func NewWriterFromQueries(queries *gen.Queries) *Writer { + return &Writer{queries: queries} +} + +func (w *Writer) Create(ctx context.Context, gql *mgraphql.GraphQL) error { + now := dbtime.DBNow() + gql.CreatedAt = now.Unix() + gql.UpdatedAt = now.Unix() + + dbGQL := ConvertToDBGraphQL(*gql) + return w.queries.CreateGraphQL(ctx, gen.CreateGraphQLParams(dbGQL)) +} + +func (w *Writer) Update(ctx context.Context, gql *mgraphql.GraphQL) error { + gql.UpdatedAt = dbtime.DBNow().Unix() + + dbGQL := ConvertToDBGraphQL(*gql) + + if gql.IsDelta { + // Update delta fields for delta records + if err := w.queries.UpdateGraphQLDelta(ctx, gen.UpdateGraphQLDeltaParams{ + ID: dbGQL.ID, + DeltaName: dbGQL.DeltaName, + DeltaUrl: dbGQL.DeltaUrl, + DeltaQuery: dbGQL.DeltaQuery, + DeltaVariables: dbGQL.DeltaVariables, + DeltaDescription: dbGQL.DeltaDescription, + }); err != nil { + return err + } + // Fallthrough to update common fields (like LastRunAt) + } + + var lastRunAt interface{} + if gql.LastRunAt != nil { + lastRunAt = *gql.LastRunAt + } + + // Update base fields + return w.queries.UpdateGraphQL(ctx, gen.UpdateGraphQLParams{ + ID: gql.ID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + LastRunAt: lastRunAt, + }) +} + +func (w *Writer) Delete(ctx context.Context, id idwrap.IDWrap) error { + return w.queries.DeleteGraphQL(ctx, id) +} + +func (w *Writer) CreateGraphQLVersion(ctx context.Context, graphqlID, createdBy idwrap.IDWrap, versionName, versionDescription string) (*mgraphql.GraphQLVersion, error) { + id := idwrap.NewNow() + now := dbtime.DBNow().Unix() + + err := w.queries.CreateGraphQLVersion(ctx, gen.CreateGraphQLVersionParams{ + ID: id.Bytes(), + GraphqlID: graphqlID.Bytes(), + VersionName: versionName, + VersionDescription: versionDescription, + IsActive: true, + CreatedAt: now, + CreatedBy: createdBy.Bytes(), + }) + if err != nil { + return nil, err + } + + return &mgraphql.GraphQLVersion{ + ID: id, + GraphQLID: graphqlID, + VersionName: versionName, + VersionDescription: versionDescription, + IsActive: true, + CreatedAt: now, + CreatedBy: &createdBy, + }, nil +} diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter.go b/packages/server/pkg/translate/yamlflowsimplev2/converter.go index 290391f30..60cad4751 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter.go @@ -55,9 +55,17 @@ func ConvertSimplifiedYAML(data []byte, opts ConvertOptionsV2) (*ioworkspace.Wor } } + // Prepare GraphQL request templates + graphqlTemplates := make(map[string]YamlGraphQLDefV2) + for _, gql := range yamlFormat.GraphQLRequests { + if gql.Name != "" { + graphqlTemplates[gql.Name] = gql + } + } + // Process flows and generate HTTP requests for _, flowEntry := range yamlFormat.Flows { - flowData, err := processFlow(flowEntry, yamlFormat.Run, requestTemplates, 
opts) + flowData, err := processFlow(flowEntry, yamlFormat.Run, requestTemplates, graphqlTemplates, opts) if err != nil { return nil, fmt.Errorf("failed to process flow '%s': %w", flowEntry.Name, err) } diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go b/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go index 405692dcd..a120f88e5 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go @@ -15,7 +15,7 @@ import ( ) // processFlow processes a single flow and returns the generated data -func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, templates map[string]YamlRequestDefV2, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) { +func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, templates map[string]YamlRequestDefV2, graphqlTemplates map[string]YamlGraphQLDefV2, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) { result := &ioworkspace.WorkspaceBundle{} flowID := idwrap.NewNow() @@ -68,7 +68,7 @@ func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, template startNodeID := idwrap.NewNow() // Process steps - processRes, err := processSteps(flowEntry, templates, varMap, flowID, startNodeID, opts, result) + processRes, err := processSteps(flowEntry, templates, graphqlTemplates, varMap, flowID, startNodeID, opts, result) if err != nil { return nil, fmt.Errorf("failed to process steps: %w", err) } @@ -270,6 +270,10 @@ func mergeFlowData(result *ioworkspace.WorkspaceBundle, flowData *ioworkspace.Wo result.FlowAINodes = append(result.FlowAINodes, flowData.FlowAINodes...) result.FlowAIProviderNodes = append(result.FlowAIProviderNodes, flowData.FlowAIProviderNodes...) result.FlowAIMemoryNodes = append(result.FlowAIMemoryNodes, flowData.FlowAIMemoryNodes...) + + result.GraphQLRequests = append(result.GraphQLRequests, flowData.GraphQLRequests...) + result.GraphQLHeaders = append(result.GraphQLHeaders, flowData.GraphQLHeaders...) + result.FlowGraphQLNodes = append(result.FlowGraphQLNodes, flowData.FlowGraphQLNodes...) 
} func mergeAssociatedData(result *ioworkspace.WorkspaceBundle, assoc *HTTPAssociatedData) { diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go b/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go index 3365000e3..2d5f800c6 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go @@ -11,6 +11,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcondition" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/varsystem" ) @@ -27,7 +28,7 @@ func createStartNodeWithID(nodeID, flowID idwrap.IDWrap, result *ioworkspace.Wor } // processSteps processes all steps in a flow -func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV2, varMap varsystem.VarMap, flowID, startNodeID idwrap.IDWrap, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) (*StepProcessingResult, error) { +func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV2, graphqlTemplates map[string]YamlGraphQLDefV2, varMap varsystem.VarMap, flowID, startNodeID idwrap.IDWrap, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) (*StepProcessingResult, error) { nodeInfoMap := make(map[string]*nodeInfo) nodeList := make([]*nodeInfo, 0) startNodeFound := false @@ -43,6 +44,9 @@ func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV case stepWrapper.Request != nil: nodeName = stepWrapper.Request.Name dependsOn = stepWrapper.Request.DependsOn + case stepWrapper.GraphQL != nil: + nodeName = stepWrapper.GraphQL.Name + dependsOn = stepWrapper.GraphQL.DependsOn case stepWrapper.If != nil: nodeName = stepWrapper.If.Name dependsOn = stepWrapper.If.DependsOn @@ -98,6 +102,10 @@ func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV file := createFileForHTTP(*httpReq, opts) result.Files = append(result.Files, file) } + case stepWrapper.GraphQL != nil: + if err := processGraphQLStructStep(stepWrapper.GraphQL, nodeID, flowID, graphqlTemplates, opts, result); err != nil { + return nil, err + } case stepWrapper.If != nil: if stepWrapper.If.Condition == "" { return nil, NewYamlFlowErrorV2("missing required condition", "if", i) @@ -443,3 +451,94 @@ func processAIMemoryStructStep(step *YamlStepAIMemory, nodeID, flowID idwrap.IDW result.FlowAIMemoryNodes = append(result.FlowAIMemoryNodes, memoryNode) return nil } + +func processGraphQLStructStep(step *YamlStepGraphQL, nodeID, flowID idwrap.IDWrap, templates map[string]YamlGraphQLDefV2, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) error { + url := step.URL + query := step.Query + variables := step.Variables + var headers HeaderMapOrSlice + + if step.UseRequest != "" { + if tmpl, ok := templates[step.UseRequest]; ok { + if tmpl.URL != "" { + url = tmpl.URL + } + if tmpl.Query != "" { + query = tmpl.Query + } + if tmpl.Variables != "" { + variables = tmpl.Variables + } + headers = tmpl.Headers + } else { + return NewYamlFlowErrorV2(fmt.Sprintf("graphql step '%s' references unknown template '%s'", step.Name, step.UseRequest), "use_request", step.UseRequest) + } + } + + // Step-level values override template + if step.URL != "" { + url = step.URL + } + 
if step.Query != "" { + query = step.Query + } + if step.Variables != "" { + variables = step.Variables + } + if len(step.Headers) > 0 { + headers = append(headers, step.Headers...) + } + + if url == "" { + return NewYamlFlowErrorV2(fmt.Sprintf("graphql step '%s' missing required url", step.Name), "url", nil) + } + + gqlID := idwrap.NewNow() + now := time.Now().UnixMilli() + + gqlReq := mgraphql.GraphQL{ + ID: gqlID, + WorkspaceID: opts.WorkspaceID, + FolderID: opts.FolderID, + Name: step.Name, + Url: url, + Query: query, + Variables: variables, + CreatedAt: now, + UpdatedAt: now, + } + result.GraphQLRequests = append(result.GraphQLRequests, gqlReq) + + // Create headers + for i, h := range headers { + header := mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: gqlID, + Key: h.Name, + Value: h.Value, + Enabled: h.Enabled, + DisplayOrder: float32(i), + CreatedAt: now, + UpdatedAt: now, + } + result.GraphQLHeaders = append(result.GraphQLHeaders, header) + } + + // Create flow node + flowNode := mflow.Node{ + ID: nodeID, + FlowID: flowID, + Name: step.Name, + NodeKind: mflow.NODE_KIND_GRAPHQL, + } + result.FlowNodes = append(result.FlowNodes, flowNode) + + // Create GraphQL node linking flow node to GraphQL entity + graphqlNode := mflow.NodeGraphQL{ + FlowNodeID: nodeID, + GraphQLID: &gqlID, + } + result.FlowGraphQLNodes = append(result.FlowGraphQLNodes, graphqlNode) + + return nil +} diff --git a/packages/server/pkg/translate/yamlflowsimplev2/exporter.go b/packages/server/pkg/translate/yamlflowsimplev2/exporter.go index 2aa180d18..045d4bd42 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/exporter.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/exporter.go @@ -10,6 +10,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "gopkg.in/yaml.v3" @@ -116,6 +117,21 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { aiMemoryNodeMap[n.FlowNodeID] = n } + graphqlNodeMap := make(map[idwrap.IDWrap]mflow.NodeGraphQL) + for _, n := range data.FlowGraphQLNodes { + graphqlNodeMap[n.FlowNodeID] = n + } + + graphqlMap := make(map[idwrap.IDWrap]mgraphql.GraphQL) + for _, g := range data.GraphQLRequests { + graphqlMap[g.ID] = g + } + + graphqlHeadersMap := make(map[idwrap.IDWrap][]mgraphql.GraphQLHeader) + for _, h := range data.GraphQLHeaders { + graphqlHeadersMap[h.GraphQLID] = append(graphqlHeadersMap[h.GraphQLID], h) + } + // Credential Map (ID -> Credential) credentialMap := make(map[idwrap.IDWrap]mcredential.Credential) for _, c := range data.Credentials { @@ -212,6 +228,73 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { yamlFormat.Requests = requests } + // 2b. 
Build top-level graphql_requests section + graphqlIDToRequestName := make(map[idwrap.IDWrap]string) + graphqlNameUsed := make(map[string]bool) + + // First pass: collect all GraphQL requests used in flows + for _, flow := range data.Flows { + for _, n := range data.FlowNodes { + if n.FlowID != flow.ID || n.NodeKind != mflow.NODE_KIND_GRAPHQL { + continue + } + gqlNode, ok := graphqlNodeMap[n.ID] + if !ok || gqlNode.GraphQLID == nil { + continue + } + gqlReq, ok := graphqlMap[*gqlNode.GraphQLID] + if !ok { + continue + } + + if _, exists := graphqlIDToRequestName[gqlReq.ID]; exists { + continue + } + + gqlName := gqlReq.Name + if gqlName == "" { + gqlName = "GraphQL Request" + } + + baseName := gqlName + counter := 1 + for graphqlNameUsed[gqlName] { + gqlName = fmt.Sprintf("%s_%d", baseName, counter) + counter++ + } + graphqlNameUsed[gqlName] = true + graphqlIDToRequestName[gqlReq.ID] = gqlName + } + } + + // Second pass: build the graphql_requests section + var graphqlRequests []YamlGraphQLDefV2 + var graphqlIDs []idwrap.IDWrap + for gqlID := range graphqlIDToRequestName { + graphqlIDs = append(graphqlIDs, gqlID) + } + sort.Slice(graphqlIDs, func(i, j int) bool { + return graphqlIDToRequestName[graphqlIDs[i]] < graphqlIDToRequestName[graphqlIDs[j]] + }) + + for _, gqlID := range graphqlIDs { + gqlName := graphqlIDToRequestName[gqlID] + gqlReq := graphqlMap[gqlID] + + gqlDef := YamlGraphQLDefV2{ + Name: gqlName, + URL: gqlReq.Url, + Query: gqlReq.Query, + Variables: gqlReq.Variables, + Headers: buildGraphQLHeaderMapOrSlice(graphqlHeadersMap[gqlID]), + } + graphqlRequests = append(graphqlRequests, gqlDef) + } + + if len(graphqlRequests) > 0 { + yamlFormat.GraphQLRequests = graphqlRequests + } + // 3. Process each Flow flowNameUsed := make(map[string]bool) for _, flow := range data.Flows { @@ -443,6 +526,30 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { } stepWrapper.AIMemory = memoryStep + case mflow.NODE_KIND_GRAPHQL: + gqlNode, ok := graphqlNodeMap[node.ID] + if !ok || gqlNode.GraphQLID == nil { + continue + } + gqlReq, ok := graphqlMap[*gqlNode.GraphQLID] + if !ok { + continue + } + + gqlStep := &YamlStepGraphQL{ + YamlStepCommon: common, + } + + if gqlName, exists := graphqlIDToRequestName[gqlReq.ID]; exists { + gqlStep.UseRequest = gqlName + } else { + gqlStep.URL = gqlReq.Url + gqlStep.Query = gqlReq.Query + gqlStep.Variables = gqlReq.Variables + gqlStep.Headers = buildGraphQLHeaderMapOrSlice(graphqlHeadersMap[gqlReq.ID]) + } + stepWrapper.GraphQL = gqlStep + case mflow.NODE_KIND_MANUAL_START: if node.ID == startNodeID { stepWrapper.ManualStart = &common @@ -454,7 +561,7 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { // Add to flow // Because stepWrapper has pointer fields, "empty" fields are nil // Checking if any field is set (simplified check, assume one set if we got here) - isValid := stepWrapper.Request != nil || stepWrapper.If != nil || stepWrapper.For != nil || + isValid := stepWrapper.Request != nil || stepWrapper.GraphQL != nil || stepWrapper.If != nil || stepWrapper.For != nil || stepWrapper.ForEach != nil || stepWrapper.JS != nil || stepWrapper.AI != nil || stepWrapper.AIProvider != nil || stepWrapper.AIMemory != nil || stepWrapper.ManualStart != nil if isValid { @@ -523,6 +630,22 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { return yaml.Marshal(yamlFormat) } +func buildGraphQLHeaderMapOrSlice(headers []mgraphql.GraphQLHeader) HeaderMapOrSlice { + if len(headers) == 
0 { + return nil + } + var result []YamlNameValuePairV2 + for _, h := range headers { + result = append(result, YamlNameValuePairV2{ + Name: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + }) + } + return HeaderMapOrSlice(result) +} + type deltaLookupContext struct { httpMap map[idwrap.IDWrap]mhttp.HTTP headersMap map[idwrap.IDWrap][]mhttp.HTTPHeader diff --git a/packages/server/pkg/translate/yamlflowsimplev2/types.go b/packages/server/pkg/translate/yamlflowsimplev2/types.go index 26e88308a..79357d83b 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/types.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/types.go @@ -9,6 +9,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/compress" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" ) // YamlFlowFormatV2 represents the modern YAML structure for simplified workflows @@ -20,6 +21,7 @@ type YamlFlowFormatV2 struct { Run []YamlRunEntryV2 `yaml:"run,omitempty"` RequestTemplates map[string]YamlRequestDefV2 `yaml:"request_templates,omitempty"` Requests []YamlRequestDefV2 `yaml:"requests,omitempty"` + GraphQLRequests []YamlGraphQLDefV2 `yaml:"graphql_requests,omitempty"` Flows []YamlFlowFlowV2 `yaml:"flows"` Environments []YamlEnvironmentV2 `yaml:"environments,omitempty"` } @@ -51,6 +53,15 @@ type YamlRequestDefV2 struct { Description string `yaml:"description,omitempty"` } +// YamlGraphQLDefV2 represents a GraphQL request definition (template or standalone) +type YamlGraphQLDefV2 struct { + Name string `yaml:"name,omitempty"` + URL string `yaml:"url,omitempty"` + Query string `yaml:"query"` + Variables string `yaml:"variables,omitempty"` + Headers HeaderMapOrSlice `yaml:"headers,omitempty"` +} + // YamlFlowFlowV2 represents a flow in the modern YAML format type YamlFlowFlowV2 struct { Name string `yaml:"name"` @@ -64,6 +75,7 @@ type YamlFlowFlowV2 struct { // A step is a map with a single key that identifies the type type YamlStepWrapper struct { Request *YamlStepRequest `yaml:"request,omitempty"` + GraphQL *YamlStepGraphQL `yaml:"graphql,omitempty"` If *YamlStepIf `yaml:"if,omitempty"` For *YamlStepFor `yaml:"for,omitempty"` ForEach *YamlStepForEach `yaml:"for_each,omitempty"` @@ -91,6 +103,15 @@ type YamlStepRequest struct { Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` } +type YamlStepGraphQL struct { + YamlStepCommon `yaml:",inline"` + UseRequest string `yaml:"use_request,omitempty"` + URL string `yaml:"url,omitempty"` + Query string `yaml:"query,omitempty"` + Variables string `yaml:"variables,omitempty"` + Headers HeaderMapOrSlice `yaml:"headers,omitempty"` +} + type YamlStepIf struct { YamlStepCommon `yaml:",inline"` Condition string `yaml:"condition"` @@ -380,6 +401,10 @@ type YamlFlowDataV2 struct { // HTTP request data HTTPRequests []YamlHTTPRequestV2 + // GraphQL request data + GraphQLRequests []mgraphql.GraphQL + GraphQLHeaders []mgraphql.GraphQLHeader + // Flow node implementations RequestNodes []mflow.NodeRequest ConditionNodes []mflow.NodeIf @@ -389,6 +414,7 @@ type YamlFlowDataV2 struct { AINodes []mflow.NodeAI AIProviderNodes []mflow.NodeAiProvider AIMemoryNodes []mflow.NodeMemory + GraphQLNodes []mflow.NodeGraphQL } // YamlVariableV2 represents a variable during parsing diff --git a/packages/server/pkg/translate/yamlflowsimplev2/utils.go b/packages/server/pkg/translate/yamlflowsimplev2/utils.go 
index f4f9d9880..f9dcaec30 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/utils.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/utils.go @@ -246,6 +246,17 @@ func ValidateYAMLStructure(yamlFormat *YamlFlowFormatV2) error { } } + // Check for duplicate GraphQL request names + graphqlNames := make(map[string]bool) + for _, gql := range yamlFormat.GraphQLRequests { + if gql.Name != "" { + if graphqlNames[gql.Name] { + return NewYamlFlowErrorV2(fmt.Sprintf("duplicate graphql request name: %s", gql.Name), "graphql_requests", gql.Name) + } + graphqlNames[gql.Name] = true + } + } + // Check for flow dependencies that reference non-existent flows for _, runEntry := range yamlFormat.Run { flowName := runEntry.Flow diff --git a/packages/server/test/e2e_har_to_cli_test.go b/packages/server/test/e2e_har_to_cli_test.go index bc80a209e..35576c196 100644 --- a/packages/server/test/e2e_har_to_cli_test.go +++ b/packages/server/test/e2e_har_to_cli_test.go @@ -27,6 +27,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rimportv2" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream/memory" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/flowbuilder" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner" @@ -271,6 +272,9 @@ func TestE2E_HAR_To_CLI_Chain(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService cli.Workspace, cli.Variable, cli.FlowVariable, @@ -475,6 +479,16 @@ func executeFlow(ctx context.Context, flowPtr *mflow.Flow, c *cliServices, build }() defer close(requestRespChan) + gqlRespChan := make(chan ngraphql.NodeGraphQLSideResp, requestBufferSize) + go func() { + for resp := range gqlRespChan { + if resp.Done != nil { + close(resp.Done) + } + } + }() + defer close(gqlRespChan) + // Build flow node map flowNodeMap, startNodeID, err := builder.BuildNodes( ctx, @@ -483,6 +497,7 @@ func executeFlow(ctx context.Context, flowPtr *mflow.Flow, c *cliServices, build nodeTimeout, httpClient, requestRespChan, + gqlRespChan, nil, // No JS client needed for this test ) if err != nil { diff --git a/packages/spec/api/file-system.tsp b/packages/spec/api/file-system.tsp index b8d639bef..88fcdb0b7 100644 --- a/packages/spec/api/file-system.tsp +++ b/packages/spec/api/file-system.tsp @@ -8,6 +8,7 @@ enum FileKind { HttpDelta, Flow, Credential, + GraphQL, } @TanStackDB.collection diff --git a/packages/spec/api/flow.tsp b/packages/spec/api/flow.tsp index 5887df404..4068e905b 100644 --- a/packages/spec/api/flow.tsp +++ b/packages/spec/api/flow.tsp @@ -70,6 +70,7 @@ enum NodeKind { Ai, AiProvider, AiMemory, + GraphQL, } enum AiMemoryType { @@ -134,6 +135,13 @@ model NodeHttp { @foreignKey deltaHttpId?: Id; } +@TanStackDB.collection +model NodeGraphQL { + @primaryKey nodeId: Id; + @foreignKey graphqlId: Id; + @foreignKey deltaGraphqlId?: Id; +} + enum ErrorHandling { Ignore, Break, @@ -200,5 +208,6 @@ model NodeExecution { input?: Protobuf.WellKnown.Json; output?: Protobuf.WellKnown.Json; httpResponseId?: Id; + graphqlResponseId?: Id; completedAt?: Protobuf.WellKnown.Timestamp; } diff --git a/packages/spec/api/graphql.tsp b/packages/spec/api/graphql.tsp new file mode 100644 
index 000000000..9be323de3 --- /dev/null +++ b/packages/spec/api/graphql.tsp @@ -0,0 +1,91 @@ +using DevTools; + +namespace Api.GraphQL; + +@withDelta +@TanStackDB.collection +model GraphQL { + @primaryKey graphqlId: Id; + name: string; + url: string; + query: string; + variables: string; + lastRunAt?: Protobuf.WellKnown.Timestamp; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLVersion { + @primaryKey graphqlVersionId: Id; + @foreignKey graphqlId: Id; + @foreignKey deltaGraphqlId?: Id; + name: string; + description: string; + createdAt: int64; +} + +@withDelta +@TanStackDB.collection +model GraphQLHeader { + @primaryKey graphqlHeaderId: Id; + ...CommonTableFields; +} + +@withDelta +@TanStackDB.collection +model GraphQLAssert { + @primaryKey graphqlAssertId: Id; + @foreignKey graphqlId: Id; + value: string; + enabled: boolean; + order: float32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponse { + @primaryKey graphqlResponseId: Id; + @foreignKey graphqlId: Id; + status: int32; + body: string; + time: Protobuf.WellKnown.Timestamp; + duration: int32; + size: int32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseHeader { + @primaryKey graphqlResponseHeaderId: Id; + @foreignKey graphqlResponseId: Id; + key: string; + value: string; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseAssert { + @primaryKey graphqlResponseAssertId: Id; + @foreignKey graphqlResponseId: Id; + value: string; + success: boolean; +} + +model GraphQLRunRequest { + graphqlId: Id; +} + +op GraphQLRun(...GraphQLRunRequest): {}; + +model GraphQLDuplicateRequest { + graphqlId: Id; +} + +op GraphQLDuplicate(...GraphQLDuplicateRequest): {}; + +model GraphQLIntrospectRequest { + graphqlId: Id; +} + +model GraphQLIntrospectResponse { + sdl: string; + introspectionJson: string; +} + +op GraphQLIntrospect(...GraphQLIntrospectRequest): GraphQLIntrospectResponse; diff --git a/packages/spec/api/main.tsp b/packages/spec/api/main.tsp index 89c8ab389..329652cf7 100644 --- a/packages/spec/api/main.tsp +++ b/packages/spec/api/main.tsp @@ -7,6 +7,7 @@ import "./environment.tsp"; import "./export.tsp"; import "./file-system.tsp"; import "./flow.tsp"; +import "./graphql.tsp"; import "./health.tsp"; import "./http.tsp"; import "./import.tsp"; diff --git a/packages/spec/api/reference.tsp b/packages/spec/api/reference.tsp index 2867af993..e3ecec5f1 100644 --- a/packages/spec/api/reference.tsp +++ b/packages/spec/api/reference.tsp @@ -45,6 +45,8 @@ model ReferenceContext { workspaceId?: Id; httpId?: Id; deltaHttpId?: Id; + graphqlId?: Id; + deltaGraphqlId?: Id; flowNodeId?: Id; }
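Usage note (editorial, not part of the patch): a minimal sketch of wiring the new sgraphql service added in this diff against an existing *sql.DB. The package name, the createAndList function, the logger setup, and the example URL/query are placeholders; the gen, idwrap, mgraphql, and sgraphql APIs are taken from the files introduced above, and nullable schema columns such as FolderID are omitted for brevity.

package sgraphqlexample

import (
	"context"
	"database/sql"
	"log/slog"
	"os"

	"github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen"
	"github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap"
	"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql"
	"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql"
)

// createAndList stores one GraphQL request and reads back everything in the workspace.
func createAndList(ctx context.Context, db *sql.DB, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	svc := sgraphql.New(gen.New(db), logger)

	gql := &mgraphql.GraphQL{
		ID:          idwrap.NewNow(),
		WorkspaceID: workspaceID,
		Name:        "ListUsers",                       // placeholder name
		Url:         "https://api.example.com/graphql", // placeholder endpoint
		Query:       "query { users { id name } }",
	}
	// Create stamps CreatedAt/UpdatedAt through the writer before persisting.
	if err := svc.Create(ctx, gql); err != nil {
		return nil, err
	}

	// Read every GraphQL request stored for the workspace back out.
	return svc.GetByWorkspaceID(ctx, workspaceID)
}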