From 3603f98a26abc6cb6bbfc5f847a53ba0a56e265d Mon Sep 17 00:00:00 2001
From: Alisha
Date: Mon, 2 Dec 2024 14:27:27 -0800
Subject: [PATCH 01/14] CLI sync S3 end-to-end integration test

---
 .github/workflows/go.yml | 5 +
 .../integration-test/integration-test-util.go | 34 ++
 .../pkg/integration-test/integration-test.go | 23 ++
 .../jobs-service_integration_test.go | 26 +-
 .../neosync/sync/sync_integration_test.go | 283 +++++++++++--
 .../testutil/testcontainers/mysql/mysql.go | 31 ++
 .../testcontainers/postgres/postgres.go | 30 ++
 .../testdata/gen_jobmappings_config.json | 22 +
 internal/testutil/testdata/generators.go | 3 +
 .../testutil/testdata/jobmapping_generator.go | 362 ++++++++++++++++
 .../testdata/mysql/alltypes/create-schema.sql | 2 -
 .../testdata/mysql/alltypes/create-tables.sql | 5 +-
 .../testdata/mysql/alltypes/job_mappings.go | 263 ++++++++++++
 .../testdata/mysql/alltypes/teardown.sql | 1 -
 .../mysql/humanresources/create-schema.sql | 1 -
 .../mysql/humanresources/create-tables.sql | 18 +-
 .../mysql/humanresources/job_mappings.go | 383 +++++++++++++++++
 .../mysql/humanresources/teardown.sql | 1 -
 .../postgres/alltypes/create-schema.sql | 1 -
 .../postgres/alltypes/create-tables.sql | 45 +-
 .../postgres/alltypes/job_mappings.go | 391 ++++++++++++++++++
 .../testdata/postgres/alltypes/teardown.sql | 1 -
 .../postgres/humanresources/create-schema.sql | 1 -
 .../postgres/humanresources/create-tables.sql | 3 -
 .../postgres/humanresources/job_mappings.go | 311 ++++++++++++
 .../postgres/humanresources/teardown.sql | 1 -
 internal/testutil/utils.go | 32 ++
 .../pkg/integration-test/datasync_workflow.go | 82 ++++
 .../postgres/all-types/job_mappings.go | 240 -----------
 29 files changed, 2261 insertions(+), 340 deletions(-)
 create mode 100644 internal/testutil/testdata/gen_jobmappings_config.json
 create mode 100644 internal/testutil/testdata/generators.go
 create mode 100644 internal/testutil/testdata/jobmapping_generator.go
 delete mode 100644 internal/testutil/testdata/mysql/alltypes/create-schema.sql
 create mode 100644 internal/testutil/testdata/mysql/alltypes/job_mappings.go
 delete mode 100644 internal/testutil/testdata/mysql/alltypes/teardown.sql
 delete mode 100644 internal/testutil/testdata/mysql/humanresources/create-schema.sql
 create mode 100644 internal/testutil/testdata/mysql/humanresources/job_mappings.go
 delete mode 100644 internal/testutil/testdata/mysql/humanresources/teardown.sql
 delete mode 100644 internal/testutil/testdata/postgres/alltypes/create-schema.sql
 create mode 100644 internal/testutil/testdata/postgres/alltypes/job_mappings.go
 delete mode 100644 internal/testutil/testdata/postgres/alltypes/teardown.sql
 delete mode 100644 internal/testutil/testdata/postgres/humanresources/create-schema.sql
 create mode 100644 internal/testutil/testdata/postgres/humanresources/job_mappings.go
 delete mode 100644 internal/testutil/testdata/postgres/humanresources/teardown.sql
 create mode 100644 worker/pkg/integration-test/datasync_workflow.go

diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index 013844aaed..d9e5fd78db 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -95,6 +95,11 @@ jobs:
           go test -race -timeout 1800s -coverprofile=integration-coverage.out -covermode=atomic -run TestIntegrationTestSuite ./... 
env: INTEGRATION_TESTS_ENABLED: 1 + S3_INTEGRATION_TESTS_ENABLED: 1 + TEST_S3_REGION: us-west-2 + TEST_S3_BUCKET: neosync-integration-test + TEST_S3_ACCESS_KEY_ID: ${{ secrets.TEST_S3_ACCESS_KEY_ID }} + TEST_S3_SECRET_ACCESS_KEY: ${{ secrets.TEST_S3_SECRET_ACCESS_KEY }} - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 diff --git a/backend/pkg/integration-test/integration-test-util.go b/backend/pkg/integration-test/integration-test-util.go index 01e50da9f1..ce9d17bb15 100644 --- a/backend/pkg/integration-test/integration-test-util.go +++ b/backend/pkg/integration-test/integration-test-util.go @@ -76,6 +76,40 @@ func CreateMysqlConnection( return resp.Msg.GetConnection() } +func CreateS3Connection( + ctx context.Context, + t *testing.T, + connclient mgmtv1alpha1connect.ConnectionServiceClient, + accountId, name string, + accessKeyId, secretAccessKey string, + bucket string, + region *string, +) *mgmtv1alpha1.Connection { + resp, err := connclient.CreateConnection( + ctx, + connect.NewRequest(&mgmtv1alpha1.CreateConnectionRequest{ + AccountId: accountId, + Name: name, + ConnectionConfig: &mgmtv1alpha1.ConnectionConfig{ + Config: &mgmtv1alpha1.ConnectionConfig_AwsS3Config{ + AwsS3Config: &mgmtv1alpha1.AwsS3ConnectionConfig{ + Bucket: bucket, + PathPrefix: nil, + Region: region, + Endpoint: nil, + Credentials: &mgmtv1alpha1.AwsS3Credentials{ + AccessKeyId: &accessKeyId, + SecretAccessKey: &secretAccessKey, + }, + }, + }, + }, + }), + ) + RequireNoErrResp(t, resp, err) + return resp.Msg.GetConnection() +} + func SetUser(ctx context.Context, t *testing.T, client mgmtv1alpha1connect.UserAccountServiceClient) string { resp, err := client.SetUser(ctx, connect.NewRequest(&mgmtv1alpha1.SetUserRequest{})) RequireNoErrResp(t, resp, err) diff --git a/backend/pkg/integration-test/integration-test.go b/backend/pkg/integration-test/integration-test.go index 6de6851b10..bb97659b6d 100644 --- a/backend/pkg/integration-test/integration-test.go +++ b/backend/pkg/integration-test/integration-test.go @@ -42,6 +42,7 @@ import ( neomigrate "github.com/nucleuscloud/neosync/internal/migrate" promapiv1mock "github.com/nucleuscloud/neosync/internal/mocks/github.com/prometheus/client_golang/api/prometheus/v1" tcpostgres "github.com/nucleuscloud/neosync/internal/testutil/testcontainers/postgres" + "github.com/stretchr/testify/mock" ) var ( @@ -273,6 +274,7 @@ func (s *NeosyncApiTestClient) Setup(ctx context.Context, t *testing.T) error { jobhookService := jobhooks.New( neosyncdb.New(pgcontainer.DB, db_queries.New()), unauthdUserService, + jobhooks.WithEnabled(), ) unauthdJobsService := v1alpha1_jobservice.New( @@ -420,6 +422,27 @@ func (s *NeosyncApiTestClient) Setup(ctx context.Context, t *testing.T) error { return nil } +func (s *NeosyncApiTestClient) MockTemporalForCreateJob(returnId string) { + s.Mocks.TemporalClientManager. + On( + "DoesAccountHaveNamespace", mock.Anything, mock.Anything, mock.Anything, + ). + Return(true, nil). + Once() + s.Mocks.TemporalClientManager. + On( + "GetSyncJobTaskQueue", mock.Anything, mock.Anything, mock.Anything, + ). + Return("sync-job", nil). + Once() + s.Mocks.TemporalClientManager. + On( + "CreateSchedule", mock.Anything, mock.Anything, mock.Anything, mock.Anything, + ). + Return(returnId, nil). 
+ Once() +} + func (s *NeosyncApiTestClient) InitializeTest(ctx context.Context) error { discardLogger := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{})) err := neomigrate.Up(ctx, s.Pgcontainer.URL, s.migrationsDir, discardLogger) diff --git a/backend/services/mgmt/v1alpha1/integration_tests/jobs-service_integration_test.go b/backend/services/mgmt/v1alpha1/integration_tests/jobs-service_integration_test.go index 3c98a387de..1b4b2e6d9d 100644 --- a/backend/services/mgmt/v1alpha1/integration_tests/jobs-service_integration_test.go +++ b/backend/services/mgmt/v1alpha1/integration_tests/jobs-service_integration_test.go @@ -9,7 +9,6 @@ import ( "github.com/google/uuid" mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1/mgmtv1alpha1connect" - "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" ) @@ -29,7 +28,7 @@ func (s *IntegrationTestSuite) Test_CreateJob_Ok() { srcconn := s.createPostgresConnection(s.UnauthdClients.Connections, accountId, "source", "test") destconn := s.createPostgresConnection(s.UnauthdClients.Connections, accountId, "dest", "test2") - s.mockTemporalForCreateJob("test-id") + s.MockTemporalForCreateJob("test-id") resp, err := s.UnauthdClients.Jobs.CreateJob(s.ctx, connect.NewRequest(&mgmtv1alpha1.CreateJobRequest{ AccountId: accountId, @@ -59,27 +58,6 @@ func (s *IntegrationTestSuite) Test_CreateJob_Ok() { require.NotNil(s.T(), resp.Msg.GetJob()) } -func (s *IntegrationTestSuite) mockTemporalForCreateJob(returnId string) { - s.Mocks.TemporalClientManager. - On( - "DoesAccountHaveNamespace", mock.Anything, mock.Anything, mock.Anything, - ). - Return(true, nil). - Once() - s.Mocks.TemporalClientManager. - On( - "GetSyncJobTaskQueue", mock.Anything, mock.Anything, mock.Anything, - ). - Return("sync-job", nil). - Once() - s.Mocks.TemporalClientManager. - On( - "CreateSchedule", mock.Anything, mock.Anything, mock.Anything, mock.Anything, - ). - Return(returnId, nil). 
- Once() -} - func (s *IntegrationTestSuite) Test_JobService_JobHooks() { t := s.T() ctx := s.ctx @@ -131,7 +109,7 @@ func (s *IntegrationTestSuite) Test_JobService_JobHooks() { srcconn := s.createPostgresConnection(s.NeosyncCloudClients.GetConnectionClient(testAuthUserId), accountId, "source", "test") destconn := s.createPostgresConnection(s.NeosyncCloudClients.GetConnectionClient(testAuthUserId), accountId, "dest", "test2") - s.mockTemporalForCreateJob("test-id") + s.MockTemporalForCreateJob("test-id") jobResp, err := client.CreateJob(ctx, connect.NewRequest(&mgmtv1alpha1.CreateJobRequest{ JobName: "cloud-testjob-1", AccountId: accountId, diff --git a/cli/internal/cmds/neosync/sync/sync_integration_test.go b/cli/internal/cmds/neosync/sync/sync_integration_test.go index bad167d6a8..e105c41981 100644 --- a/cli/internal/cmds/neosync/sync/sync_integration_test.go +++ b/cli/internal/cmds/neosync/sync/sync_integration_test.go @@ -2,13 +2,19 @@ package sync_cmd import ( "context" + "fmt" "testing" + "connectrpc.com/connect" + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" tcneosyncapi "github.com/nucleuscloud/neosync/backend/pkg/integration-test" "github.com/nucleuscloud/neosync/cli/internal/output" "github.com/nucleuscloud/neosync/internal/testutil" tcmysql "github.com/nucleuscloud/neosync/internal/testutil/testcontainers/mysql" tcpostgres "github.com/nucleuscloud/neosync/internal/testutil/testcontainers/postgres" + mysqlalltypes "github.com/nucleuscloud/neosync/internal/testutil/testdata/mysql/alltypes" + pgalltypes "github.com/nucleuscloud/neosync/internal/testutil/testdata/postgres/alltypes" + workertest "github.com/nucleuscloud/neosync/worker/pkg/integration-test" "github.com/stretchr/testify/require" ) @@ -24,36 +30,58 @@ func Test_Sync(t *testing.T) { neosyncApi, err := tcneosyncapi.NewNeosyncApiTestClient(ctx, t, tcneosyncapi.WithMigrationsDirectory(neosyncDbMigrationsPath)) if err != nil { - panic(err) + t.Fatal(err) } connclient := neosyncApi.UnauthdClients.Connections conndataclient := neosyncApi.UnauthdClients.ConnectionData + jobclient := neosyncApi.UnauthdClients.Jobs sqlmanagerclient := tcneosyncapi.NewTestSqlManagerClient() accountId := tcneosyncapi.CreatePersonalAccount(ctx, t, neosyncApi.UnauthdClients.Users) + awsS3Config := testutil.GetTestAwsS3Config() + s3Conn := tcneosyncapi.CreateS3Connection( + ctx, + t, + connclient, + accountId, + "s3-conn", + awsS3Config.AccessKeyId, + awsS3Config.SecretAccessKey, + awsS3Config.Bucket, + &awsS3Config.Region, + ) outputType := output.PlainOutput t.Run("postgres", func(t *testing.T) { t.Parallel() postgres, err := tcpostgres.NewPostgresTestSyncContainer(ctx, []tcpostgres.Option{}, []tcpostgres.Option{}) if err != nil { - panic(err) + t.Fatal(err) } testdataFolder := "../../../../../internal/testutil/testdata/postgres" - err = postgres.Source.RunSqlFiles(ctx, &testdataFolder, []string{"humanresources/create-tables.sql", "alltypes/create-tables.sql"}) - if err != nil { - panic(err) - } - err = postgres.Target.RunSqlFiles(ctx, &testdataFolder, []string{"humanresources/create-schema.sql", "alltypes/create-schema.sql"}) - if err != nil { - panic(err) - } sourceConn := tcneosyncapi.CreatePostgresConnection(ctx, t, neosyncApi.UnauthdClients.Connections, accountId, "postgres-source", postgres.Source.URL) - t.Run("sync", func(t *testing.T) { - discardLogger := testutil.GetTestLogger(t) + t.Run("postgres_sync", func(t *testing.T) { + // can't be run in parallel yet + // right now CLI sync and init schema takes 
everything in the source and copies it to the target, since there are no job mappings defined by the user
+			// so it can't be scoped to a specific schema
+			// t.Parallel()
+			err = postgres.Source.RunCreateStmtsInSchema(ctx, &testdataFolder, []string{"humanresources/create-tables.sql"}, "humanresources")
+			if err != nil {
+				t.Fatal(err)
+			}
+			err = postgres.Source.RunCreateStmtsInSchema(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, "alltypes")
+			if err != nil {
+				t.Fatal(err)
+			}
+			err = postgres.Target.CreateSchemas(ctx, []string{"humanresources", "alltypes"})
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			testlogger := testutil.GetTestLogger(t)
 			cmdConfig := &cmdConfig{
 				Source: &sourceConfig{
 					ConnectionId: sourceConn.Id,
@@ -73,7 +101,7 @@ func Test_Sync(t *testing.T) {
 				connectionclient:     connclient,
 				sqlmanagerclient:     sqlmanagerclient,
 				ctx:                  ctx,
-				logger:               discardLogger,
+				logger:               testlogger,
 				cmd:                  cmdConfig,
 			}
 			err := sync.configureAndRunSync()
@@ -90,7 +118,104 @@ func Test_Sync(t *testing.T) {
 			require.NoError(t, err)
 			require.Greater(t, rowCount, 1)
 
-			rows = postgres.Target.DB.QueryRow(ctx, "select count(*) from alltypes.all_postgres_types;")
+			rows = postgres.Target.DB.QueryRow(ctx, "select count(*) from alltypes.all_data_types;")
 			err = rows.Scan(&rowCount)
 			require.NoError(t, err)
 			require.Greater(t, rowCount, 1)
 		})
+
+		t.Run("S3_end_to_end", func(t *testing.T) {
+			t.Parallel()
+			ok := testutil.ShouldRunS3IntegrationTest()
+			if !ok {
+				return
+			}
+
+			alltypesSchema := "alltypes_s3_pg"
+			err := postgres.Source.RunCreateStmtsInSchema(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, alltypesSchema)
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			err = postgres.Target.RunCreateStmtsInSchema(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, alltypesSchema)
+			if err != nil {
+				t.Fatal(err)
+			}
+
+			neosyncApi.MockTemporalForCreateJob("cli-test-sync")
+			job, err := jobclient.CreateJob(ctx, connect.NewRequest(&mgmtv1alpha1.CreateJobRequest{
+				AccountId: accountId,
+				JobName:   "S3 to PG",
+				Source: &mgmtv1alpha1.JobSource{
+					Options: &mgmtv1alpha1.JobSourceOptions{
+						Config: &mgmtv1alpha1.JobSourceOptions_Postgres{
+							Postgres: &mgmtv1alpha1.PostgresSourceConnectionOptions{
+								ConnectionId:                  sourceConn.Id,
+								Schemas:                       []*mgmtv1alpha1.PostgresSourceSchemaOption{},
+								SubsetByForeignKeyConstraints: true,
+							},
+						},
+					},
+				},
+				Destinations: []*mgmtv1alpha1.CreateJobDestination{
+					{
+						ConnectionId: s3Conn.Id,
+						Options: &mgmtv1alpha1.JobDestinationOptions{
+							Config: &mgmtv1alpha1.JobDestinationOptions_AwsS3Options{
+								AwsS3Options: &mgmtv1alpha1.AwsS3DestinationConnectionOptions{},
+							},
+						},
+					},
+				},
+				Mappings: pgalltypes.GetDefaultSyncJobMappings(alltypesSchema),
+			}))
+			require.NoError(t, err)
+
+			t.Run("Postgres_to_S3", func(t *testing.T) {
+				env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId())
+				require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. 
Test: pg_s3") + err = env.GetWorkflowError() + require.NoError(t, err, "Received Temporal Workflow Error", "testName", "pg_s3") + }) + + t.Run("S3_to_Postgres", func(t *testing.T) { + testlogger := testutil.GetTestLogger(t) + cmdConfig := &cmdConfig{ + Source: &sourceConfig{ + ConnectionId: s3Conn.Id, + ConnectionOpts: &connectionOpts{ + JobId: &job.Msg.Job.Id, + }, + }, + Destination: &sqlDestinationConfig{ + ConnectionUrl: postgres.Target.URL, + Driver: postgresDriver, + InitSchema: false, + TruncateBeforeInsert: true, + TruncateCascade: true, + }, + OutputType: &outputType, + AccountId: &accountId, + } + sync := &clisync{ + connectiondataclient: conndataclient, + connectionclient: connclient, + sqlmanagerclient: sqlmanagerclient, + ctx: ctx, + logger: testlogger, + cmd: cmdConfig, + } + err := sync.configureAndRunSync() + require.NoError(t, err) + }) + + var rowCount int + rows := postgres.Target.DB.QueryRow(ctx, fmt.Sprintf("select count(*) from %s.all_data_types;", alltypesSchema)) + err = rows.Scan(&rowCount) + require.NoError(t, err) + require.Greater(t, rowCount, 1) + + rows = postgres.Target.DB.QueryRow(ctx, fmt.Sprintf("select count(*) from %s.json_data;", alltypesSchema)) err = rows.Scan(&rowCount) require.NoError(t, err) require.Greater(t, rowCount, 1) @@ -99,7 +224,7 @@ func Test_Sync(t *testing.T) { t.Cleanup(func() { err := postgres.TearDown(ctx) if err != nil { - panic(err) + t.Fatal(err) } }) }) @@ -108,22 +233,30 @@ func Test_Sync(t *testing.T) { t.Parallel() mysql, err := tcmysql.NewMysqlTestSyncContainer(ctx, []tcmysql.Option{}, []tcmysql.Option{}) if err != nil { - panic(err) + t.Fatal(err) } testdataFolder := "../../../../../internal/testutil/testdata/mysql" - err = mysql.Source.RunSqlFiles(ctx, &testdataFolder, []string{"humanresources/create-tables.sql", "alltypes/create-tables.sql"}) - if err != nil { - panic(err) - } - err = mysql.Target.RunSqlFiles(ctx, &testdataFolder, []string{"humanresources/create-schema.sql", "alltypes/create-schema.sql"}) - if err != nil { - panic(err) - } sourceConn := tcneosyncapi.CreateMysqlConnection(ctx, t, neosyncApi.UnauthdClients.Connections, accountId, "mysql-source", mysql.Source.URL) - t.Run("sync", func(t *testing.T) { - discardLogger := testutil.GetTestLogger(t) + t.Run("mysql_sync", func(t *testing.T) { + // can't be run in parallel yet + // right now CLI sync and init schema takes everything in source and copies it to target since there are no job mappings defined by the user + // so it can't be scoped to specific schema + // t.Parallel() + err = mysql.Source.RunCreateStmtsInDatabase(ctx, &testdataFolder, []string{"humanresources/create-tables.sql"}, "humanresources") + if err != nil { + t.Fatal(err) + } + err = mysql.Source.RunCreateStmtsInDatabase(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, "alltypes") + if err != nil { + t.Fatal(err) + } + err = mysql.Target.CreateDatabases(ctx, []string{"humanresources", "alltypes"}) + if err != nil { + t.Fatal(err) + } + testlogger := testutil.GetTestLogger(t) cmdConfig := &cmdConfig{ Source: &sourceConfig{ ConnectionId: sourceConn.Id, @@ -142,7 +275,7 @@ func Test_Sync(t *testing.T) { connectionclient: connclient, sqlmanagerclient: sqlmanagerclient, ctx: ctx, - logger: discardLogger, + logger: testlogger, cmd: cmdConfig, } err := sync.configureAndRunSync() @@ -165,6 +298,102 @@ func Test_Sync(t *testing.T) { require.Greater(t, rowCount, 1) }) + t.Run("S3_end_to_end", func(t *testing.T) { + t.Parallel() + ok := testutil.ShouldRunS3IntegrationTest() + if !ok { + return + 
} + + alltypesSchema := "alltypes_s3_mysql" + err := mysql.Source.RunCreateStmtsInDatabase(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, alltypesSchema) + if err != nil { + t.Fatal(err) + } + + err = mysql.Target.RunCreateStmtsInDatabase(ctx, &testdataFolder, []string{"alltypes/create-tables.sql"}, alltypesSchema) + if err != nil { + t.Fatal(err) + } + + neosyncApi.MockTemporalForCreateJob("cli-test-sync") + job, err := jobclient.CreateJob(ctx, connect.NewRequest(&mgmtv1alpha1.CreateJobRequest{ + AccountId: accountId, + JobName: "S3 to Mysql", + Source: &mgmtv1alpha1.JobSource{ + Options: &mgmtv1alpha1.JobSourceOptions{ + Config: &mgmtv1alpha1.JobSourceOptions_Mysql{ + Mysql: &mgmtv1alpha1.MysqlSourceConnectionOptions{ + ConnectionId: sourceConn.Id, + Schemas: []*mgmtv1alpha1.MysqlSourceSchemaOption{}, + SubsetByForeignKeyConstraints: true, + }, + }, + }, + }, + Destinations: []*mgmtv1alpha1.CreateJobDestination{ + { + ConnectionId: s3Conn.Id, + Options: &mgmtv1alpha1.JobDestinationOptions{ + Config: &mgmtv1alpha1.JobDestinationOptions_AwsS3Options{ + AwsS3Options: &mgmtv1alpha1.AwsS3DestinationConnectionOptions{}, + }, + }, + }, + }, + Mappings: mysqlalltypes.GetDefaultSyncJobMappings(alltypesSchema), + })) + require.NoError(t, err) + + t.Run("Mysql_to_S3", func(t *testing.T) { + env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId()) + require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. Test: mysql_to_s3") + err = env.GetWorkflowError() + require.NoError(t, err, "Received Temporal Workflow Error", "testName", "mysql_to_s3") + }) + + t.Run("S3_to_Mysql", func(t *testing.T) { + testlogger := testutil.GetTestLogger(t) + cmdConfig := &cmdConfig{ + Source: &sourceConfig{ + ConnectionId: s3Conn.Id, + ConnectionOpts: &connectionOpts{ + JobId: &job.Msg.Job.Id, + }, + }, + Destination: &sqlDestinationConfig{ + ConnectionUrl: mysql.Target.URL, + Driver: mysqlDriver, + InitSchema: false, + TruncateBeforeInsert: true, + }, + OutputType: &outputType, + AccountId: &accountId, + } + sync := &clisync{ + connectiondataclient: conndataclient, + connectionclient: connclient, + sqlmanagerclient: sqlmanagerclient, + ctx: ctx, + logger: testlogger, + cmd: cmdConfig, + } + err := sync.configureAndRunSync() + require.NoError(t, err) + }) + + var rowCount int + rows := mysql.Target.DB.QueryRowContext(ctx, fmt.Sprintf("select count(*) from %s.all_data_types;", alltypesSchema)) + err = rows.Scan(&rowCount) + require.NoError(t, err) + require.Greater(t, rowCount, 1) + + rows = mysql.Target.DB.QueryRowContext(ctx, fmt.Sprintf("select count(*) from %s.json_data;", alltypesSchema)) + err = rows.Scan(&rowCount) + require.NoError(t, err) + require.Greater(t, rowCount, 1) + }) + t.Cleanup(func() { err := mysql.TearDown(ctx) if err != nil { diff --git a/internal/testutil/testcontainers/mysql/mysql.go b/internal/testutil/testcontainers/mysql/mysql.go index a02081f3bf..2a75fa44aa 100644 --- a/internal/testutil/testcontainers/mysql/mysql.go +++ b/internal/testutil/testcontainers/mysql/mysql.go @@ -179,3 +179,34 @@ func (m *MysqlTestContainer) RunSqlFiles(ctx context.Context, folder *string, fi } return nil } + +// Creates schema and sets USE to schema before running SQL files +func (m *MysqlTestContainer) RunCreateStmtsInDatabase(ctx context.Context, folder *string, files []string, database string) error { + for _, file := range files { + filePath := file + if folder != nil && *folder != "" { + filePath = fmt.Sprintf("./%s/%s", *folder, file) + } + 
sqlStr, err := os.ReadFile(filePath)
+		if err != nil {
+			return err
+		}
+
+		setSchemaSql := fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s; \n USE %s; \n", database, database)
+		_, err = m.DB.ExecContext(ctx, setSchemaSql+string(sqlStr))
+		if err != nil {
+			return fmt.Errorf("unable to exec sql when running mysql sql files: %w", err)
+		}
+	}
+	return nil
+}
+
+func (m *MysqlTestContainer) CreateDatabases(ctx context.Context, schemas []string) error {
+	for _, schema := range schemas {
+		_, err := m.DB.ExecContext(ctx, fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s;", schema))
+		if err != nil {
+			return fmt.Errorf("unable to create schema %s: %w", schema, err)
+		}
+	}
+	return nil
+}
diff --git a/internal/testutil/testcontainers/postgres/postgres.go b/internal/testutil/testcontainers/postgres/postgres.go
index 6073c53e94..9450cce382 100644
--- a/internal/testutil/testcontainers/postgres/postgres.go
+++ b/internal/testutil/testcontainers/postgres/postgres.go
@@ -179,3 +179,33 @@ func (p *PostgresTestContainer) RunSqlFiles(ctx context.Context, folder *string,
 	}
 	return nil
 }
+
+// Creates schema and sets search_path to schema before running SQL files
+func (p *PostgresTestContainer) RunCreateStmtsInSchema(ctx context.Context, folder *string, files []string, schema string) error {
+	for _, file := range files {
+		filePath := file
+		if folder != nil && *folder != "" {
+			filePath = fmt.Sprintf("./%s/%s", *folder, file)
+		}
+		sqlStr, err := os.ReadFile(filePath)
+		if err != nil {
+			return err
+		}
+		setSchemaSql := fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s; \n SET search_path TO %s; \n", schema, schema)
+		_, err = p.DB.Exec(ctx, setSchemaSql+string(sqlStr))
+		if err != nil {
+			return fmt.Errorf("unable to exec sql when running postgres sql files: %w", err)
+		}
+	}
+	return nil
+}
+
+func (p *PostgresTestContainer) CreateSchemas(ctx context.Context, schemas []string) error {
+	for _, schema := range schemas {
+		_, err := p.DB.Exec(ctx, fmt.Sprintf("CREATE SCHEMA IF NOT EXISTS %s;", schema))
+		if err != nil {
+			return fmt.Errorf("unable to create schema %s: %w", schema, err)
+		}
+	}
+	return nil
+}
diff --git a/internal/testutil/testdata/gen_jobmappings_config.json b/internal/testutil/testdata/gen_jobmappings_config.json
new file mode 100644
index 0000000000..cf61de37f2
--- /dev/null
+++ b/internal/testutil/testdata/gen_jobmappings_config.json
@@ -0,0 +1,22 @@
+[
+  {
+    "folder": "postgres/alltypes",
+    "sql_file": "create-tables.sql",
+    "driver": "postgres"
+  },
+  {
+    "folder": "postgres/humanresources",
+    "sql_file": "create-tables.sql",
+    "driver": "postgres"
+  },
+  {
+    "folder": "mysql/alltypes",
+    "sql_file": "create-tables.sql",
+    "driver": "mysql"
+  },
+  {
+    "folder": "mysql/humanresources",
+    "sql_file": "create-tables.sql",
+    "driver": "mysql"
+  }
+]
diff --git a/internal/testutil/testdata/generators.go b/internal/testutil/testdata/generators.go
new file mode 100644
index 0000000000..7121dec06b
--- /dev/null
+++ b/internal/testutil/testdata/generators.go
@@ -0,0 +1,3 @@
+package testutil_testdata
+
+//go:generate go run jobmapping_generator.go gen_jobmappings_config.json $GOPACKAGE
diff --git a/internal/testutil/testdata/jobmapping_generator.go b/internal/testutil/testdata/jobmapping_generator.go
new file mode 100644
index 0000000000..c8c96dc4c0
--- /dev/null
+++ b/internal/testutil/testdata/jobmapping_generator.go
@@ -0,0 +1,362 @@
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+	"regexp"
+	"slices"
+	"strings"
+	"text/template"
+
+	
"github.com/antlr4-go/antlr/v4" + parser "github.com/nucleuscloud/go-antlrv4-parser/tsql" + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" + pg_query "github.com/pganalyze/pg_query_go/v5" +) + +type Input struct { + Folder string `json:"folder"` + SqlFile string `json:"sql_file"` + Driver string `json:"driver"` +} + +type Column struct { + Name string + TypeStr string +} + +type Table struct { + Name string + Columns []*Column +} + +type JobMapping struct { + Table string + Column string + Transformer string + Config string +} + +func parsePostegresStatements(sql string) ([]*Table, error) { + tree, err := pg_query.Parse(sql) + if err != nil { + return nil, err + } + + tables := []*Table{} + for _, stmt := range tree.GetStmts() { + s := stmt.GetStmt() + switch s.Node.(type) { + case *pg_query.Node_CreateStmt: + table := s.GetCreateStmt().GetRelation().GetRelname() + columns := []*Column{} + for _, col := range s.GetCreateStmt().GetTableElts() { + if col.GetColumnDef() != nil { + columns = append(columns, &Column{ + Name: col.GetColumnDef().Colname, + }) + } + } + tables = append(tables, &Table{ + Name: table, + Columns: columns, + }) + } + } + return tables, nil +} + +// todo fix very brittle +func parseSQLStatements(sql string) []*Table { + lines := strings.Split(sql, "\n") + tableColumnsMap := make(map[string][]string) + var currentTable string + + reCreateTable := regexp.MustCompile(`CREATE\s+TABLE\s+IF\s+NOT\s+EXISTS\s+(\w+)\s*\.\s*(\w+)\s*\(`) + reCreateTableNoSchema := regexp.MustCompile(`CREATE\s+TABLE\s+IF\s+NOT\s+EXISTS\s+(\w+)\s*\(`) + reColumn := regexp.MustCompile(`^\s*([\w]+)\s+[\w\(\)]+.*`) + + for _, line := range lines { + line = strings.TrimSpace(line) + if matches := reCreateTable.FindStringSubmatch(line); len(matches) > 2 { + currentTable = matches[2] + } else if matches := reCreateTableNoSchema.FindStringSubmatch(line); len(matches) > 1 { + currentTable = matches[1] + } else if currentTable != "" { + if matches := reColumn.FindStringSubmatch(line); len(matches) > 1 { + columnName := matches[1] + if slices.Contains([]string{"primary key", "constraint", "key", "unique", "primary", "alter"}, strings.ToLower(matches[1])) { + continue + } + tableColumnsMap[currentTable] = append(tableColumnsMap[currentTable], columnName) + } else if strings.HasPrefix(line, "PRIMARY KEY") || strings.HasPrefix(line, "CONSTRAINT") || strings.HasPrefix(line, "UNIQUE") || strings.HasPrefix(line, "KEY") || strings.HasPrefix(line, "ENGINE") || strings.HasPrefix(line, ")") { + // Ignore key constraints and end of table definition + if strings.HasPrefix(line, ")") { + currentTable = "" + } + } + } + } + res := []*Table{} + for table, cols := range tableColumnsMap { + tableCols := []*Column{} + for _, c := range cols { + tableCols = append(tableCols, &Column{ + Name: c, + }) + } + res = append(res, &Table{ + Name: table, + Columns: tableCols, + }) + } + + return res +} + +func generateJobMapping(tables []*Table) []*mgmtv1alpha1.JobMapping { + mappings := []*mgmtv1alpha1.JobMapping{} + for _, t := range tables { + for _, c := range t.Columns { + mappings = append(mappings, &mgmtv1alpha1.JobMapping{ + Table: t.Name, + Column: c.Name, + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }) + + } + } + return mappings +} + +type TemplateData struct { + SourceFile string + PackageName string + Mappings []*mgmtv1alpha1.JobMapping + Tables []*Table + GenerateTypeMap bool +} + +func 
formatJobMappings(pkgName string, sqlFile string, mappings []*mgmtv1alpha1.JobMapping, tables []*Table, generateTypeMap bool) (string, error) {
+	const tmpl = `
+// Code generated by Neosync jobmapping_generator. DO NOT EDIT.
+// source: {{ .SourceFile }}
+
+package {{ .PackageName }}
+
+import (
+	mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1"
+)
+
+func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping {
+	return []*mgmtv1alpha1.JobMapping{
+	{{- range .Mappings }}
+		{
+			Schema: schema,
+			Table:  "{{ .Table }}",
+			Column: "{{ .Column }}",
+			Transformer: &mgmtv1alpha1.JobMappingTransformer{
+				Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH,
+			},
+		},
+	{{- end }}
+	}
+}
+{{ if .GenerateTypeMap }}
+
+
+func GetTableColumnTypeMap() map[string]map[string]string {
+	return map[string]map[string]string{
+	{{- range .Tables }}
+		"{{ .Name }}": {
+		{{- range .Columns }}
+			"{{ .Name }}": "{{ .TypeStr }}",
+		{{- end }}
+		},
+	{{- end }}
+	}
+}
+{{- end }}
+`
+	data := TemplateData{
+		SourceFile:      sqlFile,
+		PackageName:     pkgName,
+		Mappings:        mappings,
+		Tables:          tables,
+		GenerateTypeMap: generateTypeMap,
+	}
+	t := template.Must(template.New("jobmappings").Parse(tmpl))
+	var out bytes.Buffer
+	err := t.Execute(&out, data)
+	if err != nil {
+		return "", err
+	}
+	return out.String(), nil
+}
+
+func main() {
+	args := os.Args
+	if len(args) < 3 {
+		panic("must provide necessary args")
+	}
+
+	configFile := args[1]
+	gopackage := args[2]
+
+	packageSplit := strings.Split(gopackage, "_")
+	goPkg := packageSplit[len(packageSplit)-1]
+
+	jsonFile, err := os.Open(configFile)
+	if err != nil {
+		fmt.Printf("failed to open file: %s\n", err)
+		return
+	}
+	defer jsonFile.Close()
+
+	byteValue, err := io.ReadAll(jsonFile)
+	if err != nil {
+		fmt.Printf("failed to read file: %s\n", err)
+		return
+	}
+
+	var inputs []Input
+	if err := json.Unmarshal(byteValue, &inputs); err != nil {
+		fmt.Printf("failed to unmarshal JSON: %s\n", err)
+		return
+	}
+	for _, input := range inputs {
+		folderSplit := strings.Split(input.Folder, "/")
+		var goPkgName string
+		if len(folderSplit) == 1 {
+			goPkgName = strings.ReplaceAll(fmt.Sprintf("%s_%s", goPkg, input.Folder), "-", "")
+		} else if len(folderSplit) > 1 {
+			lastTwo := folderSplit[len(folderSplit)-2:]
+			goPkgName = strings.ReplaceAll(strings.Join(lastTwo, "_"), "-", "")
+		}
+		sqlFile, err := os.Open(fmt.Sprintf("%s/%s", input.Folder, input.SqlFile))
+		if err != nil {
+			fmt.Printf("failed to open file: %s\n", err)
+			continue
+		}
+
+		byteValue, err := io.ReadAll(sqlFile)
+		sqlFile.Close()
+		if err != nil {
+			fmt.Printf("failed to read file: %s\n", err)
+			continue
+		}
+
+		sqlContent := string(byteValue)
+
+		var tables []*Table
+		if input.Driver == "postgres" {
+			t, err := parsePostgresStatements(sqlContent)
+			if err != nil {
+				fmt.Println("Error parsing postgres SQL schema:", err)
+				return
+			}
+			tables = t
+		} else if input.Driver == "mysql" {
+			t := parseSQLStatements(sqlContent)
+			tables = t
+		} else if input.Driver == "sqlserver" {
+			t := parseTsql(sqlContent)
+			tables = t
+		}
+
+		jobMapping := generateJobMapping(tables)
+
+		formattedJobMappings, err := formatJobMappings(goPkgName, input.SqlFile, jobMapping, tables, input.Driver == "sqlserver")
+		if err != nil {
+			fmt.Println("Error formatting job mappings:", err)
+			return
+		}
+
+		output := fmt.Sprintf("%s/job_mappings.go", input.Folder)
+		outputFile, err := os.Create(output)
+		if err != nil {
+			fmt.Println("Error creating jobmapping.go file:", err)
+			return
+		}
+
+		_, err = 
outputFile.WriteString(formattedJobMappings) + if err != nil { + fmt.Println("Error writing to jobmapping.go file:", err) + return + } + outputFile.Close() + } + + return +} + +type tsqlListener struct { + *parser.BaseTSqlParserListener + inCreate bool + currentTable string + currentCols []*Column + mappings []*Table +} + +func (l *tsqlListener) PushTable() { + l.mappings = append(l.mappings, &Table{ + Name: l.currentTable, + Columns: l.currentCols, + }) + l.currentTable = "" + l.currentCols = []*Column{} + l.inCreate = false +} + +func (l *tsqlListener) PushColumn(name, typeStr string) { + l.currentCols = append(l.currentCols, &Column{ + Name: name, + TypeStr: typeStr, + }) +} + +func (l *tsqlListener) SetTable(schemaTable string) { + split := strings.Split(schemaTable, ".") + if len(split) == 1 { + l.currentTable = split[0] + } else if len(split) > 1 { + l.currentTable = split[1] + } +} + +// EnterCreate_table is called when production create_table is entered. +func (l *tsqlListener) EnterCreate_table(ctx *parser.Create_tableContext) { + l.inCreate = true + table := ctx.Table_name().GetText() + l.SetTable(table) +} + +// ExitCreate_table is called when production create_table is exited. +func (l *tsqlListener) ExitCreate_table(ctx *parser.Create_tableContext) { + l.PushTable() +} +func (l *tsqlListener) EnterColumn_definition(ctx *parser.Column_definitionContext) { + l.PushColumn(ctx.Id_().GetText(), ctx.Data_type().GetText()) +} + +func parseTsql(sql string) []*Table { + inputStream := antlr.NewInputStream(sql) + + // create the lexer + lexer := parser.NewTSqlLexer(inputStream) + tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel) + + // create the parser + p := parser.NewTSqlParser(tokens) + + listener := &tsqlListener{} + tree := p.Tsql_file() + antlr.ParseTreeWalkerDefault.Walk(listener, tree) + + return listener.mappings +} diff --git a/internal/testutil/testdata/mysql/alltypes/create-schema.sql b/internal/testutil/testdata/mysql/alltypes/create-schema.sql deleted file mode 100644 index 7fdccc4d07..0000000000 --- a/internal/testutil/testdata/mysql/alltypes/create-schema.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE DATABASE IF NOT EXISTS alltypes; - diff --git a/internal/testutil/testdata/mysql/alltypes/create-tables.sql b/internal/testutil/testdata/mysql/alltypes/create-tables.sql index ecbdfdae22..024a32152e 100644 --- a/internal/testutil/testdata/mysql/alltypes/create-tables.sql +++ b/internal/testutil/testdata/mysql/alltypes/create-tables.sql @@ -1,6 +1,3 @@ -CREATE DATABASE IF NOT EXISTS alltypes; - -USE alltypes; CREATE TABLE IF NOT EXISTS all_data_types ( -- Auto-incrementing primary key id INT AUTO_INCREMENT PRIMARY KEY, @@ -76,7 +73,7 @@ INSERT INTO all_data_types ( json_col, set_as_array ) VALUES ( - 127, 32767, 8388607, 2147483647, 9223372036854775807, + 127, 32767, 8388607, 2147483647, 922337203685477580, 1234.56, 3.1415, 3.14159265359, b'10101010', '2023-09-12', '14:30:00', '2023-09-12 14:30:00', 2023, diff --git a/internal/testutil/testdata/mysql/alltypes/job_mappings.go b/internal/testutil/testdata/mysql/alltypes/job_mappings.go new file mode 100644 index 0000000000..0143ff7402 --- /dev/null +++ b/internal/testutil/testdata/mysql/alltypes/job_mappings.go @@ -0,0 +1,263 @@ + +// Code generated by Neosync jobmapping_generator. DO NOT EDIT. 
+// source: create-tables.sql + +package mysql_alltypes + +import ( + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" +) + +func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { + return []*mgmtv1alpha1.JobMapping{ + { + Schema: schema, + Table: "all_data_types", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tinyint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "smallint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "mediumint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "int_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bigint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "decimal_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "float_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "double_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bit_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "date_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "time_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "datetime_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "timestamp_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "year_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "char_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: 
mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "varchar_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "binary_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "varbinary_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tinyblob_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tinytext_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "blob_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "text_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "mediumblob_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "mediumtext_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "longblob_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "longtext_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "enum_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "set_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "json_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "set_as_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + } +} + diff --git a/internal/testutil/testdata/mysql/alltypes/teardown.sql b/internal/testutil/testdata/mysql/alltypes/teardown.sql deleted file mode 100644 index 0ce3909abc..0000000000 --- a/internal/testutil/testdata/mysql/alltypes/teardown.sql +++ /dev/null @@ -1 +0,0 @@ -DROP DATABASE IF EXISTS alltypes; diff --git 
a/internal/testutil/testdata/mysql/humanresources/create-schema.sql b/internal/testutil/testdata/mysql/humanresources/create-schema.sql deleted file mode 100644 index 8f0d1c740e..0000000000 --- a/internal/testutil/testdata/mysql/humanresources/create-schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE DATABASE IF NOT EXISTS humanresources; diff --git a/internal/testutil/testdata/mysql/humanresources/create-tables.sql b/internal/testutil/testdata/mysql/humanresources/create-tables.sql index 9664def324..b7d57f2ac5 100644 --- a/internal/testutil/testdata/mysql/humanresources/create-tables.sql +++ b/internal/testutil/testdata/mysql/humanresources/create-tables.sql @@ -1,18 +1,16 @@ -CREATE DATABASE IF NOT EXISTS humanresources; -USE humanresources; -CREATE TABLE regions ( +CREATE TABLE IF NOT EXISTS regions ( region_id INT (11) AUTO_INCREMENT PRIMARY KEY, region_name VARCHAR (25) DEFAULT NULL ); -CREATE TABLE countries ( +CREATE TABLE IF NOT EXISTS countries ( country_id CHAR (2) PRIMARY KEY, country_name VARCHAR (40) DEFAULT NULL, region_id INT (11) NOT NULL, FOREIGN KEY (region_id) REFERENCES regions (region_id) ON DELETE CASCADE ON UPDATE CASCADE ); -CREATE TABLE locations ( +CREATE TABLE IF NOT EXISTS locations ( location_id INT (11) AUTO_INCREMENT PRIMARY KEY, street_address VARCHAR (40) DEFAULT NULL, postal_code VARCHAR (12) DEFAULT NULL, @@ -22,21 +20,21 @@ CREATE TABLE locations ( FOREIGN KEY (country_id) REFERENCES countries (country_id) ON DELETE CASCADE ON UPDATE CASCADE ); -CREATE TABLE jobs ( +CREATE TABLE IF NOT EXISTS jobs ( job_id INT (11) AUTO_INCREMENT PRIMARY KEY, job_title VARCHAR (35) NOT NULL, min_salary DECIMAL (8, 2) DEFAULT NULL, max_salary DECIMAL (8, 2) DEFAULT NULL ); -CREATE TABLE departments ( +CREATE TABLE IF NOT EXISTS departments ( department_id INT (11) AUTO_INCREMENT PRIMARY KEY, department_name VARCHAR (30) NOT NULL, location_id INT (11) DEFAULT NULL, FOREIGN KEY (location_id) REFERENCES locations (location_id) ON DELETE CASCADE ON UPDATE CASCADE ); -CREATE TABLE employees ( +CREATE TABLE IF NOT EXISTS employees ( employee_id INT (11) AUTO_INCREMENT PRIMARY KEY, first_name VARCHAR (20) DEFAULT NULL, last_name VARCHAR (25) NOT NULL, @@ -52,7 +50,7 @@ CREATE TABLE employees ( FOREIGN KEY (manager_id) REFERENCES employees (employee_id) ); -CREATE TABLE dependents ( +CREATE TABLE IF NOT EXISTS dependents ( dependent_id INT (11) AUTO_INCREMENT PRIMARY KEY, first_name VARCHAR (50) NOT NULL, last_name VARCHAR (50) NOT NULL, @@ -225,7 +223,7 @@ INSERT INTO dependents(dependent_id,first_name,last_name,relationship,employee_i -- table with generated columns -CREATE TABLE generated_table ( +CREATE TABLE IF NOT EXISTS generated_table ( -- Auto Incremented column id INT AUTO_INCREMENT PRIMARY KEY, price DECIMAL(10,2) NOT NULL, diff --git a/internal/testutil/testdata/mysql/humanresources/job_mappings.go b/internal/testutil/testdata/mysql/humanresources/job_mappings.go new file mode 100644 index 0000000000..d7acecfe6b --- /dev/null +++ b/internal/testutil/testdata/mysql/humanresources/job_mappings.go @@ -0,0 +1,383 @@ + +// Code generated by Neosync jobmapping_generator. DO NOT EDIT. 
+// source: create-tables.sql + +package mysql_humanresources + +import ( + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" +) + +func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { + return []*mgmtv1alpha1.JobMapping{ + { + Schema: schema, + Table: "dependents", + Column: "dependent_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "first_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "last_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "relationship", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "employee_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "price", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "quantity", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "discount_percent", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "total_value", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "discounted_price", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "regions", + Column: "region_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "regions", + Column: "region_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "country_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "country_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + 
}, + }, + { + Schema: schema, + Table: "countries", + Column: "region_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "location_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "street_address", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "postal_code", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "city", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "state_province", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "country_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "job_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "job_title", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "min_salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "max_salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "department_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "department_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "location_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "employee_id", + Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "first_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "last_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "email", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "phone_number", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "hire_date", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "job_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "manager_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "department_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "FOREIGN", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + } +} + diff --git a/internal/testutil/testdata/mysql/humanresources/teardown.sql b/internal/testutil/testdata/mysql/humanresources/teardown.sql deleted file mode 100644 index 7db214c127..0000000000 --- a/internal/testutil/testdata/mysql/humanresources/teardown.sql +++ /dev/null @@ -1 +0,0 @@ -DROP DATABASE IF EXISTS humanresources; diff --git a/internal/testutil/testdata/postgres/alltypes/create-schema.sql b/internal/testutil/testdata/postgres/alltypes/create-schema.sql deleted file mode 100644 index 5ac3f363a9..0000000000 --- a/internal/testutil/testdata/postgres/alltypes/create-schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS alltypes; diff --git a/internal/testutil/testdata/postgres/alltypes/create-tables.sql b/internal/testutil/testdata/postgres/alltypes/create-tables.sql index 0c114bd4f1..5b406e108e 100644 --- a/internal/testutil/testdata/postgres/alltypes/create-tables.sql +++ b/internal/testutil/testdata/postgres/alltypes/create-tables.sql @@ -1,5 +1,4 @@ -CREATE SCHEMA IF 
NOT EXISTS alltypes; -CREATE TABLE IF NOT EXISTS alltypes.all_postgres_types ( +CREATE TABLE IF NOT EXISTS all_data_types ( id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, -- Numeric Types smallint_col SMALLINT, @@ -82,7 +81,7 @@ CREATE TABLE IF NOT EXISTS alltypes.all_postgres_types ( ); -INSERT INTO alltypes.all_postgres_types ( +INSERT INTO all_data_types ( Id, smallint_col, integer_col, @@ -135,7 +134,7 @@ INSERT INTO alltypes.all_postgres_types ( DEFAULT, 32767, -- smallint_col 2147483647, -- integer_col - 9223372036854775807, -- bigint_col + 922337203685477580, -- bigint_col 1234.56, -- decimal_col 99999999.99, -- numeric_col 12345.67, -- real_col @@ -182,21 +181,21 @@ INSERT INTO alltypes.all_postgres_types ( 123456 -- oid_col ); -INSERT INTO alltypes.all_postgres_types ( +INSERT INTO all_data_types ( Id ) VALUES ( DEFAULT ); --- CREATE TABLE IF NOT EXISTS alltypes.time_time ( +-- CREATE TABLE IF NOT EXISTS time_time ( -- id SERIAL PRIMARY KEY, -- timestamp_col TIMESTAMP, -- timestamptz_col TIMESTAMPTZ, -- date_col DATE -- ); --- INSERT INTO alltypes.time_time ( +-- INSERT INTO time_time ( -- timestamp_col, -- timestamptz_col, -- date_col @@ -207,7 +206,7 @@ INSERT INTO alltypes.all_postgres_types ( -- '2024-03-18' -- ); --- INSERT INTO alltypes.time_time ( +-- INSERT INTO time_time ( -- timestamp_col, -- timestamptz_col, -- date_col @@ -219,7 +218,7 @@ INSERT INTO alltypes.all_postgres_types ( -- ); --- CREATE TABLE IF NOT EXISTS alltypes.array_types ( +-- CREATE TABLE IF NOT EXISTS array_types ( -- "id" BIGINT NOT NULL PRIMARY KEY, -- "int_array" _int4, -- "smallint_array" _int2, @@ -256,7 +255,7 @@ INSERT INTO alltypes.all_postgres_types ( -- ); --- INSERT INTO alltypes.array_types ( +-- INSERT INTO array_types ( -- id, int_array, smallint_array, bigint_array, real_array, double_array, -- text_array, varchar_array, char_array, boolean_array, date_array, -- time_array, timestamp_array, timestamptz_array, interval_array, @@ -305,28 +304,28 @@ INSERT INTO alltypes.all_postgres_types ( -- ); -CREATE TABLE alltypes.json_data ( +CREATE TABLE json_data ( id SERIAL PRIMARY KEY, data JSONB ); -INSERT INTO alltypes.json_data (data) VALUES ('"Hello, world!"'); -INSERT INTO alltypes.json_data (data) VALUES ('42'); -INSERT INTO alltypes.json_data (data) VALUES ('3.14'); -INSERT INTO alltypes.json_data (data) VALUES ('true'); -INSERT INTO alltypes.json_data (data) VALUES ('false'); -INSERT INTO alltypes.json_data (data) VALUES ('null'); +INSERT INTO json_data (data) VALUES ('"Hello, world!"'); +INSERT INTO json_data (data) VALUES ('42'); +INSERT INTO json_data (data) VALUES ('3.14'); +INSERT INTO json_data (data) VALUES ('true'); +INSERT INTO json_data (data) VALUES ('false'); +INSERT INTO json_data (data) VALUES ('null'); -INSERT INTO alltypes.json_data (data) VALUES ('{"name": "John", "age": 30}'); -INSERT INTO alltypes.json_data (data) VALUES ('{"coords": {"x": 10, "y": 20}}'); +INSERT INTO json_data (data) VALUES ('{"name": "John", "age": 30}'); +INSERT INTO json_data (data) VALUES ('{"coords": {"x": 10, "y": 20}}'); -INSERT INTO alltypes.json_data (data) VALUES ('[1, 2, 3, 4]'); -INSERT INTO alltypes.json_data (data) VALUES ('["apple", "banana", "cherry"]'); +INSERT INTO json_data (data) VALUES ('[1, 2, 3, 4]'); +INSERT INTO json_data (data) VALUES ('["apple", "banana", "cherry"]'); -INSERT INTO alltypes.json_data (data) VALUES ('{"items": ["book", "pen"], "count": 2, "in_stock": true}'); +INSERT INTO json_data (data) VALUES ('{"items": ["book", "pen"], "count": 2, "in_stock": 
true}'); -INSERT INTO alltypes.json_data (data) VALUES ( +INSERT INTO json_data (data) VALUES ( '{ "user": { "name": "Alice", diff --git a/internal/testutil/testdata/postgres/alltypes/job_mappings.go b/internal/testutil/testdata/postgres/alltypes/job_mappings.go new file mode 100644 index 0000000000..c4d72bb77a --- /dev/null +++ b/internal/testutil/testdata/postgres/alltypes/job_mappings.go @@ -0,0 +1,391 @@ + +// Code generated by Neosync jobmapping_generator. DO NOT EDIT. +// source: create-tables.sql + +package postgres_alltypes + +import ( + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" +) + +func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { + return []*mgmtv1alpha1.JobMapping{ + { + Schema: schema, + Table: "all_data_types", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "smallint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "integer_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bigint_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "decimal_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "numeric_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "real_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "double_precision_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "serial_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bigserial_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "money_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "char_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "varchar_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "text_col", + Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bytea_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "timestamp_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "timestamptz_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "date_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "boolean_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "uuid_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "inet_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "cidr_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "macaddr_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "bit_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "varbit_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "point_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "line_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "lseg_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "box_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "path_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "polygon_col", + Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "circle_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "json_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "jsonb_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "int4range_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "int8range_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "numrange_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tsrange_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tstzrange_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "daterange_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "integer_array_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "text_array_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "xml_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "tsvector_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "all_data_types", + Column: "oid_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "json_data", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "json_data", + Column: "data", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + } +} + diff --git a/internal/testutil/testdata/postgres/alltypes/teardown.sql 
b/internal/testutil/testdata/postgres/alltypes/teardown.sql deleted file mode 100644 index 7313dd27c3..0000000000 --- a/internal/testutil/testdata/postgres/alltypes/teardown.sql +++ /dev/null @@ -1 +0,0 @@ -DROP SCHEMA IF EXISTS alltypes CASCADE; diff --git a/internal/testutil/testdata/postgres/humanresources/create-schema.sql b/internal/testutil/testdata/postgres/humanresources/create-schema.sql deleted file mode 100644 index 9112cc24aa..0000000000 --- a/internal/testutil/testdata/postgres/humanresources/create-schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS humanresources; diff --git a/internal/testutil/testdata/postgres/humanresources/create-tables.sql b/internal/testutil/testdata/postgres/humanresources/create-tables.sql index 56276c8ca0..5ca813ab81 100644 --- a/internal/testutil/testdata/postgres/humanresources/create-tables.sql +++ b/internal/testutil/testdata/postgres/humanresources/create-tables.sql @@ -1,6 +1,3 @@ -CREATE SCHEMA IF NOT EXISTS humanresources; -SET search_path TO humanresources; - CREATE TABLE regions ( region_id SERIAL PRIMARY KEY, region_name CHARACTER VARYING (25) diff --git a/internal/testutil/testdata/postgres/humanresources/job_mappings.go b/internal/testutil/testdata/postgres/humanresources/job_mappings.go new file mode 100644 index 0000000000..cbbd894aa0 --- /dev/null +++ b/internal/testutil/testdata/postgres/humanresources/job_mappings.go @@ -0,0 +1,311 @@ + +// Code generated by Neosync jobmapping_generator. DO NOT EDIT. +// source: create-tables.sql + +package postgres_humanresources + +import ( + mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" +) + +func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { + return []*mgmtv1alpha1.JobMapping{ + { + Schema: schema, + Table: "regions", + Column: "region_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "regions", + Column: "region_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "country_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "country_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "countries", + Column: "region_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "location_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "street_address", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "postal_code", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "city", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: 
mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "state_province", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "locations", + Column: "country_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "department_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "department_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "departments", + Column: "location_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "job_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "job_title", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "min_salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "jobs", + Column: "max_salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "employee_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "first_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "last_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "email", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "phone_number", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "hire_date", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "job_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "salary", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + 
Column: "manager_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "employees", + Column: "department_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "dependent_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "first_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "last_name", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "relationship", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "dependents", + Column: "employee_id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "amount", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "status", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "generated_table", + Column: "amount_with_tax", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + } +} + diff --git a/internal/testutil/testdata/postgres/humanresources/teardown.sql b/internal/testutil/testdata/postgres/humanresources/teardown.sql deleted file mode 100644 index dc2f4c304e..0000000000 --- a/internal/testutil/testdata/postgres/humanresources/teardown.sql +++ /dev/null @@ -1 +0,0 @@ -DROP SCHEMA IF EXISTS humanresources CASCADE; diff --git a/internal/testutil/utils.go b/internal/testutil/utils.go index 3225004a23..6db38ebf56 100644 --- a/internal/testutil/utils.go +++ b/internal/testutil/utils.go @@ -18,6 +18,32 @@ func ShouldRunIntegrationTest() bool { return true } +func ShouldRunS3IntegrationTest() bool { + evkey := "S3_INTEGRATION_TESTS_ENABLED" + shouldRun := os.Getenv(evkey) + if shouldRun != "1" { + slog.Warn(fmt.Sprintf("skipping S3 integration tests, set %s=1 to enable", evkey)) + return false + } + return true +} + +type AwsS3Config struct { + Bucket string + Region string + AccessKeyId string + SecretAccessKey string +} + +func GetTestAwsS3Config() *AwsS3Config { + return &AwsS3Config{ + Region: os.Getenv("TEST_S3_REGION"), + Bucket: os.Getenv("TEST_S3_BUCKET"), + AccessKeyId: os.Getenv("TEST_S3_ACCESS_KEY_ID"), + SecretAccessKey: os.Getenv("TEST_S3_SECRET_ACCESS_KEY"), + } +} + func GetTestLogger(t *testing.T) *slog.Logger { testHandler := slog.NewTextHandler(testWriter{t}, 
&slog.HandlerOptions{ Level: slog.LevelDebug, @@ -35,3 +61,9 @@ func (tw testWriter) Write(p []byte) (n int, err error) { tw.t.Log(msg) return len(p), nil } + +type FakeEELicense struct{} + +func (f *FakeEELicense) IsValid() bool { + return true +} diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go new file mode 100644 index 0000000000..bcbe224f01 --- /dev/null +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -0,0 +1,82 @@ +package integrationtest + +import ( + "sync" + "testing" + "time" + + tcneosyncapi "github.com/nucleuscloud/neosync/backend/pkg/integration-test" + "github.com/nucleuscloud/neosync/backend/pkg/sqlconnect" + "github.com/nucleuscloud/neosync/internal/testutil" + accountstatus_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/account-status" + genbenthosconfigs_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/gen-benthos-configs" + jobhooks_by_timing_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/jobhooks-by-timing" + runsqlinittablestmts_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts" + "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/shared" + sync_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync" + syncactivityopts_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync-activity-opts" + syncrediscleanup_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync-redis-clean-up" + datasync_workflow "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow" + "go.opentelemetry.io/otel/metric" + "go.temporal.io/sdk/client" + temporalmocks "go.temporal.io/sdk/mocks" + "go.temporal.io/sdk/testsuite" +) + +func ExecuteTestDataSyncWorkflow( + t *testing.T, + neosyncApi *tcneosyncapi.NeosyncApiTestClient, + redisUrl *string, + jobId string, +) *testsuite.TestWorkflowEnvironment { + connclient := neosyncApi.UnauthdClients.Connections + jobclient := neosyncApi.UnauthdClients.Jobs + transformerclient := neosyncApi.UnauthdClients.Transformers + userclient := neosyncApi.UnauthdClients.Users + var redisconfig *shared.RedisConfig + if redisUrl != nil && *redisUrl != "" { + redisconfig = &shared.RedisConfig{ + Url: *redisUrl, + Kind: "simple", + Tls: &shared.RedisTlsConfig{ + Enabled: false, + }, + } + } + temporalClientMock := temporalmocks.NewClient(t) + sqlmanager := tcneosyncapi.NewTestSqlManagerClient() + + // temporal workflow + testSuite := &testsuite.WorkflowTestSuite{} + env := testSuite.NewTestWorkflowEnvironment() + + // register activities + genbenthosActivity := genbenthosconfigs_activity.New( + jobclient, + connclient, + transformerclient, + sqlmanager, + redisconfig, + false, + ) + var activityMeter metric.Meter + disableReaper := true + syncActivity := sync_activity.New(connclient, jobclient, &sqlconnect.SqlOpenConnector{}, &sync.Map{}, temporalClientMock, activityMeter, sync_activity.NewBenthosStreamManager(), disableReaper) + retrieveActivityOpts := syncactivityopts_activity.New(jobclient) + runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) + jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) + accountStatusActivity := accountstatus_activity.New(userclient) + 
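// Register the workflow and every activity it schedules on the Temporal test environment; + // the testsuite executes them in-process, so no live Temporal server is needed. +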
env.RegisterWorkflow(datasync_workflow.Workflow) + env.RegisterActivity(syncActivity.Sync) + env.RegisterActivity(retrieveActivityOpts.RetrieveActivityOptions) + env.RegisterActivity(runSqlInitTableStatements.RunSqlInitTableStatements) + env.RegisterActivity(syncrediscleanup_activity.DeleteRedisHash) + env.RegisterActivity(genbenthosActivity.GenerateBenthosConfigs) + env.RegisterActivity(accountStatusActivity.CheckAccountStatus) + env.RegisterActivity(jobhookTimingActivity.RunJobHooksByTiming) + env.SetTestTimeout(600 * time.Second) // increase the test timeout + + env.SetStartWorkflowOptions(client.StartWorkflowOptions{ID: jobId}) + env.ExecuteWorkflow(datasync_workflow.Workflow, &datasync_workflow.WorkflowRequest{JobId: jobId}) + return env +} diff --git a/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go b/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go index 370877fb4a..fe7109db86 100644 --- a/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go +++ b/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go @@ -425,247 +425,7 @@ func GetDefaultSyncJobMappings()[]*mgmtv1alpha1.JobMapping { Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "id", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "int_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "smallint_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "bigint_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "real_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "double_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "text_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "varchar_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "char_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "boolean_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "date_array", - Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "time_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "timestamp_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "timestamptz_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "interval_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "point_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "line_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "lseg_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "path_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "polygon_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "circle_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "uuid_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "json_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "jsonb_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "bit_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "varbit_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "numeric_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: 
"money_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "xml_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: "alltypes", - Table: "array_types", - Column: "int_double_array", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, { Schema: "alltypes", Table: "json_data", From 525aae5dd1d9ae5ffa4e1ab77fbc05373b5d40c2 Mon Sep 17 00:00:00 2001 From: Alisha Date: Mon, 2 Dec 2024 14:31:16 -0800 Subject: [PATCH 02/14] fix jobmappings --- .../postgres/all-types/job_mappings.go | 240 ++++++++++++++++++ 1 file changed, 240 insertions(+) diff --git a/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go b/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go index fe7109db86..370877fb4a 100644 --- a/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go +++ b/worker/pkg/workflows/datasync/workflow/testdata/postgres/all-types/job_mappings.go @@ -425,7 +425,247 @@ func GetDefaultSyncJobMappings()[]*mgmtv1alpha1.JobMapping { Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "int_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "smallint_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "bigint_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "real_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "double_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "text_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "varchar_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "char_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "boolean_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: 
mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "date_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "time_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "timestamp_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "timestamptz_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "interval_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "point_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "line_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "lseg_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "path_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "polygon_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "circle_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "uuid_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "json_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "jsonb_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "bit_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "varbit_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "numeric_array", + Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "money_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "xml_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: "alltypes", + Table: "array_types", + Column: "int_double_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, { Schema: "alltypes", Table: "json_data", From 68313ddf3dd324b0c83d8a0b9e60c47de4edfdf5 Mon Sep 17 00:00:00 2001 From: Alisha Date: Mon, 2 Dec 2024 14:52:44 -0800 Subject: [PATCH 03/14] fix temporal activity in test --- .../pkg/integration-test/datasync_workflow.go | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go index bcbe224f01..42c7d43ea6 100644 --- a/worker/pkg/integration-test/datasync_workflow.go +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -1,12 +1,15 @@ package integrationtest import ( - "sync" "testing" "time" tcneosyncapi "github.com/nucleuscloud/neosync/backend/pkg/integration-test" "github.com/nucleuscloud/neosync/backend/pkg/sqlconnect" + sql_manager "github.com/nucleuscloud/neosync/backend/pkg/sqlmanager" + connectionmanager "github.com/nucleuscloud/neosync/internal/connection-manager" + "github.com/nucleuscloud/neosync/internal/connection-manager/providers/mongoprovider" + "github.com/nucleuscloud/neosync/internal/connection-manager/providers/sqlprovider" "github.com/nucleuscloud/neosync/internal/testutil" accountstatus_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/account-status" genbenthosconfigs_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/gen-benthos-configs" @@ -19,7 +22,6 @@ import ( datasync_workflow "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow" "go.opentelemetry.io/otel/metric" "go.temporal.io/sdk/client" - temporalmocks "go.temporal.io/sdk/mocks" "go.temporal.io/sdk/testsuite" ) @@ -43,8 +45,15 @@ func ExecuteTestDataSyncWorkflow( }, } } - temporalClientMock := temporalmocks.NewClient(t) - sqlmanager := tcneosyncapi.NewTestSqlManagerClient() + + sqlconnmanager := connectionmanager.NewConnectionManager(sqlprovider.NewProvider(&sqlconnect.SqlOpenConnector{}), connectionmanager.WithReaperPoll(10*time.Second)) + go sqlconnmanager.Reaper(testutil.GetTestLogger(t)) + mongoconnmanager := connectionmanager.NewConnectionManager(mongoprovider.NewProvider()) + go mongoconnmanager.Reaper(testutil.GetTestLogger(t)) + + sqlmanager := sql_manager.NewSqlManager( + sql_manager.WithConnectionManager(sqlconnmanager), + ) // temporal workflow testSuite := &testsuite.WorkflowTestSuite{} @@ -59,13 +68,14 @@ func ExecuteTestDataSyncWorkflow( redisconfig, false, ) + var activityMeter metric.Meter - disableReaper := true - syncActivity := sync_activity.New(connclient, jobclient, &sqlconnect.SqlOpenConnector{}, &sync.Map{}, temporalClientMock, activityMeter, sync_activity.NewBenthosStreamManager(), disableReaper) + syncActivity := sync_activity.New(connclient, jobclient, sqlconnmanager, mongoconnmanager, 
activityMeter, sync_activity.NewBenthosStreamManager())
 	retrieveActivityOpts := syncactivityopts_activity.New(jobclient)
 	runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{})
 	jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{})
 	accountStatusActivity := accountstatus_activity.New(userclient)
+
+
 	env.RegisterWorkflow(datasync_workflow.Workflow)
 	env.RegisterActivity(syncActivity.Sync)
 	env.RegisterActivity(retrieveActivityOpts.RetrieveActivityOptions)

From fcbc6be44a0b2ac3d6b4c2d613d7da4c7cb1dfa4 Mon Sep 17 00:00:00 2001
From: Nick Z <2420177+nickzelei@users.noreply.github.com>
Date: Mon, 2 Dec 2024 15:59:32 -0800
Subject: [PATCH 04/14] updates Integration test and action to pull aws creds
 from environment

---
 .github/workflows/go.yml                       | 18 ++++++++++++++----
 .../integration-test/integration-test-util.go  | 14 +++++----------
 .../cmds/neosync/sync/sync_integration_test.go |  2 --
 internal/testutil/utils.go                     |  6 ++----
 4 files changed, 21 insertions(+), 19 deletions(-)

diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index d9e5fd78db..3ca7213d3f 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -80,6 +80,11 @@ jobs:
   integration-tests:
     name: Integration Tests
     runs-on: ubuntu-latest
+    permissions:
+      id-token: write # required for requesting JWT for use with retrieving AWS creds
+      contents: read # required for actions/checkout
+    env:
+      AWS_REGION: "us-west-2"
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -90,16 +95,21 @@ jobs:
           go-version-file: go.mod
           cache-dependency-path: go.sum
 
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-region: ${{ env.AWS_REGION }}
+          role-to-assume: ${{ vars.INTEGRATION_TEST_ROLE_ARN }}
+          role-session-name: NeosyncCiIntegrationTests
+
       - name: Run Integration Tests
         run: |
           go test -race -timeout 1800s -coverprofile=integration-coverage.out -covermode=atomic -run TestIntegrationTestSuite ./... 
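+        # S3 credentials come from the AWS role assumed in the step above; only non-secret values are set below.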
         env:
           INTEGRATION_TESTS_ENABLED: 1
           S3_INTEGRATION_TESTS_ENABLED: 1
-          TEST_S3_REGION: us-west-2
-          TEST_S3_BUCKET: neosync-integration-test
-          TEST_S3_ACCESS_KEY_ID: ${{ secrets.TEST_S3_ACCESS_KEY_ID }}
-          TEST_S3_SECRET_ACCESS_KEY: ${{ secrets.TEST_S3_SECRET_ACCESS_KEY }}
+          TEST_S3_REGION: ${{ env.AWS_REGION }}
+          TEST_S3_BUCKET: ${{ vars.INTEGRATION_TEST_BUCKET_NAME }}
 
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v5

diff --git a/backend/pkg/integration-test/integration-test-util.go b/backend/pkg/integration-test/integration-test-util.go
index ce9d17bb15..6af841741e 100644
--- a/backend/pkg/integration-test/integration-test-util.go
+++ b/backend/pkg/integration-test/integration-test-util.go
@@ -81,7 +81,6 @@ func CreateS3Connection(
 	t *testing.T,
 	connclient mgmtv1alpha1connect.ConnectionServiceClient,
 	accountId, name string,
-	accessKeyId, secretAccessKey string,
 	bucket string,
 	region *string,
 ) *mgmtv1alpha1.Connection {
@@ -93,14 +92,11 @@ func CreateS3Connection(
 		ConnectionConfig: &mgmtv1alpha1.ConnectionConfig{
 			Config: &mgmtv1alpha1.ConnectionConfig_AwsS3Config{
 				AwsS3Config: &mgmtv1alpha1.AwsS3ConnectionConfig{
-					Bucket:     bucket,
-					PathPrefix: nil,
-					Region:     region,
-					Endpoint:   nil,
-					Credentials: &mgmtv1alpha1.AwsS3Credentials{
-						AccessKeyId:     &accessKeyId,
-						SecretAccessKey: &secretAccessKey,
-					},
+					Bucket:      bucket,
+					PathPrefix:  nil,
+					Region:      region,
+					Endpoint:    nil,
+					Credentials: nil,
 				},
 			},
 		},

diff --git a/cli/internal/cmds/neosync/sync/sync_integration_test.go b/cli/internal/cmds/neosync/sync/sync_integration_test.go
index f1897f88c0..2997de835f 100644
--- a/cli/internal/cmds/neosync/sync/sync_integration_test.go
+++ b/cli/internal/cmds/neosync/sync/sync_integration_test.go
@@ -51,8 +51,6 @@ func Test_Sync(t *testing.T) {
 		connclient,
 		accountId,
 		"s3-conn",
-		awsS3Config.AccessKeyId,
-		awsS3Config.SecretAccessKey,
 		awsS3Config.Bucket,
 		&awsS3Config.Region,
 	)

diff --git a/internal/testutil/utils.go b/internal/testutil/utils.go
index 981cdd520f..4801137c41 100644
--- a/internal/testutil/utils.go
+++ b/internal/testutil/utils.go
@@ -37,10 +37,8 @@ func GetTestAwsS3Config() *AwsS3Config {
 	return &AwsS3Config{
-		Region:          os.Getenv("TEST_S3_REGION"),
-		Bucket:          os.Getenv("TEST_S3_BUCKET"),
-		AccessKeyId:     os.Getenv("TEST_S3_ACCESS_KEY_ID"),
-		SecretAccessKey: os.Getenv("TEST_S3_SECRET_ACCESS_KEY"),
+		Region: os.Getenv("TEST_S3_REGION"),
+		Bucket: os.Getenv("TEST_S3_BUCKET"),
 	}
 }

From c0572b71afafd145cf39c1c3dd2e4a127072081e Mon Sep 17 00:00:00 2001
From: Nick Z <2420177+nickzelei@users.noreply.github.com>
Date: Mon, 2 Dec 2024 16:02:28 -0800
Subject: [PATCH 05/14] Fixes bad variable name

---
 .github/workflows/go.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index 3ca7213d3f..626740de75 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -99,7 +99,7 @@ jobs:
         uses: aws-actions/configure-aws-credentials@v4
         with:
           aws-region: ${{ env.AWS_REGION }}
-          role-to-assume: ${{ vars.INTEGRATION_TEST_ROLE_ARN }}
+          role-to-assume: ${{ vars.INTEGRATION_TEST_AWS_ROLE_ARN }}
           role-session-name: NeosyncCiIntegrationTests
 
       - name: Run Integration Tests

From dae9b801e9e73fe3793f359d2997c746fcb84450 Mon Sep 17 00:00:00 2001
From: Alisha
Date: Mon, 2 Dec 2024 16:16:34 -0800
Subject: [PATCH 06/14] fix mysql job mappings

---
 .../mysql/humanresources/create-tables.sql |  96 ++++----
 .../mysql/humanresources/job_mappings.go   | 206 +++++++-----------
 .../postgres/alltypes/create-tables.sql    |   4 
+- .../postgres/alltypes/job_mappings.go | 24 ++ 4 files changed, 157 insertions(+), 173 deletions(-) diff --git a/internal/testutil/testdata/mysql/humanresources/create-tables.sql b/internal/testutil/testdata/mysql/humanresources/create-tables.sql index b7d57f2ac5..88769136a2 100644 --- a/internal/testutil/testdata/mysql/humanresources/create-tables.sql +++ b/internal/testutil/testdata/mysql/humanresources/create-tables.sql @@ -1,64 +1,80 @@ CREATE TABLE IF NOT EXISTS regions ( - region_id INT (11) AUTO_INCREMENT PRIMARY KEY, - region_name VARCHAR (25) DEFAULT NULL + region_id INT (11) AUTO_INCREMENT PRIMARY KEY, + region_name VARCHAR (25) DEFAULT NULL ); CREATE TABLE IF NOT EXISTS countries ( - country_id CHAR (2) PRIMARY KEY, - country_name VARCHAR (40) DEFAULT NULL, - region_id INT (11) NOT NULL, - FOREIGN KEY (region_id) REFERENCES regions (region_id) ON DELETE CASCADE ON UPDATE CASCADE + country_id CHAR (2) PRIMARY KEY, + country_name VARCHAR (40) DEFAULT NULL, + region_id INT (11) NOT NULL ); CREATE TABLE IF NOT EXISTS locations ( - location_id INT (11) AUTO_INCREMENT PRIMARY KEY, - street_address VARCHAR (40) DEFAULT NULL, - postal_code VARCHAR (12) DEFAULT NULL, - city VARCHAR (30) NOT NULL, - state_province VARCHAR (25) DEFAULT NULL, - country_id CHAR (2) NOT NULL, - FOREIGN KEY (country_id) REFERENCES countries (country_id) ON DELETE CASCADE ON UPDATE CASCADE + location_id INT (11) AUTO_INCREMENT PRIMARY KEY, + street_address VARCHAR (40) DEFAULT NULL, + postal_code VARCHAR (12) DEFAULT NULL, + city VARCHAR (30) NOT NULL, + state_province VARCHAR (25) DEFAULT NULL, + country_id CHAR (2) NOT NULL ); CREATE TABLE IF NOT EXISTS jobs ( - job_id INT (11) AUTO_INCREMENT PRIMARY KEY, - job_title VARCHAR (35) NOT NULL, - min_salary DECIMAL (8, 2) DEFAULT NULL, - max_salary DECIMAL (8, 2) DEFAULT NULL + job_id INT (11) AUTO_INCREMENT PRIMARY KEY, + job_title VARCHAR (35) NOT NULL, + min_salary DECIMAL (8, 2) DEFAULT NULL, + max_salary DECIMAL (8, 2) DEFAULT NULL ); CREATE TABLE IF NOT EXISTS departments ( - department_id INT (11) AUTO_INCREMENT PRIMARY KEY, - department_name VARCHAR (30) NOT NULL, - location_id INT (11) DEFAULT NULL, - FOREIGN KEY (location_id) REFERENCES locations (location_id) ON DELETE CASCADE ON UPDATE CASCADE + department_id INT (11) AUTO_INCREMENT PRIMARY KEY, + department_name VARCHAR (30) NOT NULL, + location_id INT (11) DEFAULT NULL ); CREATE TABLE IF NOT EXISTS employees ( - employee_id INT (11) AUTO_INCREMENT PRIMARY KEY, - first_name VARCHAR (20) DEFAULT NULL, - last_name VARCHAR (25) NOT NULL, - email VARCHAR (100) NOT NULL, - phone_number VARCHAR (20) DEFAULT NULL, - hire_date DATE NOT NULL, - job_id INT (11) NOT NULL, - salary DECIMAL (8, 2) NOT NULL, - manager_id INT (11) DEFAULT NULL, - department_id INT (11) DEFAULT NULL, - FOREIGN KEY (job_id) REFERENCES jobs (job_id) ON DELETE CASCADE ON UPDATE CASCADE, - FOREIGN KEY (department_id) REFERENCES departments (department_id) ON DELETE CASCADE ON UPDATE CASCADE, - FOREIGN KEY (manager_id) REFERENCES employees (employee_id) + employee_id INT (11) AUTO_INCREMENT PRIMARY KEY, + first_name VARCHAR (20) DEFAULT NULL, + last_name VARCHAR (25) NOT NULL, + email VARCHAR (100) NOT NULL, + phone_number VARCHAR (20) DEFAULT NULL, + hire_date DATE NOT NULL, + job_id INT (11) NOT NULL, + salary DECIMAL (8, 2) NOT NULL, + manager_id INT (11) DEFAULT NULL, + department_id INT (11) DEFAULT NULL ); CREATE TABLE IF NOT EXISTS dependents ( - dependent_id INT (11) AUTO_INCREMENT PRIMARY KEY, - first_name VARCHAR (50) NOT 
NULL, - last_name VARCHAR (50) NOT NULL, - relationship VARCHAR (25) NOT NULL, - employee_id INT (11) NOT NULL, - FOREIGN KEY (employee_id) REFERENCES employees (employee_id) ON DELETE CASCADE ON UPDATE CASCADE + dependent_id INT (11) AUTO_INCREMENT PRIMARY KEY, + first_name VARCHAR (50) NOT NULL, + last_name VARCHAR (50) NOT NULL, + relationship VARCHAR (25) NOT NULL, + employee_id INT (11) NOT NULL ); +-- Add foreign keys +ALTER TABLE countries + ADD FOREIGN KEY (region_id) REFERENCES regions (region_id) + ON DELETE CASCADE ON UPDATE CASCADE; + +ALTER TABLE locations + ADD FOREIGN KEY (country_id) REFERENCES countries (country_id) + ON DELETE CASCADE ON UPDATE CASCADE; + +ALTER TABLE departments + ADD FOREIGN KEY (location_id) REFERENCES locations (location_id) + ON DELETE CASCADE ON UPDATE CASCADE; + +ALTER TABLE employees + ADD FOREIGN KEY (job_id) REFERENCES jobs (job_id) + ON DELETE CASCADE ON UPDATE CASCADE, + ADD FOREIGN KEY (department_id) REFERENCES departments (department_id) + ON DELETE CASCADE ON UPDATE CASCADE, + ADD FOREIGN KEY (manager_id) REFERENCES employees (employee_id); + +ALTER TABLE dependents + ADD FOREIGN KEY (employee_id) REFERENCES employees (employee_id) + ON DELETE CASCADE ON UPDATE CASCADE; /*Data for the table regions */ diff --git a/internal/testutil/testdata/mysql/humanresources/job_mappings.go b/internal/testutil/testdata/mysql/humanresources/job_mappings.go index d7acecfe6b..cf821ff31f 100644 --- a/internal/testutil/testdata/mysql/humanresources/job_mappings.go +++ b/internal/testutil/testdata/mysql/humanresources/job_mappings.go @@ -12,39 +12,31 @@ func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { return []*mgmtv1alpha1.JobMapping{ { Schema: schema, - Table: "dependents", - Column: "dependent_id", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "dependents", - Column: "first_name", + Table: "departments", + Column: "department_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "dependents", - Column: "last_name", + Table: "departments", + Column: "department_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "dependents", - Column: "relationship", + Table: "departments", + Column: "location_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "dependents", + Table: "employees", Column: "employee_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, @@ -52,215 +44,207 @@ func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { }, { Schema: schema, - Table: "dependents", - Column: "FOREIGN", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "generated_table", - Column: "id", + Table: "employees", + Column: "first_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "generated_table", - Column: "price", + Table: "employees", + Column: "last_name", Transformer: 
&mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "generated_table", - Column: "quantity", + Table: "employees", + Column: "email", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "generated_table", - Column: "discount_percent", + Table: "employees", + Column: "phone_number", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "generated_table", - Column: "total_value", + Table: "employees", + Column: "hire_date", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "generated_table", - Column: "discounted_price", + Table: "employees", + Column: "job_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "regions", - Column: "region_id", + Table: "employees", + Column: "salary", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "regions", - Column: "region_name", + Table: "employees", + Column: "manager_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "countries", - Column: "country_id", + Table: "employees", + Column: "department_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "countries", - Column: "country_name", + Table: "dependents", + Column: "dependent_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "countries", - Column: "region_id", + Table: "dependents", + Column: "first_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "countries", - Column: "FOREIGN", + Table: "dependents", + Column: "last_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "location_id", + Table: "dependents", + Column: "relationship", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "street_address", + Table: "dependents", + Column: "employee_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "postal_code", + Table: "generated_table", + Column: "id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "city", + Table: "generated_table", + Column: "price", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "state_province", + Table: 
"generated_table", + Column: "quantity", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "country_id", + Table: "generated_table", + Column: "discount_percent", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "locations", - Column: "FOREIGN", + Table: "generated_table", + Column: "total_value", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "jobs", - Column: "job_id", + Table: "generated_table", + Column: "discounted_price", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "jobs", - Column: "job_title", + Table: "regions", + Column: "region_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "jobs", - Column: "min_salary", + Table: "regions", + Column: "region_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "jobs", - Column: "max_salary", + Table: "countries", + Column: "country_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "departments", - Column: "department_id", + Table: "countries", + Column: "country_name", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "departments", - Column: "department_name", + Table: "countries", + Column: "region_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "departments", + Table: "locations", Column: "location_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, @@ -268,63 +252,47 @@ func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { }, { Schema: schema, - Table: "departments", - Column: "FOREIGN", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "employees", - Column: "employee_id", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "employees", - Column: "first_name", + Table: "locations", + Column: "street_address", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: "last_name", + Table: "locations", + Column: "postal_code", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: "email", + Table: "locations", + Column: "city", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: 
"phone_number", + Table: "locations", + Column: "state_province", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: "hire_date", + Table: "locations", + Column: "country_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", + Table: "jobs", Column: "job_id", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, @@ -332,48 +300,24 @@ func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { }, { Schema: schema, - Table: "employees", - Column: "salary", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "employees", - Column: "manager_id", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "employees", - Column: "department_id", - Transformer: &mgmtv1alpha1.JobMappingTransformer{ - Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, - }, - }, - { - Schema: schema, - Table: "employees", - Column: "FOREIGN", + Table: "jobs", + Column: "job_title", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: "FOREIGN", + Table: "jobs", + Column: "min_salary", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, { Schema: schema, - Table: "employees", - Column: "FOREIGN", + Table: "jobs", + Column: "max_salary", Transformer: &mgmtv1alpha1.JobMappingTransformer{ Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, diff --git a/internal/testutil/testdata/postgres/alltypes/create-tables.sql b/internal/testutil/testdata/postgres/alltypes/create-tables.sql index 6e99485b6c..1821d4df7e 100644 --- a/internal/testutil/testdata/postgres/alltypes/create-tables.sql +++ b/internal/testutil/testdata/postgres/alltypes/create-tables.sql @@ -218,7 +218,7 @@ INSERT INTO all_data_types ( -- ); -CREATE TABLE IF NOT EXISTS alltypes.array_types ( +CREATE TABLE IF NOT EXISTS array_types ( "id" BIGINT NOT NULL PRIMARY KEY, -- "int_array" _int4, -- "smallint_array" _int2, @@ -255,7 +255,7 @@ CREATE TABLE IF NOT EXISTS alltypes.array_types ( ); -INSERT INTO alltypes.array_types ( +INSERT INTO array_types ( id, -- int_array, smallint_array, bigint_array, -- real_array, diff --git a/internal/testutil/testdata/postgres/alltypes/job_mappings.go b/internal/testutil/testdata/postgres/alltypes/job_mappings.go index c4d72bb77a..08f933ff49 100644 --- a/internal/testutil/testdata/postgres/alltypes/job_mappings.go +++ b/internal/testutil/testdata/postgres/alltypes/job_mappings.go @@ -154,6 +154,14 @@ func GetDefaultSyncJobMappings(schema string)[]*mgmtv1alpha1.JobMapping { Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, + { + Schema: schema, + Table: "all_data_types", + Column: "interval_col", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, { Schema: schema, Table: "all_data_types", @@ -370,6 +378,22 @@ func GetDefaultSyncJobMappings(schema 
string)[]*mgmtv1alpha1.JobMapping { Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, }, }, + { + Schema: schema, + Table: "array_types", + Column: "id", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, + { + Schema: schema, + Table: "array_types", + Column: "interval_array", + Transformer: &mgmtv1alpha1.JobMappingTransformer{ + Source: mgmtv1alpha1.TransformerSource_TRANSFORMER_SOURCE_PASSTHROUGH, + }, + }, { Schema: schema, Table: "json_data", From 7117eee9f84c4262a5a85285a2963224773cbad7 Mon Sep 17 00:00:00 2001 From: Alisha Date: Mon, 2 Dec 2024 17:11:52 -0800 Subject: [PATCH 07/14] change runid to workflow id in benthos builder --- cli/internal/cmds/neosync/sync/sync.go | 2 +- .../cmds/neosync/sync/sync_integration_test.go | 4 ++++ internal/benthos/benthos-builder/benthos-builder.go | 10 +++++----- internal/benthos/benthos-builder/builders/aws-s3.go | 2 +- internal/benthos/benthos-builder/builders/dynamodb.go | 2 +- .../benthos-builder/builders/gcp-cloud-storage.go | 2 +- .../benthos/benthos-builder/builders/generate-ai.go | 2 +- internal/benthos/benthos-builder/builders/generate.go | 2 +- internal/benthos/benthos-builder/builders/mongodb.go | 2 +- internal/benthos/benthos-builder/builders/sql.go | 8 ++++---- internal/benthos/benthos-builder/generate-benthos.go | 4 ++-- internal/benthos/benthos-builder/internal/types.go | 4 ++-- internal/testutil/testdata/generators.go | 2 +- worker/pkg/integration-test/datasync_workflow.go | 9 ++++++++- .../activities/gen-benthos-configs/benthos-builder.go | 2 +- 15 files changed, 34 insertions(+), 23 deletions(-) diff --git a/cli/internal/cmds/neosync/sync/sync.go b/cli/internal/cmds/neosync/sync/sync.go index 7bcd92c6af..3ae081a650 100644 --- a/cli/internal/cmds/neosync/sync/sync.go +++ b/cli/internal/cmds/neosync/sync/sync.go @@ -393,7 +393,7 @@ func (c *clisync) configureSync() ([][]*benthosbuilder.BenthosConfigResponse, er SourceJobRunId: jobRunId, DestinationConnection: c.destinationConnection, SyncConfigs: syncConfigs, - RunId: "cli-sync", + WorkflowId: job.Id, Logger: c.logger, Sqlmanagerclient: c.sqlmanagerclient, Transformerclient: c.transformerclient, diff --git a/cli/internal/cmds/neosync/sync/sync_integration_test.go b/cli/internal/cmds/neosync/sync/sync_integration_test.go index 2997de835f..439a1bf648 100644 --- a/cli/internal/cmds/neosync/sync/sync_integration_test.go +++ b/cli/internal/cmds/neosync/sync/sync_integration_test.go @@ -209,6 +209,8 @@ func Test_Sync(t *testing.T) { ctx: ctx, logger: testlogger, cmd: cmdConfig, + connmanager: connmanager, + session: connectionmanager.NewUniqueSession(), } err := sync.configureAndRunSync() require.NoError(t, err) @@ -384,6 +386,8 @@ func Test_Sync(t *testing.T) { ctx: ctx, logger: testlogger, cmd: cmdConfig, + connmanager: connmanager, + session: connectionmanager.NewUniqueSession(), } err := sync.configureAndRunSync() require.NoError(t, err) diff --git a/internal/benthos/benthos-builder/benthos-builder.go b/internal/benthos/benthos-builder/benthos-builder.go index ea3f2ed551..654ea75ea6 100644 --- a/internal/benthos/benthos-builder/benthos-builder.go +++ b/internal/benthos/benthos-builder/benthos-builder.go @@ -198,7 +198,7 @@ type BenthosConfigManager struct { job *mgmtv1alpha1.Job sourceConnection *mgmtv1alpha1.Connection destinationConnections []*mgmtv1alpha1.Connection - runId string + workflowId string } // Manages all necessary configuration parameters for creating @@ -207,7 
+207,7 @@ type WorkerBenthosConfig struct { Job *mgmtv1alpha1.Job SourceConnection *mgmtv1alpha1.Connection DestinationConnections []*mgmtv1alpha1.Connection - RunId string + WorkflowId string MetricLabelKeyVals map[string]string Logger *slog.Logger Sqlmanagerclient sqlmanager.SqlManagerClient @@ -249,7 +249,7 @@ func NewWorkerBenthosConfigManager( job: config.Job, sourceConnection: config.SourceConnection, destinationConnections: config.DestinationConnections, - runId: config.RunId, + workflowId: config.WorkflowId, }, nil } @@ -262,7 +262,7 @@ type CliBenthosConfig struct { SourceJobRunId *string // for use when AWS S3 is the source PostgresDriverOverride *string // optional driver override. used for postgres SyncConfigs []*tabledependency.RunConfig - RunId string + WorkflowId string MetricLabelKeyVals map[string]string Logger *slog.Logger Sqlmanagerclient sqlmanager.SqlManagerClient @@ -305,7 +305,7 @@ func NewCliBenthosConfigManager( job: config.Job, sourceConnection: config.SourceConnection, destinationConnections: []*mgmtv1alpha1.Connection{config.DestinationConnection}, - runId: config.RunId, + workflowId: config.WorkflowId, }, nil } diff --git a/internal/benthos/benthos-builder/builders/aws-s3.go b/internal/benthos/benthos-builder/builders/aws-s3.go index 8c0451c292..38a1a2f9f8 100644 --- a/internal/benthos/benthos-builder/builders/aws-s3.go +++ b/internal/benthos/benthos-builder/builders/aws-s3.go @@ -49,7 +49,7 @@ func (b *awsS3SyncBuilder) BuildDestinationConfig(ctx context.Context, params *b s3pathpieces = append( s3pathpieces, "workflows", - params.RunId, + params.WorkflowId, "activities", neosync_benthos.BuildBenthosTable(benthosConfig.TableSchema, benthosConfig.TableName), "data", diff --git a/internal/benthos/benthos-builder/builders/dynamodb.go b/internal/benthos/benthos-builder/builders/dynamodb.go index 7d2d105114..89295a798d 100644 --- a/internal/benthos/benthos-builder/builders/dynamodb.go +++ b/internal/benthos/benthos-builder/builders/dynamodb.go @@ -104,7 +104,7 @@ func (b *dyanmodbSyncBuilder) BuildSourceConfigs(ctx context.Context, params *bb map[string][]*bb_internal.ReferenceKey{}, map[string][]*bb_internal.ReferenceKey{}, params.Job.Id, - params.RunId, + params.WorkflowId, &shared.RedisConfig{}, tableMapping.Mappings, map[string]*sqlmanager_shared.DatabaseSchemaRow{}, diff --git a/internal/benthos/benthos-builder/builders/gcp-cloud-storage.go b/internal/benthos/benthos-builder/builders/gcp-cloud-storage.go index 2c7079c662..f3e70ebdf6 100644 --- a/internal/benthos/benthos-builder/builders/gcp-cloud-storage.go +++ b/internal/benthos/benthos-builder/builders/gcp-cloud-storage.go @@ -47,7 +47,7 @@ func (b *gcpCloudStorageSyncBuilder) BuildDestinationConfig(ctx context.Context, pathpieces = append( pathpieces, "workflows", - params.RunId, + params.WorkflowId, "activities", neosync_benthos.BuildBenthosTable(benthosConfig.TableSchema, benthosConfig.TableName), "data", diff --git a/internal/benthos/benthos-builder/builders/generate-ai.go b/internal/benthos/benthos-builder/builders/generate-ai.go index 061d119989..c9fd5511aa 100644 --- a/internal/benthos/benthos-builder/builders/generate-ai.go +++ b/internal/benthos/benthos-builder/builders/generate-ai.go @@ -66,7 +66,7 @@ func (b *generateAIBuilder) BuildSourceConfigs(ctx context.Context, params *bb_i if err != nil { return nil, err } - db, err := b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.RunId)), constraintConnection, params.Logger) + db, err 
:= b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.WorkflowId)), constraintConnection, params.Logger) if err != nil { return nil, fmt.Errorf("unable to create new sql db: %w", err) } diff --git a/internal/benthos/benthos-builder/builders/generate.go b/internal/benthos/benthos-builder/builders/generate.go index c85385aad1..1d6d377199 100644 --- a/internal/benthos/benthos-builder/builders/generate.go +++ b/internal/benthos/benthos-builder/builders/generate.go @@ -49,7 +49,7 @@ func (b *generateBuilder) BuildSourceConfigs(ctx context.Context, params *bb_int return nil, fmt.Errorf("unable to get connection by id: %w", err) } - db, err := b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.RunId)), sourceConnection, logger) + db, err := b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.WorkflowId)), sourceConnection, logger) if err != nil { return nil, fmt.Errorf("unable to create new sql db: %w", err) } diff --git a/internal/benthos/benthos-builder/builders/mongodb.go b/internal/benthos/benthos-builder/builders/mongodb.go index 051f1bf299..6bf0cea1f3 100644 --- a/internal/benthos/benthos-builder/builders/mongodb.go +++ b/internal/benthos/benthos-builder/builders/mongodb.go @@ -73,7 +73,7 @@ func (b *mongodbSyncBuilder) BuildSourceConfigs(ctx context.Context, params *bb_ map[string][]*bb_internal.ReferenceKey{}, map[string][]*bb_internal.ReferenceKey{}, params.Job.Id, - params.RunId, + params.WorkflowId, &shared.RedisConfig{}, tableMapping.Mappings, map[string]*sqlmanager_shared.DatabaseSchemaRow{}, diff --git a/internal/benthos/benthos-builder/builders/sql.go b/internal/benthos/benthos-builder/builders/sql.go index d8417f8311..8606440f2f 100644 --- a/internal/benthos/benthos-builder/builders/sql.go +++ b/internal/benthos/benthos-builder/builders/sql.go @@ -86,7 +86,7 @@ func (b *sqlSyncBuilder) BuildSourceConfigs(ctx context.Context, params *bb_inte sourceTableOpts = groupSqlJobSourceOptionsByTable(sqlSourceOpts) } - db, err := b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.RunId)), sourceConnection, logger) + db, err := b.sqlmanagerclient.NewSqlConnection(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.WorkflowId)), sourceConnection, logger) if err != nil { return nil, fmt.Errorf("unable to create new sql db: %w", err) } @@ -147,7 +147,7 @@ func (b *sqlSyncBuilder) BuildSourceConfigs(ctx context.Context, params *bb_inte return nil, fmt.Errorf("unable to build select queries: %w", err) } - configs, err := buildBenthosSqlSourceConfigResponses(logger, ctx, b.transformerclient, groupedTableMapping, runConfigs, sourceConnection.Id, tableRunTypeQueryMap, groupedColumnInfo, filteredForeignKeysMap, colTransformerMap, job.Id, params.RunId, b.redisConfig, primaryKeyToForeignKeysMap) + configs, err := buildBenthosSqlSourceConfigResponses(logger, ctx, b.transformerclient, groupedTableMapping, runConfigs, sourceConnection.Id, tableRunTypeQueryMap, groupedColumnInfo, filteredForeignKeysMap, colTransformerMap, job.Id, params.WorkflowId, b.redisConfig, primaryKeyToForeignKeysMap) if err != nil { return nil, fmt.Errorf("unable to build benthos sql source config responses: %w", err) } @@ -277,7 +277,7 @@ func (b *sqlSyncBuilder) BuildDestinationConfig(ctx context.Context, params *bb_ // lazy load if 
len(b.mergedSchemaColumnMap) == 0 { - sqlSchemaColMap := getSqlSchemaColumnMap(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.RunId)), params.DestConnection, b.sqlSourceSchemaColumnInfoMap, b.sqlmanagerclient, params.Logger) + sqlSchemaColMap := getSqlSchemaColumnMap(ctx, connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(params.WorkflowId)), params.DestConnection, b.sqlSourceSchemaColumnInfoMap, b.sqlmanagerclient, params.Logger) b.mergedSchemaColumnMap = sqlSchemaColMap } if len(b.mergedSchemaColumnMap) == 0 { @@ -363,7 +363,7 @@ func (b *sqlSyncBuilder) BuildDestinationConfig(ctx context.Context, params *bb_ if b.redisConfig == nil { return nil, fmt.Errorf("missing redis config. this operation requires redis") } - hashedKey := neosync_benthos.HashBenthosCacheKey(params.Job.GetId(), params.RunId, tableKey, col) + hashedKey := neosync_benthos.HashBenthosCacheKey(params.Job.GetId(), params.WorkflowId, tableKey, col) config.Outputs = append(config.Outputs, neosync_benthos.Outputs{ RedisHashOutput: &neosync_benthos.RedisHashOutputConfig{ Url: b.redisConfig.Url, diff --git a/internal/benthos/benthos-builder/generate-benthos.go b/internal/benthos/benthos-builder/generate-benthos.go index d184bfc982..8c039843b3 100644 --- a/internal/benthos/benthos-builder/generate-benthos.go +++ b/internal/benthos/benthos-builder/generate-benthos.go @@ -24,7 +24,7 @@ func (b *BenthosConfigManager) GenerateBenthosConfigs( sourceParams := &bb_internal.SourceParams{ Job: b.job, - RunId: b.runId, + WorkflowId: b.workflowId, SourceConnection: b.sourceConnection, Logger: b.logger, } @@ -55,7 +55,7 @@ func (b *BenthosConfigManager) GenerateBenthosConfigs( destParams := &bb_internal.DestinationParams{ SourceConfig: sourceConfig, Job: b.job, - RunId: b.runId, + WorkflowId: b.workflowId, DestinationOpts: destOpts, DestConnection: destConnection, Logger: b.logger, diff --git a/internal/benthos/benthos-builder/internal/types.go b/internal/benthos/benthos-builder/internal/types.go index 870c92cbe0..016b02f656 100644 --- a/internal/benthos/benthos-builder/internal/types.go +++ b/internal/benthos/benthos-builder/internal/types.go @@ -111,7 +111,7 @@ type BenthosBuilder interface { // SourceParams contains all parameters needed to build a source benthos configuration type SourceParams struct { Job *mgmtv1alpha1.Job - RunId string + WorkflowId string SourceConnection *mgmtv1alpha1.Connection Logger *slog.Logger } @@ -125,7 +125,7 @@ type ReferenceKey struct { type DestinationParams struct { SourceConfig *BenthosSourceConfig Job *mgmtv1alpha1.Job - RunId string + WorkflowId string DestinationOpts *mgmtv1alpha1.JobDestinationOptions DestConnection *mgmtv1alpha1.Connection Logger *slog.Logger diff --git a/internal/testutil/testdata/generators.go b/internal/testutil/testdata/generators.go index 7121dec06b..c37d56bd4f 100644 --- a/internal/testutil/testdata/generators.go +++ b/internal/testutil/testdata/generators.go @@ -1,3 +1,3 @@ -package testuitl_testdata +package testutil_testdata //go:generate go run jobmapping_generator.go gen_jobmappings_config.json $GOPACKAGE diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go index 42c7d43ea6..bfb9867083 100644 --- a/worker/pkg/integration-test/datasync_workflow.go +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -14,6 +14,7 @@ import ( accountstatus_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/account-status" 
genbenthosconfigs_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/gen-benthos-configs" jobhooks_by_timing_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/jobhooks-by-timing" + posttablesync_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/post-table-sync" runsqlinittablestmts_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts" "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/shared" sync_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync" @@ -22,19 +23,22 @@ import ( datasync_workflow "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow" "go.opentelemetry.io/otel/metric" "go.temporal.io/sdk/client" + "go.temporal.io/sdk/log" "go.temporal.io/sdk/testsuite" ) func ExecuteTestDataSyncWorkflow( - t *testing.T, + t testing.TB, neosyncApi *tcneosyncapi.NeosyncApiTestClient, redisUrl *string, jobId string, ) *testsuite.TestWorkflowEnvironment { + t.Helper() connclient := neosyncApi.UnauthdClients.Connections jobclient := neosyncApi.UnauthdClients.Jobs transformerclient := neosyncApi.UnauthdClients.Transformers userclient := neosyncApi.UnauthdClients.Users + var redisconfig *shared.RedisConfig if redisUrl != nil && *redisUrl != "" { redisconfig = &shared.RedisConfig{ @@ -57,6 +61,7 @@ func ExecuteTestDataSyncWorkflow( // temporal workflow testSuite := &testsuite.WorkflowTestSuite{} + testSuite.SetLogger(log.NewStructuredLogger(testutil.GetTestLogger(t))) env := testSuite.NewTestWorkflowEnvironment() // register activities @@ -75,6 +80,7 @@ func ExecuteTestDataSyncWorkflow( runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) accountStatusActivity := accountstatus_activity.New(userclient) + posttableSyncActivity := posttablesync_activity.New(jobclient, sqlmanager, connclient) env.RegisterWorkflow(datasync_workflow.Workflow) env.RegisterActivity(syncActivity.Sync) @@ -84,6 +90,7 @@ func ExecuteTestDataSyncWorkflow( env.RegisterActivity(genbenthosActivity.GenerateBenthosConfigs) env.RegisterActivity(accountStatusActivity.CheckAccountStatus) env.RegisterActivity(jobhookTimingActivity.RunJobHooksByTiming) + env.RegisterActivity(posttableSyncActivity.RunPostTableSync) env.SetTestTimeout(600 * time.Second) // increase the test timeout env.SetStartWorkflowOptions(client.StartWorkflowOptions{ID: jobId}) diff --git a/worker/pkg/workflows/datasync/activities/gen-benthos-configs/benthos-builder.go b/worker/pkg/workflows/datasync/activities/gen-benthos-configs/benthos-builder.go index aa3dfda137..db8ebcd285 100644 --- a/worker/pkg/workflows/datasync/activities/gen-benthos-configs/benthos-builder.go +++ b/worker/pkg/workflows/datasync/activities/gen-benthos-configs/benthos-builder.go @@ -97,7 +97,7 @@ func (b *benthosBuilder) GenerateBenthosConfigsNew( Job: job, SourceConnection: sourceConnection, DestinationConnections: destConnections, - RunId: wfmetadata.RunId, + WorkflowId: wfmetadata.WorkflowId, Logger: slogger, Sqlmanagerclient: b.sqlmanagerclient, Transformerclient: b.transformerclient, From d0f3eb45b2471d1fd56a3c0877bb6635337f8b13 Mon Sep 17 00:00:00 2001 From: Alisha Date: Mon, 2 Dec 2024 22:33:02 -0800 Subject: [PATCH 08/14] use workflow id as group in session --- 
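Note on this change: Temporal keeps the workflow ID stable for a job run, while the
run ID changes whenever a workflow is retried or continued-as-new, so keying
connection-manager sessions on WorkflowExecution.ID (presumably the intent here)
lets pooled connections be grouped per job run rather than per run attempt; the
following patch pins the test workflow ID to the job ID so this grouping is
deterministic in tests. A minimal sketch of the pattern the activities below adopt,
assuming only the connectionmanager and Temporal activity APIs already used in this
series (the helper name is illustrative):

package example

import (
	"context"

	connectionmanager "github.com/nucleuscloud/neosync/internal/connection-manager"
	"go.temporal.io/sdk/activity"
)

// sessionForActivity derives the connection session group for one activity invocation.
func sessionForActivity(ctx context.Context) {
	info := activity.GetInfo(ctx)
	// Group by WorkflowExecution.ID (stable across workflow retries) rather than
	// WorkflowExecution.RunID, so every activity in the same job run lands in the
	// same session group and can share pooled connections.
	session := connectionmanager.NewUniqueSession(
		connectionmanager.WithSessionGroup(info.WorkflowExecution.ID),
	)
	_ = session // in the real activities this is passed to the sql/mongo connection managers
}
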
.../datasync/activities/jobhooks-by-timing/activity.go | 2 +- .../workflows/datasync/activities/post-table-sync/activity.go | 2 +- .../datasync/activities/run-sql-init-table-stmts/activity.go | 2 +- worker/pkg/workflows/datasync/activities/sync/activity.go | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/worker/pkg/workflows/datasync/activities/jobhooks-by-timing/activity.go b/worker/pkg/workflows/datasync/activities/jobhooks-by-timing/activity.go index 9fbec2774a..93e94cc914 100644 --- a/worker/pkg/workflows/datasync/activities/jobhooks-by-timing/activity.go +++ b/worker/pkg/workflows/datasync/activities/jobhooks-by-timing/activity.go @@ -104,7 +104,7 @@ func (a *Activity) RunJobHooksByTiming( } }() - session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(activityInfo.WorkflowExecution.RunID)) + session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(activityInfo.WorkflowExecution.ID)) execCount := uint(0) for _, hook := range hooks { diff --git a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go index 748d0d8d9a..cada9aa777 100644 --- a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go +++ b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go @@ -49,7 +49,7 @@ func (a *Activity) RunPostTableSync( req *RunPostTableSyncRequest, ) (*RunPostTableSyncResponse, error) { activityInfo := activity.GetInfo(ctx) - session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(activityInfo.WorkflowExecution.RunID)) + session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(activityInfo.WorkflowExecution.ID)) externalId := shared.GetPostTableSyncConfigExternalId(req.Name) loggerKeyVals := []any{ "accountId", req.AccountId, diff --git a/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts/activity.go b/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts/activity.go index 6a10b1baea..0b34146087 100644 --- a/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts/activity.go +++ b/worker/pkg/workflows/datasync/activities/run-sql-init-table-stmts/activity.go @@ -76,7 +76,7 @@ func (a *Activity) RunSqlInitTableStatements( return builder.RunSqlInitTableStatements( ctx, req, - connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(info.WorkflowExecution.RunID)), + connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(info.WorkflowExecution.ID)), slogger, ) } diff --git a/worker/pkg/workflows/datasync/activities/sync/activity.go b/worker/pkg/workflows/datasync/activities/sync/activity.go index e4de8c7470..0530d422c6 100644 --- a/worker/pkg/workflows/datasync/activities/sync/activity.go +++ b/worker/pkg/workflows/datasync/activities/sync/activity.go @@ -95,7 +95,7 @@ var ( // Temporal activity that runs benthos and syncs a source connection to one or more destination connections func (a *Activity) Sync(ctx context.Context, req *SyncRequest, metadata *SyncMetadata) (*SyncResponse, error) { info := activity.GetInfo(ctx) - session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(info.WorkflowExecution.RunID)) + session := connectionmanager.NewUniqueSession(connectionmanager.WithSessionGroup(info.WorkflowExecution.ID)) isRetry := info.Attempt > 1 loggerKeyVals := []any{ "metadata", metadata, From 3d5d68e128d46a2015788f3d726b1044fdcad93c Mon Sep 17 00:00:00 2001 From: Alisha Date: Mon, 2 
Dec 2024 22:39:15 -0800 Subject: [PATCH 09/14] set test workflow id --- .../workflows/datasync/workflow/workflow_integration_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go index b7d494123e..bd4fd14b07 100644 --- a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go +++ b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go @@ -36,6 +36,7 @@ import ( testdata_javascripttransformers "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/javascript-transformers" mssql_datatypes "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mssql/data-types" mssql_simple "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mssql/simple" + "go.temporal.io/sdk/client" mysql_alltypes "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mysql/all-types" mysql_compositekeys "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mysql/composite-keys" @@ -1653,6 +1654,7 @@ func executeWorkflow( env.RegisterActivity(posttableSyncActivity.RunPostTableSync) env.SetTestTimeout(600 * time.Second) // increase the test timeout + env.SetStartWorkflowOptions(client.StartWorkflowOptions{ID: jobId}) env.ExecuteWorkflow(Workflow, &WorkflowRequest{JobId: jobId}) return env } From 77f479a02d5de59e13a2948aed2553cdfda5dc7e Mon Sep 17 00:00:00 2001 From: Alisha Date: Tue, 3 Dec 2024 09:19:18 -0800 Subject: [PATCH 10/14] disable jobhooks for unauthed job service in test --- backend/pkg/integration-test/integration-test.go | 1 - .../workflows/datasync/activities/post-table-sync/activity.go | 2 +- .../workflows/datasync/workflow/workflow_integration_test.go | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/pkg/integration-test/integration-test.go b/backend/pkg/integration-test/integration-test.go index 746437cf26..3970a92ca8 100644 --- a/backend/pkg/integration-test/integration-test.go +++ b/backend/pkg/integration-test/integration-test.go @@ -263,7 +263,6 @@ func (s *NeosyncApiTestClient) Setup(ctx context.Context, t testing.TB) error { jobhookService := jobhooks.New( neosyncdb.New(pgcontainer.DB, db_queries.New()), unauthdUserService, - jobhooks.WithEnabled(), ) unauthdJobsService := v1alpha1_jobservice.New( diff --git a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go index cada9aa777..450863e053 100644 --- a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go +++ b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go @@ -133,5 +133,5 @@ func (a *Activity) RunPostTableSync( } func runContextNotFound(err error) bool { - return strings.Contains(err.Error(), "no run context exists") + return strings.Contains(err.Error(), "unable to find key") } diff --git a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go index bd4fd14b07..aab9f30bdd 100644 --- a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go +++ b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go @@ -142,7 +142,7 @@ func (s *IntegrationTestSuite) Test_Workflow_Sync_Postgres() { } } - jobId := "115aaf2c-776e-4847-8268-d914e3c15968" + jobId := uuid.New().String() srcConnId := 
"c9b6ce58-5c8e-4dce-870d-96841b19d988" destConnId := "226add85-5751-4232-b085-a0ae93afc7ce" From 5bf984bb397facaa621a9e5bc367563f3f73d65f Mon Sep 17 00:00:00 2001 From: Alisha Date: Tue, 3 Dec 2024 16:15:15 -0800 Subject: [PATCH 11/14] valid ee license --- cli/internal/cmds/neosync/sync/sync_integration_test.go | 5 +++-- internal/testutil/utils.go | 6 ++++-- worker/pkg/integration-test/datasync_workflow.go | 5 +++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/cli/internal/cmds/neosync/sync/sync_integration_test.go b/cli/internal/cmds/neosync/sync/sync_integration_test.go index 439a1bf648..4fa027bf16 100644 --- a/cli/internal/cmds/neosync/sync/sync_integration_test.go +++ b/cli/internal/cmds/neosync/sync/sync_integration_test.go @@ -55,6 +55,7 @@ func Test_Sync(t *testing.T) { &awsS3Config.Region, ) outputType := output.PlainOutput + validEELicense := false t.Run("postgres", func(t *testing.T) { t.Parallel() @@ -177,7 +178,7 @@ func Test_Sync(t *testing.T) { require.NoError(t, err) t.Run("Postgres_to_S3", func(t *testing.T) { - env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId()) + env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId(), validEELicense) require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. Test: pg_s3") err = env.GetWorkflowError() require.NoError(t, err, "Received Temporal Workflow Error", "testName", "pg_s3") @@ -355,7 +356,7 @@ func Test_Sync(t *testing.T) { require.NoError(t, err) t.Run("Mysql_to_S3", func(t *testing.T) { - env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId()) + env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId(), validEELicense) require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. 
Test: mysql_to_s3") err = env.GetWorkflowError() require.NoError(t, err, "Received Temporal Workflow Error", "testName", "mysql_to_s3") diff --git a/internal/testutil/utils.go b/internal/testutil/utils.go index ca9adf1147..8b31909799 100644 --- a/internal/testutil/utils.go +++ b/internal/testutil/utils.go @@ -55,10 +55,12 @@ func GetTestLogger(t testing.TB) *slog.Logger { return slogt.New(t, f) } -type FakeEELicense struct{} +type FakeEELicense struct { + IsValid bool +} func (f *FakeEELicense) IsValid() bool { - return true + return f.IsValid } func GetConcurrentTestLogger(t testing.TB) *slog.Logger { diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go index bfb9867083..6b882d5600 100644 --- a/worker/pkg/integration-test/datasync_workflow.go +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -32,6 +32,7 @@ func ExecuteTestDataSyncWorkflow( neosyncApi *tcneosyncapi.NeosyncApiTestClient, redisUrl *string, jobId string, + validEELicense bool, ) *testsuite.TestWorkflowEnvironment { t.Helper() connclient := neosyncApi.UnauthdClients.Connections @@ -77,8 +78,8 @@ func ExecuteTestDataSyncWorkflow( var activityMeter metric.Meter syncActivity := sync_activity.New(connclient, jobclient, sqlconnmanager, mongoconnmanager, activityMeter, sync_activity.NewBenthosStreamManager()) retrieveActivityOpts := syncactivityopts_activity.New(jobclient) - runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) - jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{}) + runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{IsValid: validEELicense}) + jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{IsValid: validEELicense}) accountStatusActivity := accountstatus_activity.New(userclient) posttableSyncActivity := posttablesync_activity.New(jobclient, sqlmanager, connclient) From 19ad5bcd2730cd6147bd8c2b2261bd04b1ad529e Mon Sep 17 00:00:00 2001 From: Alisha Date: Wed, 4 Dec 2024 09:34:44 -0800 Subject: [PATCH 12/14] fake ee license --- internal/testutil/utils.go | 20 +++++++++++++++++-- .../pkg/integration-test/datasync_workflow.go | 20 ++++++++++++++----- 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/internal/testutil/utils.go b/internal/testutil/utils.go index 8b31909799..9ffb53b79c 100644 --- a/internal/testutil/utils.go +++ b/internal/testutil/utils.go @@ -56,11 +56,27 @@ func GetTestLogger(t testing.TB) *slog.Logger { } type FakeEELicense struct { - IsValid bool + isValid bool +} + +type Option func(*FakeEELicense) + +func WithIsValid() Option { + return func(f *FakeEELicense) { + f.isValid = true + } +} + +func NewFakeEELicense(opts ...Option) *FakeEELicense { + f := &FakeEELicense{} + for _, opt := range opts { + opt(f) + } + return f } func (f *FakeEELicense) IsValid() bool { - return f.IsValid + return f.isValid } func GetConcurrentTestLogger(t testing.TB) *slog.Logger { diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go index 6b882d5600..4fb5433539 100644 --- a/worker/pkg/integration-test/datasync_workflow.go +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -52,9 +52,14 @@ func ExecuteTestDataSyncWorkflow( } sqlconnmanager := 
connectionmanager.NewConnectionManager(sqlprovider.NewProvider(&sqlconnect.SqlOpenConnector{}), connectionmanager.WithReaperPoll(10*time.Second)) - go sqlconnmanager.Reaper(testutil.GetTestLogger(t)) + go sqlconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) mongoconnmanager := connectionmanager.NewConnectionManager(mongoprovider.NewProvider()) - go mongoconnmanager.Reaper(testutil.GetTestLogger(t)) + go mongoconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) + + t.Cleanup(func() { + sqlconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) + mongoconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) + }) sqlmanager := sql_manager.NewSqlManager( sql_manager.WithConnectionManager(sqlconnmanager), @@ -62,7 +67,7 @@ func ExecuteTestDataSyncWorkflow( // temporal workflow testSuite := &testsuite.WorkflowTestSuite{} - testSuite.SetLogger(log.NewStructuredLogger(testutil.GetTestLogger(t))) + testSuite.SetLogger(log.NewStructuredLogger(testutil.GetConcurrentTestLogger(t))) env := testSuite.NewTestWorkflowEnvironment() // register activities @@ -75,11 +80,16 @@ func ExecuteTestDataSyncWorkflow( false, ) + fakeEELicense := testutil.NewFakeEELicense() + if validEELicense { + fakeEELicense = testutil.NewFakeEELicense(testutil.WithIsValid()) + } + var activityMeter metric.Meter syncActivity := sync_activity.New(connclient, jobclient, sqlconnmanager, mongoconnmanager, activityMeter, sync_activity.NewBenthosStreamManager()) retrieveActivityOpts := syncactivityopts_activity.New(jobclient) - runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{IsValid: validEELicense}) - jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, &testutil.FakeEELicense{IsValid: validEELicense}) + runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, fakeEELicense) + jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, fakeEELicense) accountStatusActivity := accountstatus_activity.New(userclient) posttableSyncActivity := posttablesync_activity.New(jobclient, sqlmanager, connclient) From a0c57b5b593428ca334c699d0e1598cedcc7778e Mon Sep 17 00:00:00 2001 From: Alisha Date: Wed, 4 Dec 2024 10:48:44 -0800 Subject: [PATCH 13/14] clean up test datasync workflow --- .../neosync/sync/sync_integration_test.go | 33 ++-- .../pkg/integration-test/datasync_workflow.go | 172 +++++++++++++----- .../activities/post-table-sync/activity.go | 6 +- .../workflow/workflow_integration_test.go | 2 +- 4 files changed, 150 insertions(+), 63 deletions(-) diff --git a/cli/internal/cmds/neosync/sync/sync_integration_test.go b/cli/internal/cmds/neosync/sync/sync_integration_test.go index 4fa027bf16..d24aa3a16b 100644 --- a/cli/internal/cmds/neosync/sync/sync_integration_test.go +++ b/cli/internal/cmds/neosync/sync/sync_integration_test.go @@ -8,17 +8,14 @@ import ( "connectrpc.com/connect" mgmtv1alpha1 "github.com/nucleuscloud/neosync/backend/gen/go/protos/mgmt/v1alpha1" tcneosyncapi "github.com/nucleuscloud/neosync/backend/pkg/integration-test" - "github.com/nucleuscloud/neosync/backend/pkg/sqlconnect" - "github.com/nucleuscloud/neosync/backend/pkg/sqlmanager" "github.com/nucleuscloud/neosync/cli/internal/output" connectionmanager "github.com/nucleuscloud/neosync/internal/connection-manager" - "github.com/nucleuscloud/neosync/internal/connection-manager/providers/sqlprovider" "github.com/nucleuscloud/neosync/internal/testutil" tcmysql 
"github.com/nucleuscloud/neosync/internal/testutil/testcontainers/mysql" tcpostgres "github.com/nucleuscloud/neosync/internal/testutil/testcontainers/postgres" mysqlalltypes "github.com/nucleuscloud/neosync/internal/testutil/testdata/mysql/alltypes" pgalltypes "github.com/nucleuscloud/neosync/internal/testutil/testdata/postgres/alltypes" - workertest "github.com/nucleuscloud/neosync/worker/pkg/integration-test" + tcworkflow "github.com/nucleuscloud/neosync/worker/pkg/integration-test" "github.com/stretchr/testify/require" ) @@ -40,9 +37,10 @@ func Test_Sync(t *testing.T) { connclient := neosyncApi.UnauthdClients.Connections conndataclient := neosyncApi.UnauthdClients.ConnectionData jobclient := neosyncApi.UnauthdClients.Jobs - connmanager := connectionmanager.NewConnectionManager(sqlprovider.NewProvider(&sqlconnect.SqlOpenConnector{})) - sqlmanagerclient := sqlmanager.NewSqlManager(sqlmanager.WithConnectionManager(connmanager)) + dbManagers := tcworkflow.NewTestDatabaseManagers(t) + connmanager := dbManagers.SqlConnManager + sqlmanagerclient := dbManagers.SqlManager accountId := tcneosyncapi.CreatePersonalAccount(ctx, t, neosyncApi.UnauthdClients.Users) awsS3Config := testutil.GetTestAwsS3Config() s3Conn := tcneosyncapi.CreateS3Connection( @@ -55,7 +53,6 @@ func Test_Sync(t *testing.T) { &awsS3Config.Region, ) outputType := output.PlainOutput - validEELicense := false t.Run("postgres", func(t *testing.T) { t.Parallel() @@ -178,10 +175,12 @@ func Test_Sync(t *testing.T) { require.NoError(t, err) t.Run("Postgres_to_S3", func(t *testing.T) { - env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId(), validEELicense) - require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. Test: pg_s3") - err = env.GetWorkflowError() - require.NoError(t, err, "Received Temporal Workflow Error", "testName", "pg_s3") + testworkflow := tcworkflow.NewTestDataSyncWorkflowEnv(t, neosyncApi, dbManagers) + testworkflow.RequireActivitiesCompletedSuccessfully(t) + testworkflow.ExecuteTestDataSyncWorkflow(job.Msg.GetJob().GetId()) + require.Truef(t, testworkflow.TestEnv.IsWorkflowCompleted(), "Workflow did not complete. Test: pg_to_s3") + err = testworkflow.TestEnv.GetWorkflowError() + require.NoError(t, err, "Received Temporal Workflow Error", "testName", "pg_to_s3") }) t.Run("S3_to_Postgres", func(t *testing.T) { @@ -356,9 +355,11 @@ func Test_Sync(t *testing.T) { require.NoError(t, err) t.Run("Mysql_to_S3", func(t *testing.T) { - env := workertest.ExecuteTestDataSyncWorkflow(t, neosyncApi, nil, job.Msg.GetJob().GetId(), validEELicense) - require.Truef(t, env.IsWorkflowCompleted(), "Workflow did not complete. Test: mysql_to_s3") - err = env.GetWorkflowError() + testworkflow := tcworkflow.NewTestDataSyncWorkflowEnv(t, neosyncApi, dbManagers) + testworkflow.RequireActivitiesCompletedSuccessfully(t) + testworkflow.ExecuteTestDataSyncWorkflow(job.Msg.GetJob().GetId()) + require.Truef(t, testworkflow.TestEnv.IsWorkflowCompleted(), "Workflow did not complete. 
Test: mysql_to_s3") + err = testworkflow.TestEnv.GetWorkflowError() require.NoError(t, err, "Received Temporal Workflow Error", "testName", "mysql_to_s3") }) @@ -409,7 +410,7 @@ func Test_Sync(t *testing.T) { t.Cleanup(func() { err := mysql.TearDown(ctx) if err != nil { - panic(err) + t.Fatal(err) } }) }) @@ -417,7 +418,7 @@ func Test_Sync(t *testing.T) { t.Cleanup(func() { err = neosyncApi.TearDown(ctx) if err != nil { - panic(err) + t.Fatal(err) } }) } diff --git a/worker/pkg/integration-test/datasync_workflow.go b/worker/pkg/integration-test/datasync_workflow.go index 4fb5433539..6dfde25e1c 100644 --- a/worker/pkg/integration-test/datasync_workflow.go +++ b/worker/pkg/integration-test/datasync_workflow.go @@ -1,6 +1,7 @@ package integrationtest import ( + "fmt" "testing" "time" @@ -11,6 +12,9 @@ import ( "github.com/nucleuscloud/neosync/internal/connection-manager/providers/mongoprovider" "github.com/nucleuscloud/neosync/internal/connection-manager/providers/sqlprovider" "github.com/nucleuscloud/neosync/internal/testutil" + neosync_redis "github.com/nucleuscloud/neosync/worker/internal/redis" + neosync_benthos_mongodb "github.com/nucleuscloud/neosync/worker/pkg/benthos/mongodb" + neosync_benthos_sql "github.com/nucleuscloud/neosync/worker/pkg/benthos/sql" accountstatus_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/account-status" genbenthosconfigs_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/gen-benthos-configs" jobhooks_by_timing_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/jobhooks-by-timing" @@ -21,90 +25,168 @@ import ( syncactivityopts_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync-activity-opts" syncrediscleanup_activity "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/activities/sync-redis-clean-up" datasync_workflow "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow" + "github.com/redis/go-redis/v9" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel/metric" + "go.temporal.io/sdk/activity" "go.temporal.io/sdk/client" + "go.temporal.io/sdk/converter" "go.temporal.io/sdk/log" "go.temporal.io/sdk/testsuite" ) -func ExecuteTestDataSyncWorkflow( - t testing.TB, - neosyncApi *tcneosyncapi.NeosyncApiTestClient, - redisUrl *string, - jobId string, - validEELicense bool, -) *testsuite.TestWorkflowEnvironment { - t.Helper() - connclient := neosyncApi.UnauthdClients.Connections - jobclient := neosyncApi.UnauthdClients.Jobs - transformerclient := neosyncApi.UnauthdClients.Transformers - userclient := neosyncApi.UnauthdClients.Users +type Option func(*TestWorkflowEnv) + +type TestWorkflowEnv struct { + neosyncApi *tcneosyncapi.NeosyncApiTestClient + redisconfig *shared.RedisConfig + redisclient redis.UniversalClient + fakeEELicense *testutil.FakeEELicense + TestEnv *testsuite.TestWorkflowEnvironment +} - var redisconfig *shared.RedisConfig - if redisUrl != nil && *redisUrl != "" { - redisconfig = &shared.RedisConfig{ - Url: *redisUrl, +// WithRedis creates redis client with provided URL +func WithRedis(url string) Option { + return func(c *TestWorkflowEnv) { + c.redisconfig = &shared.RedisConfig{ + Url: url, Kind: "simple", Tls: &shared.RedisTlsConfig{ Enabled: false, }, } } +} - sqlconnmanager := connectionmanager.NewConnectionManager(sqlprovider.NewProvider(&sqlconnect.SqlOpenConnector{}), connectionmanager.WithReaperPoll(10*time.Second)) - go 
sqlconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) - mongoconnmanager := connectionmanager.NewConnectionManager(mongoprovider.NewProvider()) - go mongoconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) +// WithValidEELicense creates a valid enterprise edition license +func WithValidEELicense() Option { + return func(c *TestWorkflowEnv) { + c.fakeEELicense = testutil.NewFakeEELicense(testutil.WithIsValid()) + } +} - t.Cleanup(func() { - sqlconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) - mongoconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) - }) +// NewTestDataSyncWorkflowEnv creates and configures a new test datasync workflow environment +func NewTestDataSyncWorkflowEnv( + t testing.TB, + neosyncApi *tcneosyncapi.NeosyncApiTestClient, + dbManagers *TestDatabaseManagers, + opts ...Option, +) *TestWorkflowEnv { + t.Helper() - sqlmanager := sql_manager.NewSqlManager( - sql_manager.WithConnectionManager(sqlconnmanager), - ) + workflowEnv := &TestWorkflowEnv{ + neosyncApi: neosyncApi, + fakeEELicense: testutil.NewFakeEELicense(), + } + + for _, opt := range opts { + opt(workflowEnv) + } + + redisclient, err := neosync_redis.GetRedisClient(workflowEnv.redisconfig) + if err != nil { + t.Fatal(err) + } + workflowEnv.redisclient = redisclient + + connclient := neosyncApi.UnauthdClients.Connections + jobclient := neosyncApi.UnauthdClients.Jobs + transformerclient := neosyncApi.UnauthdClients.Transformers + userclient := neosyncApi.UnauthdClients.Users - // temporal workflow testSuite := &testsuite.WorkflowTestSuite{} testSuite.SetLogger(log.NewStructuredLogger(testutil.GetConcurrentTestLogger(t))) env := testSuite.NewTestWorkflowEnvironment() - // register activities genbenthosActivity := genbenthosconfigs_activity.New( jobclient, connclient, transformerclient, - sqlmanager, - redisconfig, + dbManagers.SqlManager, + workflowEnv.redisconfig, false, ) - fakeEELicense := testutil.NewFakeEELicense() - if validEELicense { - fakeEELicense = testutil.NewFakeEELicense(testutil.WithIsValid()) - } - var activityMeter metric.Meter - syncActivity := sync_activity.New(connclient, jobclient, sqlconnmanager, mongoconnmanager, activityMeter, sync_activity.NewBenthosStreamManager()) + syncActivity := sync_activity.New(connclient, jobclient, dbManagers.SqlConnManager, dbManagers.MongoConnManager, activityMeter, sync_activity.NewBenthosStreamManager()) retrieveActivityOpts := syncactivityopts_activity.New(jobclient) - runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, sqlmanager, fakeEELicense) - jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, sqlmanager, fakeEELicense) + runSqlInitTableStatements := runsqlinittablestmts_activity.New(jobclient, connclient, dbManagers.SqlManager, workflowEnv.fakeEELicense) + jobhookTimingActivity := jobhooks_by_timing_activity.New(jobclient, connclient, dbManagers.SqlManager, workflowEnv.fakeEELicense) accountStatusActivity := accountstatus_activity.New(userclient) - posttableSyncActivity := posttablesync_activity.New(jobclient, sqlmanager, connclient) + posttableSyncActivity := posttablesync_activity.New(jobclient, dbManagers.SqlManager, connclient) + redisCleanUpActivity := syncrediscleanup_activity.New(workflowEnv.redisclient) env.RegisterWorkflow(datasync_workflow.Workflow) env.RegisterActivity(syncActivity.Sync) env.RegisterActivity(retrieveActivityOpts.RetrieveActivityOptions) env.RegisterActivity(runSqlInitTableStatements.RunSqlInitTableStatements) - 
env.RegisterActivity(syncrediscleanup_activity.DeleteRedisHash) + env.RegisterActivity(redisCleanUpActivity.DeleteRedisHash) env.RegisterActivity(genbenthosActivity.GenerateBenthosConfigs) env.RegisterActivity(accountStatusActivity.CheckAccountStatus) env.RegisterActivity(jobhookTimingActivity.RunJobHooksByTiming) env.RegisterActivity(posttableSyncActivity.RunPostTableSync) - env.SetTestTimeout(600 * time.Second) // increase the test timeout + env.SetTestTimeout(600 * time.Second) + + workflowEnv.TestEnv = env + + return workflowEnv +} + +// ExecuteTestDataSyncWorkflow starts the test workflow with the given job ID +func (w *TestWorkflowEnv) ExecuteTestDataSyncWorkflow(jobId string) { + w.TestEnv.SetStartWorkflowOptions(client.StartWorkflowOptions{ID: jobId}) + w.TestEnv.ExecuteWorkflow(datasync_workflow.Workflow, &datasync_workflow.WorkflowRequest{JobId: jobId}) +} + +// RequireActivitiesCompletedSuccessfully verifies all activities completed without errors +// NOTE: this should be called before ExecuteTestDataSyncWorkflow +func (w *TestWorkflowEnv) RequireActivitiesCompletedSuccessfully(t testing.TB) { + w.TestEnv.SetOnActivityCompletedListener(func(activityInfo *activity.Info, result converter.EncodedValue, err error) { + require.NoError(t, err, "Activity %s failed", activityInfo.ActivityType.Name) + if activityInfo.ActivityType.Name == "RunPostTableSync" && result.HasValue() { + var postTableSyncResp posttablesync_activity.RunPostTableSyncResponse + decodeErr := result.Get(&postTableSyncResp) + require.NoError(t, decodeErr, "Failed to decode result for activity %s", activityInfo.ActivityType.Name) + require.Emptyf(t, postTableSyncResp.Errors, "Post table sync activity returned errors: %v", formatPostTableSyncErrors(postTableSyncResp.Errors)) + } + }) +} + +func formatPostTableSyncErrors(errors []*posttablesync_activity.PostTableSyncError) []string { + formatted := []string{} + for _, err := range errors { + for _, e := range err.Errors { + formatted = append(formatted, fmt.Sprintf("statement: %s error: %s", e.Statement, e.Error)) + } + } + return formatted +} - env.SetStartWorkflowOptions(client.StartWorkflowOptions{ID: jobId}) - env.ExecuteWorkflow(datasync_workflow.Workflow, &datasync_workflow.WorkflowRequest{JobId: jobId}) - return env +// TestDatabaseManagers holds managers for supported connection types +type TestDatabaseManagers struct { + SqlConnManager *connectionmanager.ConnectionManager[neosync_benthos_sql.SqlDbtx] + SqlManager *sql_manager.SqlManager + MongoConnManager *connectionmanager.ConnectionManager[neosync_benthos_mongodb.MongoClient] +} + +// NewTestDatabaseManagers creates and configures database connection managers for testing +func NewTestDatabaseManagers(t testing.TB) *TestDatabaseManagers { + sqlconnmanager := connectionmanager.NewConnectionManager(sqlprovider.NewProvider(&sqlconnect.SqlOpenConnector{}), connectionmanager.WithReaperPoll(10*time.Second)) + go sqlconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) + mongoconnmanager := connectionmanager.NewConnectionManager(mongoprovider.NewProvider()) + go mongoconnmanager.Reaper(testutil.GetConcurrentTestLogger(t)) + + t.Cleanup(func() { + sqlconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) + mongoconnmanager.Shutdown(testutil.GetConcurrentTestLogger(t)) + }) + + sqlmanager := sql_manager.NewSqlManager( + sql_manager.WithConnectionManager(sqlconnmanager), + ) + return &TestDatabaseManagers{ + SqlConnManager: sqlconnmanager, + SqlManager: sqlmanager, + MongoConnManager: mongoconnmanager, + } } diff 
--git a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go index 7e1ce3c1d3..fb7ce15f57 100644 --- a/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go +++ b/worker/pkg/workflows/datasync/activities/post-table-sync/activity.go @@ -157,5 +157,9 @@ func (a *Activity) RunPostTableSync( } func runContextNotFound(err error) bool { - return strings.Contains(err.Error(), "unable to find key") + connectErr, ok := err.(*connect.Error) + if ok && connectErr.Code() == connect.CodeNotFound { + return true + } + return strings.Contains(err.Error(), "unable to find key") || strings.Contains(err.Error(), "no run context exists with the provided key") } diff --git a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go index d0fa4df87c..83a5f0e779 100644 --- a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go +++ b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go @@ -1666,7 +1666,7 @@ func executeWorkflow( env.RegisterActivity(syncActivity.Sync) env.RegisterActivity(retrieveActivityOpts.RetrieveActivityOptions) env.RegisterActivity(runSqlInitTableStatements.RunSqlInitTableStatements) - env.RegisterActivity(redisCleanUpActivity) + env.RegisterActivity(redisCleanUpActivity.DeleteRedisHash) env.RegisterActivity(genbenthosActivity.GenerateBenthosConfigs) env.RegisterActivity(accountStatusActivity.CheckAccountStatus) env.RegisterActivity(jobhookTimingActivity.RunJobHooksByTiming) From 20bd317ef18a45a789fa2aaf2ac591621ff30762 Mon Sep 17 00:00:00 2001 From: Alisha Date: Wed, 4 Dec 2024 10:58:03 -0800 Subject: [PATCH 14/14] fix import --- .../pkg/workflows/datasync/workflow/workflow_integration_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go index 83a5f0e779..cbdd2a14ef 100644 --- a/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go +++ b/worker/pkg/workflows/datasync/workflow/workflow_integration_test.go @@ -37,7 +37,6 @@ import ( testdata_javascripttransformers "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/javascript-transformers" mssql_datatypes "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mssql/data-types" mssql_simple "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mssql/simple" - "go.temporal.io/sdk/client" mysql_alltypes "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mysql/all-types" mysql_compositekeys "github.com/nucleuscloud/neosync/worker/pkg/workflows/datasync/workflow/testdata/mysql/composite-keys"