From 0e046cdcd3c7eb4c09adef928bdbeca907f31bf1 Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Thu, 26 May 2022 13:40:58 +0530 Subject: [PATCH 01/15] initial changes --- conversion/conversion.go | 1 + ui/src/app/app-routing.module.ts | 5 ++ ui/src/app/app.module.ts | 4 +- .../prepare-migration.component.html | 50 +++++++++++ .../prepare-migration.component.scss | 87 +++++++++++++++++++ .../prepare-migration.component.spec.ts | 25 ++++++ .../prepare-migration.component.ts | 20 +++++ .../workspace/workspace.component.html | 1 + .../workspace/workspace.component.ts | 8 +- 9 files changed, 199 insertions(+), 2 deletions(-) create mode 100644 ui/src/app/components/prepare-migration/prepare-migration.component.html create mode 100644 ui/src/app/components/prepare-migration/prepare-migration.component.scss create mode 100644 ui/src/app/components/prepare-migration/prepare-migration.component.spec.ts create mode 100644 ui/src/app/components/prepare-migration/prepare-migration.component.ts diff --git a/conversion/conversion.go b/conversion/conversion.go index 1dbd0ae417..3ca1dd3581 100644 --- a/conversion/conversion.go +++ b/conversion/conversion.go @@ -458,6 +458,7 @@ func CreateOrUpdateDatabase(ctx context.Context, adminClient *database.DatabaseA } // Adding migration metadata to the outgoing context. 
migrationData := metrics.GetMigrationData(conv, driver, targetDb, constants.SchemaConv) + fmt.Println(migrationData.String()) serializedMigrationData, _ := proto.Marshal(migrationData) migrationMetadataValue := base64.StdEncoding.EncodeToString(serializedMigrationData) ctx = metadata.AppendToOutgoingContext(ctx, migrationMetadataKey, migrationMetadataValue) diff --git a/ui/src/app/app-routing.module.ts b/ui/src/app/app-routing.module.ts index 06f7481b5a..4ef22270b1 100644 --- a/ui/src/app/app-routing.module.ts +++ b/ui/src/app/app-routing.module.ts @@ -8,6 +8,7 @@ import { LoadSessionComponent } from './components/load-session/load-session.com import { SourceSelectionComponent } from './components/source-selection/source-selection.component' import { SummaryComponent } from './components/summary/summary.component' import { WorkspaceComponent } from './components/workspace/workspace.component' +import { PrepareMigrationComponent } from './components/prepare-migration/prepare-migration.component' const routes: Routes = [ @@ -42,6 +43,10 @@ const routes: Routes = [ path: 'workspace', component: WorkspaceComponent, }, + { + path: 'prepare-migration', + component: PrepareMigrationComponent, + }, { path: 'instruction', component: InstructionComponent, diff --git a/ui/src/app/app.module.ts b/ui/src/app/app.module.ts index 46f9e58ad2..9c7e862866 100644 --- a/ui/src/app/app.module.ts +++ b/ui/src/app/app.module.ts @@ -30,7 +30,8 @@ import { AddIndexFormComponent } from './components/add-index-form/add-index-for import { EditGlobalDatatypeFormComponent } from './components/edit-global-datatype-form/edit-global-datatype-form.component' import { SidenavViewAssessmentComponent } from './components/sidenav-view-assessment/sidenav-view-assessment.component' import { SidenavSaveSessionComponent } from './components/sidenav-save-session/sidenav-save-session.component' -import { DropIndexDialogComponent } from './components/drop-index-dialog/drop-index-dialog.component' +import { 
DropIndexDialogComponent } from './components/drop-index-dialog/drop-index-dialog.component'; +import { PrepareMigrationComponent } from './components/prepare-migration/prepare-migration.component' @NgModule({ declarations: [ @@ -58,6 +59,7 @@ import { DropIndexDialogComponent } from './components/drop-index-dialog/drop-in SidenavViewAssessmentComponent, SidenavSaveSessionComponent, DropIndexDialogComponent, + PrepareMigrationComponent, ], imports: [ BrowserModule, diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.html b/ui/src/app/components/prepare-migration/prepare-migration.component.html new file mode 100644 index 0000000000..dfb76bbb63 --- /dev/null +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.html @@ -0,0 +1,50 @@ +
+ +
+
+
+

Source and destination Database definitions

+
+ + + + Database engine type + + + + + + + + + + + +
TitleDatabase versionConnection detailsSourceDestination
+
+
+
+
+ + Prerequisites + Before we begin, please ensure you have done the following: +
+
+

+ 1 + Connect Database via Datastream +

+

+ 2 + Setup Dataflow template +

+
+
+
+
\ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.scss b/ui/src/app/components/prepare-migration/prepare-migration.component.scss new file mode 100644 index 0000000000..88dce0a0f0 --- /dev/null +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.scss @@ -0,0 +1,87 @@ +.header { + padding: 0 20px; + display: flex; + justify-content: space-between; + align-items: center; + border-bottom: 1px solid #cccccc; + padding-bottom: 10px; + .header_action { + button { + border: none; + background-color: inherit; + color: #1967d2; + font-size: 0.8rem; + } + } + .breadcrumb { + display: flex; + align-items: center; + .breadcrumb_source { + padding-left: 0; + color: rgba(0, 0, 0, 0.56); + font-weight: 400; + font-size: 14px; + } + .breadcrumb_workspace { + font-weight: 400; + font-size: 14px; + } + .breadcrumb_prepare_migration { + font-weight: 400; + font-size: 14px; + } + span { + font-size: 1.2rem; + color: rgb(143, 143, 143); + } + } + } +.prerequisites { + padding: 0 20px; + width: 600px; + .mat-card-title { + font-size: 13pt; + font-weight: normal; + } + .mat-card-subtitle { + font-size: 9pt; + font-weight: normal; + } + p { + color: #5c5c5c; + .bullet { + position: absolute; + display: inline-block; + background-color: #e5e5e5; + padding: 0px 5px 10px 5px; + margin: 0; + border-radius: 10px; + font-size: 9px; + height: 9px; + width: 9px; + text-align: center; + } + span { + margin-left: 40px; + font-weight: lighter; + font-size: 13px; + } + } + hr { + border-color: #ffffff; + margin-bottom: 20px; + } +} + +.definition-container { + max-height: 500px; + margin-left: 20px; + overflow: auto; + th { + font-size: 13px; + } + } + + table { + width: 30%; + } \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.spec.ts b/ui/src/app/components/prepare-migration/prepare-migration.component.spec.ts new file mode 100644 index 0000000000..2e4ec076db 
--- /dev/null +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.spec.ts @@ -0,0 +1,25 @@ +import { ComponentFixture, TestBed } from '@angular/core/testing'; + +import { PrepareMigrationComponent } from './prepare-migration.component'; + +describe('PrepareMigrationComponent', () => { + let component: PrepareMigrationComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ PrepareMigrationComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(PrepareMigrationComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.ts b/ui/src/app/components/prepare-migration/prepare-migration.component.ts new file mode 100644 index 0000000000..fcda72dc20 --- /dev/null +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.ts @@ -0,0 +1,20 @@ +import { Component, OnInit } from '@angular/core'; + +@Component({ + selector: 'app-prepare-migration', + templateUrl: './prepare-migration.component.html', + styleUrls: ['./prepare-migration.component.scss'] +}) +export class PrepareMigrationComponent implements OnInit { + + displayedColumns = [ + 'Title', + 'Source', + 'Destination', + ] + constructor() { } + + ngOnInit(): void { + } + +} diff --git a/ui/src/app/components/workspace/workspace.component.html b/ui/src/app/components/workspace/workspace.component.html index 4e05aa65e9..8a6976c716 100644 --- a/ui/src/app/components/workspace/workspace.component.html +++ b/ui/src/app/components/workspace/workspace.component.html @@ -6,6 +6,7 @@ Configure Schema
+

- \ No newline at end of file + + \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.scss b/ui/src/app/components/prepare-migration/prepare-migration.component.scss index 88dce0a0f0..1be6d47489 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.scss +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.scss @@ -84,4 +84,7 @@ table { width: 30%; + } + .link-test { + color: #1967d2; } \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.ts b/ui/src/app/components/prepare-migration/prepare-migration.component.ts index fcda72dc20..423f2daade 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.ts +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.ts @@ -1,4 +1,6 @@ import { Component, OnInit } from '@angular/core'; +import { MatDialog } from '@angular/material/dialog'; +import { TargetDetailsFormComponent } from '../target-details-form/target-details-form.component'; @Component({ selector: 'app-prepare-migration', @@ -12,9 +14,19 @@ export class PrepareMigrationComponent implements OnInit { 'Source', 'Destination', ] - constructor() { } + constructor(private dialog: MatDialog) { } ngOnInit(): void { } + openTargetDetailsForm() { + let openDialog = this.dialog.open(TargetDetailsFormComponent, { + width: '30vw', + minWidth: '400px', + maxWidth: '500px', + }) + openDialog.afterClosed().subscribe(() => { + + }) + } } diff --git a/ui/src/app/components/target-details-form/target-details-form.component.html b/ui/src/app/components/target-details-form/target-details-form.component.html new file mode 100644 index 0000000000..05f67e01e5 --- /dev/null +++ b/ui/src/app/components/target-details-form/target-details-form.component.html @@ -0,0 +1,19 @@ +
+
+

Target Details

+ + Target Database + + +
+
+ + +
+
diff --git a/ui/src/app/components/target-details-form/target-details-form.component.scss b/ui/src/app/components/target-details-form/target-details-form.component.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ui/src/app/components/target-details-form/target-details-form.component.spec.ts b/ui/src/app/components/target-details-form/target-details-form.component.spec.ts new file mode 100644 index 0000000000..47b7beb3a9 --- /dev/null +++ b/ui/src/app/components/target-details-form/target-details-form.component.spec.ts @@ -0,0 +1,25 @@ +import { ComponentFixture, TestBed } from '@angular/core/testing'; + +import { TargetDetailsFormComponent } from './target-details-form.component'; + +describe('TargetDetailsFormComponent', () => { + let component: TargetDetailsFormComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ TargetDetailsFormComponent ] + }) + .compileComponents(); + }); + + beforeEach(() => { + fixture = TestBed.createComponent(TargetDetailsFormComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/ui/src/app/components/target-details-form/target-details-form.component.ts b/ui/src/app/components/target-details-form/target-details-form.component.ts new file mode 100644 index 0000000000..06f77b4ec8 --- /dev/null +++ b/ui/src/app/components/target-details-form/target-details-form.component.ts @@ -0,0 +1,44 @@ +import { Component, Inject, OnInit } from '@angular/core'; +import { FormBuilder, FormControl, FormGroup, Validators } from '@angular/forms'; +import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog'; +import ITargetDetails from 'src/app/model/target-details'; + +@Component({ + selector: 'app-target-details-form', + templateUrl: './target-details-form.component.html', + styleUrls: ['./target-details-form.component.scss'] 
+}) +export class TargetDetailsFormComponent implements OnInit { +targetDetailsForm : FormGroup + fetch: any; + snack: any; + constructor( + private fb: FormBuilder, + @Inject(MAT_DIALOG_DATA) public data: ITargetDetails, + private dialogRef: MatDialogRef) { + this.targetDetailsForm = this.fb.group({ + TargetDB: ['', Validators.required], + }) + dialogRef.disableClose = true + } + + ngOnInit(): void { + } + + updateTargetDetails() { + let formValue = this.targetDetailsForm.value + let payload: ITargetDetails = { + TargetDB: formValue.TargetDB, + } + + this.fetch.setSpannerConfig(payload).subscribe({ + next: (res: TargetDetailsFormComponent) => { + this.snack.openSnackBar('Target details updated successfully', 'Close', 5) + this.dialogRef.close({ ...res }) + }, + error: (err: any) => { + this.snack.openSnackBar(err.message, 'Close') + }, + }) + } +} diff --git a/ui/src/app/model/target-details.ts b/ui/src/app/model/target-details.ts new file mode 100644 index 0000000000..b224f9c05e --- /dev/null +++ b/ui/src/app/model/target-details.ts @@ -0,0 +1,3 @@ +export default interface ITargetDetails { + TargetDB: string + } \ No newline at end of file diff --git a/ui/src/app/services/fetch/fetch.service.ts b/ui/src/app/services/fetch/fetch.service.ts index 203c515c5d..cfe8622161 100644 --- a/ui/src/app/services/fetch/fetch.service.ts +++ b/ui/src/app/services/fetch/fetch.service.ts @@ -7,6 +7,7 @@ import IConv, { ICreateIndex, IInterleaveStatus, IPrimaryKey } from '../../model import IDumpConfig from '../../model/dump-config' import ISessionConfig from '../../model/session-config' import ISpannerConfig from '../../model/spanner-config' +import ITargetDetails from 'src/app/model/target-details' @Injectable({ providedIn: 'root', @@ -101,6 +102,10 @@ export class FetchService { return this.http.post(`${this.url}/SetSpannerConfig`, payload) } + setTargetDetails(payload: ITargetDetails) { + return this.http.post(`${this.url}/SetTargetDetails`,payload) + } + InitiateSession() { 
return this.http.post(`${this.url}/InitiateSession`, {}) } From c72edbc491cc907d2566eb976017b25ec3466900 Mon Sep 17 00:00:00 2001 From: Deep1998 Date: Fri, 1 Jul 2022 14:25:34 +0530 Subject: [PATCH 03/15] Lowdb demo --- common/utils/utils.go | 38 ++++++++++++++ conversion/conversion.go | 27 +++++++--- sources/mysql/infoschema.go | 15 +++++- streaming/streaming.go | 35 +++++++++++-- test_data/mysqldump.test.out | 99 ------------------------------------ 5 files changed, 104 insertions(+), 110 deletions(-) diff --git a/common/utils/utils.go b/common/utils/utils.go index 4081b2d347..3181ffd5bf 100644 --- a/common/utils/utils.go +++ b/common/utils/utils.go @@ -170,6 +170,44 @@ func PreloadGCSFiles(tables []ManifestTable) ([]ManifestTable, error) { return tables, nil } +func WriteToGCS(filePath, fileName, data string) error { + ctx := context.Background() + + client, err := storage.NewClient(ctx) + if err != nil { + fmt.Printf("Failed to create GCS client") + return err + } + defer client.Close() + u, err := url.Parse(filePath) + if err != nil { + fmt.Printf("parseFilePath: unable to parse file path %s", filePath) + return err + } + if u.Scheme != "gs" { + fmt.Printf("not a valid GCS path: %s, should start with 'gs'", filePath) + return err + } + bucketName := u.Host + prefix := u.Path[1:] + if prefix[len(prefix)-1] != '/' { + prefix = prefix + "/" + } + bucket := client.Bucket(bucketName) + obj := bucket.Object(prefix + fileName) + + w := obj.NewWriter(ctx) + if _, err := fmt.Fprint(w, data); err != nil { + fmt.Printf("Failed to write to Cloud Storage: %s", filePath) + return err + } + if err := w.Close(); err != nil { + fmt.Printf("Failed to close GCS file: %s", filePath) + return err + } + return nil +} + // GetProject returns the cloud project we should use for accessing Spanner. // Use environment variable GCLOUD_PROJECT if it is set. // Otherwise, use the default project returned from gcloud. 
diff --git a/conversion/conversion.go b/conversion/conversion.go index 1dbd0ae417..bdb43f0167 100644 --- a/conversion/conversion.go +++ b/conversion/conversion.go @@ -184,6 +184,18 @@ func performSnapshotMigration(config writer.BatchWriterConfig, conv *internal.Co return batchWriter, nil } +func snapshotMigrationHandler(sourceProfile profiles.SourceProfile, config writer.BatchWriterConfig, conv *internal.Conv, client *sp.Client, infoSchema common.InfoSchema) (*writer.BatchWriter, error) { + switch sourceProfile.Driver { + // Skip snapshot migration via harbourbridge for mysql and oracle since dataflow job will handle this from backfilled data. + case constants.MYSQL, constants.ORACLE: + return &writer.BatchWriter{}, nil + case constants.DYNAMODB: + return performSnapshotMigration(config, conv, client, infoSchema) + default: + return &writer.BatchWriter{}, fmt.Errorf("streaming migration not supported for driver %s", sourceProfile.Driver) + } +} + func dataFromDatabase(ctx context.Context, sourceProfile profiles.SourceProfile, targetProfile profiles.TargetProfile, config writer.BatchWriterConfig, conv *internal.Conv, client *sp.Client) (*writer.BatchWriter, error) { infoSchema, err := GetInfoSchema(sourceProfile, targetProfile) if err != nil { @@ -195,16 +207,19 @@ func dataFromDatabase(ctx context.Context, sourceProfile profiles.SourceProfile, if err != nil { return nil, err } - } - bw, err := performSnapshotMigration(config, conv, client, infoSchema) - if err != nil { - return nil, err - } - if sourceProfile.Conn.Streaming { + bw, err := snapshotMigrationHandler(sourceProfile, config, conv, client, infoSchema) + if err != nil { + return nil, err + } err = infoSchema.StartStreamingMigration(ctx, client, conv, streamInfo) if err != nil { return nil, err } + return bw, nil + } + bw, err := performSnapshotMigration(config, conv, client, infoSchema) + if err != nil { + return nil, err } return bw, nil } diff --git a/sources/mysql/infoschema.go
b/sources/mysql/infoschema.go index 2542877ea9..6b88892441 100644 --- a/sources/mysql/infoschema.go +++ b/sources/mysql/infoschema.go @@ -17,6 +17,7 @@ package mysql import ( "context" "database/sql" + "encoding/json" "fmt" "sort" "strings" @@ -25,6 +26,7 @@ import ( _ "github.com/go-sql-driver/mysql" // The driver should be used via the database/sql package. _ "github.com/lib/pq" + "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/internal" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/cloudspannerecosystem/harbourbridge/schema" @@ -350,8 +352,19 @@ func (isi InfoSchemaImpl) StartChangeDataCapture(ctx context.Context, conv *inte // performing a streaming migration. func (isi InfoSchemaImpl) StartStreamingMigration(ctx context.Context, client *sp.Client, conv *internal.Conv, streamingInfo map[string]interface{}) error { streamingCfg, _ := streamingInfo["streamingCfg"].(streaming.StreamingCfg) - err := streaming.StartDataflow(ctx, isi.SourceProfile, isi.TargetProfile, streamingCfg) + convJSON, err := json.MarshalIndent(conv, "", " ") if err != nil { + err = fmt.Errorf("can't encode session state to JSON: %v", err) + return err + } + err = utils.WriteToGCS(streamingCfg.TmpDir, "session.json", string(convJSON)) + if err != nil { + err = fmt.Errorf("error writing session file to GCS: %v", err) + return err + } + err = streaming.StartDataflow(ctx, isi.SourceProfile, isi.TargetProfile, streamingCfg) + if err != nil { + err = fmt.Errorf("error starting dataflow: %v", err) return err } return nil diff --git a/streaming/streaming.go b/streaming/streaming.go index ae8d7dc85f..3216323664 100644 --- a/streaming/streaming.go +++ b/streaming/streaming.go @@ -18,10 +18,12 @@ import ( "encoding/json" "fmt" "io/ioutil" + "net/url" "time" dataflow "cloud.google.com/go/dataflow/apiv1beta3" datastream "cloud.google.com/go/datastream/apiv1alpha1" + "cloud.google.com/go/storage" datastreampb 
"google.golang.org/genproto/googleapis/cloud/datastream/v1alpha1" dataflowpb "google.golang.org/genproto/googleapis/dataflow/v1beta3" fieldmaskpb "google.golang.org/protobuf/types/known/fieldmaskpb" @@ -58,6 +60,7 @@ type DataflowCfg struct { type StreamingCfg struct { DatastreamCfg DatastreamCfg DataflowCfg DataflowCfg + TmpDir string } // VerifyAndUpdateCfg checks the fields and errors out if certain fields are empty. @@ -105,6 +108,27 @@ func VerifyAndUpdateCfg(streamingCfg *StreamingCfg, dbName string) error { } streamingCfg.DataflowCfg.JobName = jobName } + + filePath := streamingCfg.TmpDir + u, err := url.Parse(filePath) + if err != nil { + return fmt.Errorf("parseFilePath: unable to parse file path %s", filePath) + } + if u.Scheme != "gs" { + return fmt.Errorf("not a valid GCS path: %s, should start with 'gs'", filePath) + } + bucketName := u.Host + ctx := context.Background() + client, err := storage.NewClient(ctx) + if err != nil { + return fmt.Errorf("failed to create GCS client") + } + defer client.Close() + bucket := client.Bucket(bucketName) + _, err = bucket.Attrs(ctx) + if err != nil { + return fmt.Errorf("bucket %s does not exist", bucketName) + } return nil } @@ -187,7 +211,7 @@ func LaunchStream(ctx context.Context, sourceProfile profiles.SourceProfile, pro SourceConfig: srcCfg, DestinationConfig: dstCfg, State: datastreampb.Stream_RUNNING, - BackfillStrategy: &datastreampb.Stream_BackfillNone{BackfillNone: &datastreampb.Stream_BackfillNoneStrategy{}}, + BackfillStrategy: &datastreampb.Stream_BackfillAll{BackfillAll: &datastreampb.Stream_BackfillAllStrategy{}}, } createStreamRequest := &datastreampb.CreateStreamRequest{ Parent: fmt.Sprintf("projects/%s/locations/%s", projectID, datastreamCfg.StreamLocation), @@ -229,8 +253,10 @@ func LaunchStream(ctx context.Context, sourceProfile profiles.SourceProfile, pro } // LaunchDataflowJob populates the parameters from the streaming config and triggers a Dataflow job. 
-func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile, datastreamCfg DatastreamCfg, dataflowCfg DataflowCfg) error { +func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile, streamingCfg StreamingCfg) error { project, instance, dbName, _ := targetProfile.GetResourceIds(ctx, time.Now(), "", nil) + dataflowCfg := streamingCfg.DataflowCfg + datastreamCfg := streamingCfg.DatastreamCfg fmt.Println("Launching dataflow job ", dataflowCfg.JobName, " in ", project, "-", dataflowCfg.Location) c, err := dataflow.NewFlexTemplatesClient(ctx) @@ -259,12 +285,13 @@ func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile launchParameter := &dataflowpb.LaunchFlexTemplateParameter{ JobName: dataflowCfg.JobName, - Template: &dataflowpb.LaunchFlexTemplateParameter_ContainerSpecGcsPath{ContainerSpecGcsPath: "gs://dataflow-templates/latest/flex/Cloud_Datastream_to_Spanner"}, + Template: &dataflowpb.LaunchFlexTemplateParameter_ContainerSpecGcsPath{ContainerSpecGcsPath: "gs://deepchowdhury-gsql/images/datastream-to-spanner-transform-image-spec.json"}, Parameters: map[string]string{ "inputFilePattern": inputFilePattern, "streamName": fmt.Sprintf("projects/%s/locations/%s/streams/%s", project, datastreamCfg.StreamLocation, datastreamCfg.StreamId), "instanceId": instance, "databaseId": dbName, + "sessionFile": streamingCfg.TmpDir + "session.json", }, } @@ -313,7 +340,7 @@ func StartDatastream(ctx context.Context, sourceProfile profiles.SourceProfile, } func StartDataflow(ctx context.Context, sourceProfile profiles.SourceProfile, targetProfile profiles.TargetProfile, streamingCfg StreamingCfg) error { - err := LaunchDataflowJob(ctx, targetProfile, streamingCfg.DatastreamCfg, streamingCfg.DataflowCfg) + err := LaunchDataflowJob(ctx, targetProfile, streamingCfg) if err != nil { return fmt.Errorf("error launching dataflow: %v", err) } diff --git a/test_data/mysqldump.test.out b/test_data/mysqldump.test.out index 
3a44f3296e..d35207a876 100644 --- a/test_data/mysqldump.test.out +++ b/test_data/mysqldump.test.out @@ -1,65 +1,3 @@ --- MySQL dump 10.17 Distrib 10.3.23-MariaDB, for debian-linux-gnu (x86_64) --- --- Host: 127.0.0.1 Database: cart --- ------------------------------------------------------ --- Server version 5.7.25-google-log - -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; -/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; -/*!40101 SET NAMES utf8mb4 */; -/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; -/*!40103 SET TIME_ZONE='+00:00' */; -/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; - --- --- Table structure for table `cart` --- - -DROP TABLE IF EXISTS `cart`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `cart` ( - `user_id` varchar(20) NOT NULL, - `product_id` varchar(20) NOT NULL, - `quantity` bigint(20) DEFAULT NULL, - `last_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`user_id`,`product_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - -CREATE INDEX idx ON `cart` (`quantity`); - --- --- Dumping data for table `cart` --- - -LOCK TABLES `cart` WRITE; -/*!40000 ALTER TABLE `cart` DISABLE KEYS */; -INSERT INTO `cart` VALUES ('901e-a6cfc2b502dc','abc-123',1,'2020-07-20 05:10:26'),('901e-a6cfc2b502dc','axd-673',2,'2020-07-20 05:10:43'),('a86b-82493320a775','zxi-631',5,'2020-07-20 05:10:46'); -/*!40000 ALTER TABLE `cart` ENABLE KEYS */; -UNLOCK TABLES; - --- --- Table name `PRODUCTS` differs only case from the table `cart`. 
--- This was added to cover more cases in our integration tests. --- -DROP TABLE IF EXISTS `PRODUCTS`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `PRODUCTS` ( - `usr_id` varchar(20) NOT NULL, - `prod_id` varchar(20) NOT NULL, - PRIMARY KEY (`usr_id`,`prod_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `products` --- DROP TABLE IF EXISTS `products`; /*!40101 SET @saved_cs_client = @@character_set_client */; @@ -88,40 +26,3 @@ LOCK TABLES `products` WRITE; INSERT INTO `products` VALUES ('abc-123','Blue suede shoes',141.99,'2020-06-06'),('axd-673','Antique typewriter',99.99,'2020-06-07'),('zxi-631','Glass vase',55.50,'2020-06-10'); /*!40000 ALTER TABLE `products` ENABLE KEYS */; UNLOCK TABLES; - --- --- Table structure for table `customers` --- - -DROP TABLE IF EXISTS `customers`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `customers` ( - `c_id` varchar(20) NOT NULL, - `customer_profile` json DEFAULT NULL, - PRIMARY KEY (`c_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Dumping data for table `customers` --- - -LOCK TABLES `customers` WRITE; -/*!40000 ALTER TABLE `customers` DISABLE KEYS */; -INSERT INTO `customers` VALUES -('svd-124','{"first_name": "Lola", "last_name": "Dog", "location": "NYC", "online" : true, "friends" : 547}'), -('tel-595','{"first_name": "Ernie", "status": "Looking for treats", "location" : "Brooklyn"}'); -/*!40000 ALTER TABLE `customers` ENABLE KEYS */; -UNLOCK TABLES; -/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; - -/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; -/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; -/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; -/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; -/*!40101 SET 
CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; -/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; -/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; - --- Dump completed on 2020-08-10 17:40:01 From 46eafc6f7bcda48752e56212dbf4f95b85733da6 Mon Sep 17 00:00:00 2001 From: Deep1998 Date: Thu, 7 Jul 2022 15:32:27 +0530 Subject: [PATCH 04/15] Use STRING(50) type instead of INT64 for synthetic PK --- internal/convert.go | 2 +- internal/convert_test.go | 4 +- sources/mysql/data.go | 2 +- sources/mysql/data_test.go | 4 +- sources/mysql/infoschema_test.go | 6 +- sources/mysql/mysqldump_test.go | 56 +++++++-------- sources/oracle/data.go | 2 +- sources/oracle/data_test.go | 4 +- sources/postgres/data.go | 2 +- sources/postgres/data_test.go | 4 +- sources/postgres/infoschema.go | 2 +- sources/postgres/infoschema_test.go | 4 +- sources/postgres/pgdump_test.go | 102 ++++++++++++++-------------- sources/sqlserver/data.go | 2 +- sources/sqlserver/data_test.go | 4 +- 15 files changed, 100 insertions(+), 100 deletions(-) diff --git a/internal/convert.go b/internal/convert.go index 8f66667049..120be1bb54 100644 --- a/internal/convert.go +++ b/internal/convert.go @@ -309,7 +309,7 @@ func (conv *Conv) AddPrimaryKeys() { if !primaryKeyPopulated { k := conv.buildPrimaryKey(t) ct.ColNames = append(ct.ColNames, k) - ct.ColDefs[k] = ddl.ColumnDef{Name: k, T: ddl.Type{Name: ddl.Int64}} + ct.ColDefs[k] = ddl.ColumnDef{Name: k, T: ddl.Type{Name: ddl.String, Len: 50}} ct.Pks = []ddl.IndexKey{{Col: k}} conv.SyntheticPKeys[t] = SyntheticPKey{k, 0} } diff --git a/internal/convert_test.go b/internal/convert_test.go index 82832abd83..5ec5f77fa1 100644 --- a/internal/convert_test.go +++ b/internal/convert_test.go @@ -218,7 +218,7 @@ func TestAddPrimaryKeys(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "a": {Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": {Name: "b", T: ddl.Type{Name: ddl.Float64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: 
"synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{{Name: "", Table: "", Unique: false, Keys: []ddl.IndexKey{{Col: "b"}}}}, @@ -246,7 +246,7 @@ func TestAddPrimaryKeys(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "a": {Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": {Name: "b", T: ddl.Type{Name: ddl.Float64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}}, }, diff --git a/sources/mysql/data.go b/sources/mysql/data.go index 590e811900..33de4ee39f 100644 --- a/sources/mysql/data.go +++ b/sources/mysql/data.go @@ -84,7 +84,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/mysql/data_test.go b/sources/mysql/data_test.go index c48baae0ca..5e02712a2e 100644 --- a/sources/mysql/data_test.go +++ b/sources/mysql/data_test.go @@ -279,14 +279,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), float64(6.6), true, fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" diff --git a/sources/mysql/infoschema_test.go b/sources/mysql/infoschema_test.go index dc1e101140..9e21ce24eb 100644 --- 
a/sources/mysql/infoschema_test.go +++ b/sources/mysql/infoschema_test.go @@ -388,7 +388,7 @@ func TestProcessData_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.Float64}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}, } @@ -404,8 +404,8 @@ func TestProcessData_MultiCol(t *testing.T) { }) common.ProcessData(conv, isi) assert.Equal(t, []spannerData{ - {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), int64(0)}}, - {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), int64(-9223372036854775808)}}}, + {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), "0"}}, + {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), "-9223372036854775808"}}}, rows) assert.Equal(t, int64(0), conv.Unexpecteds()) } diff --git a/sources/mysql/mysqldump_test.go b/sources/mysql/mysqldump_test.go index 1111e81685..69378506c9 100644 --- a/sources/mysql/mysqldump_test.go +++ b/sources/mysql/mysqldump_test.go @@ -125,7 +125,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "productid": {Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": {Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": {Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}}}, }, @@ -175,7 +175,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: 
map[string]ddl.ColumnDef{ "c": {Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": {Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}, Fks: []ddl.Foreignkey{{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -209,7 +209,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "A_fk_test_2", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -235,7 +235,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -260,7 +260,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: 
"synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -294,7 +294,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -322,7 +322,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e", "f"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}}}, @@ -343,7 +343,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", 
Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -405,7 +405,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -468,7 +468,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -625,10 +625,10 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, "('a33','b',9),\n" + "('a3','b',7);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", 
"synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT INTO with empty cols", @@ -639,10 +639,10 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, "('a33','b',NULL),\n" + "('a3','b',7);\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", 
int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -657,7 +657,7 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -809,14 +809,14 @@ func TestProcessMySQLDump_DataError(t *testing.T) { vals: []interface{}{int64(7), float64(42.1), true, getDate("2019-10-29"), []byte{0x89, 0x50}, []spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"b", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"c", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"d", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"e", "synth_id"}, vals: []interface{}{[]byte{0x89, 0x50}, bitReverse(5)}}, + fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"b", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"c", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: []string{"d", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), 
fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"e", "synth_id"}, vals: []interface{}{[]byte{0x89, 0x50}, fmt.Sprintf("%d", bitReverse(5))}}, spannerData{table: "test", cols: []string{"f", "synth_id"}, - vals: []interface{}{[]spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, bitReverse(6)}}, + vals: []interface{}{[]spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, fmt.Sprintf("%d", bitReverse(6))}}, }, }, } @@ -876,7 +876,7 @@ func TestProcessMySQLDump_AddPrimaryKeys(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -891,7 +891,7 @@ func TestProcessMySQLDump_AddPrimaryKeys(t *testing.T) { "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id0": ddl.ColumnDef{Name: "synth_id0", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id1": ddl.ColumnDef{Name: "synth_id1", T: ddl.Type{Name: ddl.Int64}}, - "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.Int64}}, + "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id2"}}}}, }, diff --git a/sources/oracle/data.go b/sources/oracle/data.go index ee251ba230..5c1dea44f7 100644 --- a/sources/oracle/data.go +++ b/sources/oracle/data.go @@ -80,7 +80,7 @@ func convertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, 
int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/oracle/data_test.go b/sources/oracle/data_test.go index 67390137a6..0e37ed04a6 100644 --- a/sources/oracle/data_test.go +++ b/sources/oracle/data_test.go @@ -132,14 +132,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "t"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), "t", int64(0)}, + evals: []interface{}{int64(6), float64(6.6), "t", fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" diff --git a/sources/postgres/data.go b/sources/postgres/data.go index a8392816f7..a64a3f5bbf 100644 --- a/sources/postgres/data.go +++ b/sources/postgres/data.go @@ -102,7 +102,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, vals [] } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/postgres/data_test.go b/sources/postgres/data_test.go index 89d567373b..9b9718d4ef 100644 --- a/sources/postgres/data_test.go +++ b/sources/postgres/data_test.go @@ -258,14 +258,14 @@ func TestConvertData(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), float64(6.6), true, "0"}, }, { name: "Sequence 1", cols: []string{"a"}, vals: 
[]string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } conv := buildConv(spTable, srcTable) diff --git a/sources/postgres/infoschema.go b/sources/postgres/infoschema.go index bfbd6d6ebf..b1b267c611 100644 --- a/sources/postgres/infoschema.go +++ b/sources/postgres/infoschema.go @@ -156,7 +156,7 @@ func convertSQLRow(conv *internal.Conv, srcTable string, srcCols []string, srcSc } if aux, ok := conv.SyntheticPKeys[spTable]; ok { cs = append(cs, aux.Col) - vs = append(vs, int64(bits.Reverse64(uint64(aux.Sequence)))) + vs = append(vs, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/postgres/infoschema_test.go b/sources/postgres/infoschema_test.go index fe61a5c7ea..c1424d2caf 100644 --- a/sources/postgres/infoschema_test.go +++ b/sources/postgres/infoschema_test.go @@ -461,8 +461,8 @@ func TestConvertSqlRow_MultiCol(t *testing.T) { }) common.ProcessData(conv, InfoSchemaImpl{db}) assert.Equal(t, []spannerData{ - {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), int64(0)}}, - {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), int64(-9223372036854775808)}}}, + {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), "0"}}, + {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), "-9223372036854775808"}}}, rows) assert.Equal(t, int64(0), conv.Unexpecteds()) } diff --git a/sources/postgres/pgdump_test.go b/sources/postgres/pgdump_test.go index 38ec38b382..f0aceb49e8 100644 --- a/sources/postgres/pgdump_test.go +++ b/sources/postgres/pgdump_test.go @@ -131,7 +131,7 @@ func TestProcessPgDump(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: 
ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -181,7 +181,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -207,7 +207,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -234,7 +234,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: 
[]ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"c", "d"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}, @@ -271,7 +271,7 @@ func TestProcessPgDump(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -293,7 +293,7 @@ func TestProcessPgDump(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -314,7 +314,7 @@ func TestProcessPgDump(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: 
"custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: true}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -487,10 +487,10 @@ func TestProcessPgDump(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "COPY FROM with empty cols", @@ -502,10 +502,10 @@ func TestProcessPgDump(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", 
int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -538,7 +538,7 @@ func TestProcessPgDump(t *testing.T) { input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -690,15 +690,15 @@ COPY test (id, a, b, c, d, e, f, g) FROM stdin; vals: []interface{}{int64(7), float64(42.1), true, getTime(t, "2019-10-29T05:30:00Z"), getDate("2019-10-29"), []byte{0x0, 0x1, 0xbe, 0xef}, []spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"date", "synth_id"}, 
vals: []interface{}{getDate("2019-10-29"), bitReverse(5)}}, - spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, bitReverse(6)}}, + fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), fmt.Sprintf("%d", bitReverse(5))}}, + spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, fmt.Sprintf("%d", bitReverse(6))}}, spannerData{table: "test", cols: []string{"arr", "synth_id"}, - vals: []interface{}{[]spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, bitReverse(7)}}, + vals: []interface{}{[]spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, fmt.Sprintf("%d", bitReverse(7))}}, }, }, } @@ -799,7 +799,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -849,7 +849,7 @@ func 
TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -875,7 +875,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -902,7 +902,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"c", "d"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}, @@ -939,7 +939,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": 
ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -961,7 +961,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -982,7 +982,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: true}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -1155,10 +1155,10 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - 
spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "COPY FROM with empty cols", @@ -1170,10 +1170,10 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: 
[]interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -1206,7 +1206,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -1358,15 +1358,15 @@ COPY test (id, a, b, c, d, e) FROM stdin; vals: []interface{}{int64(7), float64(42.1), true, getTime(t, "2019-10-29T05:30:00Z"), getDate("2019-10-29"), []byte{0x0, 0x1, 0xbe, 0xef}, "{42,6}", - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), bitReverse(5)}}, - spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, bitReverse(6)}}, - spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42,6}", bitReverse(7)}}, - spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42, 6}", bitReverse(8)}}, + fmt.Sprintf("%d", 
bitReverse(0))}}, + spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), fmt.Sprintf("%d", bitReverse(5))}}, + spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, fmt.Sprintf("%d", bitReverse(6))}}, + spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42,6}", fmt.Sprintf("%d", bitReverse(7))}}, + spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42, 6}", fmt.Sprintf("%d", bitReverse(8))}}, }, }, } @@ -1441,7 +1441,7 @@ func TestProcessPgDump_AddPrimaryKeys(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -1456,7 +1456,7 @@ func TestProcessPgDump_AddPrimaryKeys(t *testing.T) { "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id0": ddl.ColumnDef{Name: "synth_id0", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id1": 
ddl.ColumnDef{Name: "synth_id1", T: ddl.Type{Name: ddl.Int64}}, - "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.Int64}}, + "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id2"}}}}, }, diff --git a/sources/sqlserver/data.go b/sources/sqlserver/data.go index 2a4fe3b97e..fd4963d58f 100644 --- a/sources/sqlserver/data.go +++ b/sources/sqlserver/data.go @@ -77,7 +77,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/sqlserver/data_test.go b/sources/sqlserver/data_test.go index 4e43c9be4f..9cfb351b43 100644 --- a/sources/sqlserver/data_test.go +++ b/sources/sqlserver/data_test.go @@ -237,14 +237,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), float64(6.6), true, fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" From 576a8e4ccccf5d045183ea15a122ba0e9cdc52dc Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Fri, 8 Jul 2022 13:09:11 +0530 Subject: [PATCH 05/15] migrate api --- ui/src/app/services/fetch/fetch.service.ts | 6 +++--- .../target-details.service.spec.ts | 16 ++++++++++++++++ .../target-details/target-details.service.ts | 18 ++++++++++++++++++ 3 files changed, 37 insertions(+), 3 
deletions(-) create mode 100644 ui/src/app/services/target-details/target-details.service.spec.ts create mode 100644 ui/src/app/services/target-details/target-details.service.ts diff --git a/ui/src/app/services/fetch/fetch.service.ts b/ui/src/app/services/fetch/fetch.service.ts index cfe8622161..42081de794 100644 --- a/ui/src/app/services/fetch/fetch.service.ts +++ b/ui/src/app/services/fetch/fetch.service.ts @@ -7,7 +7,7 @@ import IConv, { ICreateIndex, IInterleaveStatus, IPrimaryKey } from '../../model import IDumpConfig from '../../model/dump-config' import ISessionConfig from '../../model/session-config' import ISpannerConfig from '../../model/spanner-config' -import ITargetDetails from 'src/app/model/target-details' +import ITargetDetails from '../../model/target-details' @Injectable({ providedIn: 'root', @@ -102,8 +102,8 @@ export class FetchService { return this.http.post(`${this.url}/SetSpannerConfig`, payload) } - setTargetDetails(payload: ITargetDetails) { - return this.http.post(`${this.url}/SetTargetDetails`,payload) + migrate(payload: ITargetDetails) { + return this.http.post(`${this.url}/Migrate`,payload) } InitiateSession() { diff --git a/ui/src/app/services/target-details/target-details.service.spec.ts b/ui/src/app/services/target-details/target-details.service.spec.ts new file mode 100644 index 0000000000..2610d9aadb --- /dev/null +++ b/ui/src/app/services/target-details/target-details.service.spec.ts @@ -0,0 +1,16 @@ +import { TestBed } from '@angular/core/testing'; + +import { TargetDetailsService } from './target-details.service'; + +describe('TargetDetailsService', () => { + let service: TargetDetailsService; + + beforeEach(() => { + TestBed.configureTestingModule({}); + service = TestBed.inject(TargetDetailsService); + }); + + it('should be created', () => { + expect(service).toBeTruthy(); + }); +}); diff --git a/ui/src/app/services/target-details/target-details.service.ts b/ui/src/app/services/target-details/target-details.service.ts new file 
mode 100644 index 0000000000..f7ca1d8e37 --- /dev/null +++ b/ui/src/app/services/target-details/target-details.service.ts @@ -0,0 +1,18 @@ +import { Injectable } from '@angular/core'; +import ITargetDetails from 'src/app/model/target-details'; + +@Injectable({ + providedIn: 'root' +}) +export class TargetDetailsService { + targetDetail : ITargetDetails = {TargetDB:"", Dialect:"google_standard_sql",StreamingConfig:""}; + constructor() { } + updateTargetDetails(details: ITargetDetails) { + this.targetDetail.TargetDB = details.TargetDB; + this.targetDetail.Dialect = details.Dialect; + this.targetDetail.StreamingConfig = details.StreamingConfig; + } + getTargetDetails() { + return this.targetDetail; + } +} From 1f2297a39c0eca51d080831caed4aeab6fc6950d Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Fri, 8 Jul 2022 13:12:17 +0530 Subject: [PATCH 06/15] migrate api --- .../prepare-migration.component.html | 8 ++- .../prepare-migration.component.ts | 52 ++++++++++----- .../target-details-form.component.html | 16 ++++- .../target-details-form.component.ts | 46 ++++++------- ui/src/app/model/target-details.ts | 2 + webv2/routes.go | 3 + webv2/web.go | 65 +++++++++++++++++++ 7 files changed, 147 insertions(+), 45 deletions(-) diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.html b/ui/src/app/components/prepare-migration/prepare-migration.component.html index 8afd4b2a35..773622d6af 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.html +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.html @@ -48,6 +48,12 @@

Source and destination Database definitions


- \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.ts b/ui/src/app/components/prepare-migration/prepare-migration.component.ts index 423f2daade..babf9c6261 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.ts +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.ts @@ -1,32 +1,50 @@ -import { Component, OnInit } from '@angular/core'; -import { MatDialog } from '@angular/material/dialog'; -import { TargetDetailsFormComponent } from '../target-details-form/target-details-form.component'; - +import { Component, OnInit } from '@angular/core' +import { MatDialog } from '@angular/material/dialog' +import { TargetDetailsFormComponent } from '../target-details-form/target-details-form.component' +import { TargetDetailsService } from 'src/app/services/target-details/target-details.service' +import { FetchService } from 'src/app/services/fetch/fetch.service' +import { SnackbarService } from 'src/app/services/snackbar/snackbar.service' +import ITargetDetails from 'src/app/model/target-details' @Component({ selector: 'app-prepare-migration', templateUrl: './prepare-migration.component.html', - styleUrls: ['./prepare-migration.component.scss'] + styleUrls: ['./prepare-migration.component.scss'], }) export class PrepareMigrationComponent implements OnInit { + displayedColumns = ['Title', 'Source', 'Destination'] + constructor( + private dialog: MatDialog, + private fetch: FetchService, + private snack: SnackbarService, + private targetDetailService: TargetDetailsService + ) {} - displayedColumns = [ - 'Title', - 'Source', - 'Destination', - ] - constructor(private dialog: MatDialog) { } + isTargetDetailSet: boolean = false; + targetDetails: ITargetDetails = this.targetDetailService.getTargetDetails() - ngOnInit(): void { - } + ngOnInit(): void {} openTargetDetailsForm() { - let openDialog = this.dialog.open(TargetDetailsFormComponent, { + let dialogRef = 
this.dialog.open(TargetDetailsFormComponent, { width: '30vw', minWidth: '400px', maxWidth: '500px', }) - openDialog.afterClosed().subscribe(() => { - - }) + dialogRef.afterClosed().subscribe(() => { + if (this.targetDetails.TargetDB != '') { + this.isTargetDetailSet = true; + } + }); + console.log(this.targetDetailService.getTargetDetails()) } + migrate() { + this.fetch.migrate(this.targetDetailService.getTargetDetails()).subscribe({ + next: () => { + this.snack.openSnackBar('Migration completed successfully', 'Close', 5) + }, + error: (err: any) => { + this.snack.openSnackBar(err.message, 'Close') + }, + }) + } } diff --git a/ui/src/app/components/target-details-form/target-details-form.component.html b/ui/src/app/components/target-details-form/target-details-form.component.html index 05f67e01e5..76bdaa2594 100644 --- a/ui/src/app/components/target-details-form/target-details-form.component.html +++ b/ui/src/app/components/target-details-form/target-details-form.component.html @@ -3,7 +3,19 @@

Target Details

Target Database - + + +
+ + Target Dialect + + Google Standard SQL + + +
+ + Streaming config +
@@ -12,6 +24,8 @@

Target Details

mat-button type="submit" color="primary" + [disabled]="!(targetDetailsForm.valid)" + (click)="updateTargetDetails()" > Save diff --git a/ui/src/app/components/target-details-form/target-details-form.component.ts b/ui/src/app/components/target-details-form/target-details-form.component.ts index 06f77b4ec8..516d14f4aa 100644 --- a/ui/src/app/components/target-details-form/target-details-form.component.ts +++ b/ui/src/app/components/target-details-form/target-details-form.component.ts @@ -1,44 +1,38 @@ -import { Component, Inject, OnInit } from '@angular/core'; -import { FormBuilder, FormControl, FormGroup, Validators } from '@angular/forms'; -import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog'; -import ITargetDetails from 'src/app/model/target-details'; +import { Component, OnInit } from '@angular/core' +import { FormBuilder, FormGroup, Validators } from '@angular/forms' +import { MatDialogRef } from '@angular/material/dialog' +import ITargetDetails from 'src/app/model/target-details' +import { TargetDetailsService } from 'src/app/services/target-details/target-details.service' @Component({ selector: 'app-target-details-form', templateUrl: './target-details-form.component.html', - styleUrls: ['./target-details-form.component.scss'] + styleUrls: ['./target-details-form.component.scss'], }) export class TargetDetailsFormComponent implements OnInit { -targetDetailsForm : FormGroup - fetch: any; - snack: any; + targetDetailsForm: FormGroup constructor( private fb: FormBuilder, - @Inject(MAT_DIALOG_DATA) public data: ITargetDetails, - private dialogRef: MatDialogRef) { + private targetDetailService: TargetDetailsService, + private dialogRef: MatDialogRef + ) { this.targetDetailsForm = this.fb.group({ - TargetDB: ['', Validators.required], + targetDb: ['', Validators.required], + streamingConfig: ['', Validators.required], + dialect: ['',Validators.required], }) - dialogRef.disableClose = true - } - - ngOnInit(): void { } + ngOnInit(): void {} + 
updateTargetDetails() { let formValue = this.targetDetailsForm.value let payload: ITargetDetails = { - TargetDB: formValue.TargetDB, + TargetDB: formValue.targetDb, + Dialect: formValue.dialect, + StreamingConfig: formValue.streamingConfig } - - this.fetch.setSpannerConfig(payload).subscribe({ - next: (res: TargetDetailsFormComponent) => { - this.snack.openSnackBar('Target details updated successfully', 'Close', 5) - this.dialogRef.close({ ...res }) - }, - error: (err: any) => { - this.snack.openSnackBar(err.message, 'Close') - }, - }) + this.targetDetailService.updateTargetDetails(payload) + this.dialogRef.close() } } diff --git a/ui/src/app/model/target-details.ts b/ui/src/app/model/target-details.ts index b224f9c05e..87df947f00 100644 --- a/ui/src/app/model/target-details.ts +++ b/ui/src/app/model/target-details.ts @@ -1,3 +1,5 @@ export default interface ITargetDetails { TargetDB: string + Dialect: string + StreamingConfig:string } \ No newline at end of file diff --git a/webv2/routes.go b/webv2/routes.go index fedc85f91a..3849f01b40 100644 --- a/webv2/routes.go +++ b/webv2/routes.go @@ -67,5 +67,8 @@ func getRoutes() *mux.Router { router.HandleFunc("/GetConfig", config.GetConfig).Methods("GET") router.HandleFunc("/SetSpannerConfig", config.SetSpannerConfig).Methods("POST") + // Run migration + router.HandleFunc("/Migrate", migrate).Methods("POST") + return router } diff --git a/webv2/web.go b/webv2/web.go index 7ba6cf6744..ac1b424195 100644 --- a/webv2/web.go +++ b/webv2/web.go @@ -20,6 +20,7 @@ package webv2 import ( "bufio" "bytes" + "context" "database/sql" "encoding/json" "fmt" @@ -90,6 +91,10 @@ type driverConfig struct { Password string `json:"Password"` } +type targetDetails struct { + TargetDB string `json:TargetDB` +} + // databaseConnection creates connection with database when using // with postgres and mysql driver. 
func databaseConnection(w http.ResponseWriter, r *http.Request) { @@ -951,6 +956,66 @@ func addIndexes(w http.ResponseWriter, r *http.Request) { json.NewEncoder(w).Encode(convm) } +func migrate(w http.ResponseWriter, r *http.Request) { + + log.Println("request started", "method", r.Method, "path", r.URL.Path) + + reqBody, err := ioutil.ReadAll(r.Body) + + if err != nil { + log.Println("request's body Read Error") + http.Error(w, fmt.Sprintf("Body Read Error : %v", err), http.StatusInternalServerError) + } + + details := targetDetails{} + + err = json.Unmarshal(reqBody, &details) + + if err != nil { + log.Println("request's Body parse error") + http.Error(w, fmt.Sprintf("Request Body parse error : %v", err), http.StatusBadRequest) + return + } + + sessionState := session.GetSessionState() + + dbURI := fmt.Sprintf("projects/%s/instances/%s/databases/%s", sessionState.GCPProjectID, sessionState.SpannerInstanceID, details.TargetDB) + ctx := context.Background() + adminClient, err := utils.NewDatabaseAdminClient(ctx) + if err != nil { + log.Println("can't create admin client") + http.Error(w, fmt.Sprintf("can't create admin client : %v", err), http.StatusBadRequest) + return + } + defer adminClient.Close() + client, err := utils.GetClient(ctx, dbURI) + if err != nil { + log.Println("can't create client for db") + http.Error(w, fmt.Sprintf("can't create client for db %s: %v", dbURI, err), http.StatusBadRequest) + return + } + defer client.Close() + + err = conversion.CreateOrUpdateDatabase(ctx, adminClient, dbURI, sessionState.Driver, "spanner", sessionState.Conv, nil) + if err != nil { + log.Println("can't create/update database]") + http.Error(w, fmt.Sprintf("can't create/update database: %v", err), http.StatusBadRequest) + return + } + + helpers.UpdateSessionFile() + + convm := session.ConvWithMetadata{ + SessionMetadata: sessionState.SessionMetadata, + Conv: *sessionState.Conv, + } + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(convm) + + 
log.Println("migration completed", "method", r.Method, "path", r.URL.Path, "remoteaddr", r.RemoteAddr) +} + func checkSpannerNamesValidity(input []string) (bool, []string) { status := true var invalidNewNames []string From 78638532891f678652178e7a0f14fb9d4068dc58 Mon Sep 17 00:00:00 2001 From: Deep1998 Date: Fri, 1 Jul 2022 14:25:34 +0530 Subject: [PATCH 07/15] Lowdb demo --- common/utils/utils.go | 38 ++++++++++++++ conversion/conversion.go | 27 +++++++--- sources/mysql/infoschema.go | 15 +++++- streaming/streaming.go | 35 +++++++++++-- test_data/mysqldump.test.out | 99 ------------------------------------ 5 files changed, 104 insertions(+), 110 deletions(-) diff --git a/common/utils/utils.go b/common/utils/utils.go index 4081b2d347..3181ffd5bf 100644 --- a/common/utils/utils.go +++ b/common/utils/utils.go @@ -170,6 +170,44 @@ func PreloadGCSFiles(tables []ManifestTable) ([]ManifestTable, error) { return tables, nil } +func WriteToGCS(filePath, fileName, data string) error { + ctx := context.Background() + + client, err := storage.NewClient(ctx) + if err != nil { + fmt.Printf("Failed to create GCS client") + return err + } + defer client.Close() + u, err := url.Parse(filePath) + if err != nil { + fmt.Printf("parseFilePath: unable to parse file path %s", filePath) + return err + } + if u.Scheme != "gs" { + fmt.Printf("not a valid GCS path: %s, should start with 'gs'", filePath) + return err + } + bucketName := u.Host + prefix := u.Path[1:] + if prefix[len(prefix)-1] != '/' { + prefix = prefix + "/" + } + bucket := client.Bucket(bucketName) + obj := bucket.Object(prefix + fileName) + + w := obj.NewWriter(ctx) + if _, err := fmt.Fprint(w, data); err != nil { + fmt.Printf("Failed to write to Cloud Storage: %s", filePath) + return err + } + if err := w.Close(); err != nil { + fmt.Printf("Failed to close GCS file: %s", filePath) + return err + } + return nil +} + // GetProject returns the cloud project we should use for accessing Spanner. 
// Use environment variable GCLOUD_PROJECT if it is set. // Otherwise, use the default project returned from gcloud. diff --git a/conversion/conversion.go b/conversion/conversion.go index 3ca1dd3581..1453cc3f26 100644 --- a/conversion/conversion.go +++ b/conversion/conversion.go @@ -184,6 +184,18 @@ func performSnapshotMigration(config writer.BatchWriterConfig, conv *internal.Co return batchWriter, nil } +func snapshotMigrationHandler(sourceProfile profiles.SourceProfile, config writer.BatchWriterConfig, conv *internal.Conv, client *sp.Client, infoSchema common.InfoSchema) (*writer.BatchWriter, error) { + switch sourceProfile.Driver { + // Skip snapshot migration via harbourbridge for mysql and oracle since dataflow job will handle this from backfilled data. + case constants.MYSQL, constants.ORACLE: + return &writer.BatchWriter{}, nil + case constants.DYNAMODB: + return performSnapshotMigration(config, conv, client, infoSchema) + default: + return &writer.BatchWriter{}, fmt.Errorf("streaming migration not supported for driver %s", sourceProfile.Driver) + } +} + func dataFromDatabase(ctx context.Context, sourceProfile profiles.SourceProfile, targetProfile profiles.TargetProfile, config writer.BatchWriterConfig, conv *internal.Conv, client *sp.Client) (*writer.BatchWriter, error) { infoSchema, err := GetInfoSchema(sourceProfile, targetProfile) if err != nil { @@ -195,16 +207,19 @@ func dataFromDatabase(ctx context.Context, sourceProfile profiles.SourceProfile, if err != nil { return nil, err } - } - bw, err := performSnapshotMigration(config, conv, client, infoSchema) - if err != nil { - return nil, err - } - if sourceProfile.Conn.Streaming { + bw, err := snapshotMigrationHandler(sourceProfile, config, conv, client, infoSchema) + if err != nil { + return nil, err + } err = infoSchema.StartStreamingMigration(ctx, client, conv, streamInfo) if err != nil { return nil, err } + return bw, nil + } + bw, err := performSnapshotMigration(config, conv, client, 
infoSchema) + if err != nil { + return nil, err } return bw, nil } diff --git a/sources/mysql/infoschema.go b/sources/mysql/infoschema.go index 2542877ea9..6b88892441 100644 --- a/sources/mysql/infoschema.go +++ b/sources/mysql/infoschema.go @@ -17,6 +17,7 @@ package mysql import ( "context" "database/sql" + "encoding/json" "fmt" "sort" "strings" @@ -25,6 +26,7 @@ import ( _ "github.com/go-sql-driver/mysql" // The driver should be used via the database/sql package. _ "github.com/lib/pq" + "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/internal" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/cloudspannerecosystem/harbourbridge/schema" @@ -350,8 +352,19 @@ func (isi InfoSchemaImpl) StartChangeDataCapture(ctx context.Context, conv *inte // performing a streaming migration. func (isi InfoSchemaImpl) StartStreamingMigration(ctx context.Context, client *sp.Client, conv *internal.Conv, streamingInfo map[string]interface{}) error { streamingCfg, _ := streamingInfo["streamingCfg"].(streaming.StreamingCfg) - err := streaming.StartDataflow(ctx, isi.SourceProfile, isi.TargetProfile, streamingCfg) + convJSON, err := json.MarshalIndent(conv, "", " ") if err != nil { + err = fmt.Errorf("can't encode session state to JSON: %v", err) + return err + } + err = utils.WriteToGCS(streamingCfg.TmpDir, "session.json", string(convJSON)) + if err != nil { + err = fmt.Errorf("error writing session file to GCS: %v", err) + return err + } + err = streaming.StartDataflow(ctx, isi.SourceProfile, isi.TargetProfile, streamingCfg) + if err != nil { + err = fmt.Errorf("error starting dataflow: %v", err) return err } return nil diff --git a/streaming/streaming.go b/streaming/streaming.go index ae8d7dc85f..3216323664 100644 --- a/streaming/streaming.go +++ b/streaming/streaming.go @@ -18,10 +18,12 @@ import ( "encoding/json" "fmt" "io/ioutil" + "net/url" "time" dataflow "cloud.google.com/go/dataflow/apiv1beta3" 
datastream "cloud.google.com/go/datastream/apiv1alpha1" + "cloud.google.com/go/storage" datastreampb "google.golang.org/genproto/googleapis/cloud/datastream/v1alpha1" dataflowpb "google.golang.org/genproto/googleapis/dataflow/v1beta3" fieldmaskpb "google.golang.org/protobuf/types/known/fieldmaskpb" @@ -58,6 +60,7 @@ type DataflowCfg struct { type StreamingCfg struct { DatastreamCfg DatastreamCfg DataflowCfg DataflowCfg + TmpDir string } // VerifyAndUpdateCfg checks the fields and errors out if certain fields are empty. @@ -105,6 +108,27 @@ func VerifyAndUpdateCfg(streamingCfg *StreamingCfg, dbName string) error { } streamingCfg.DataflowCfg.JobName = jobName } + + filePath := streamingCfg.TmpDir + u, err := url.Parse(filePath) + if err != nil { + return fmt.Errorf("parseFilePath: unable to parse file path %s", filePath) + } + if u.Scheme != "gs" { + return fmt.Errorf("not a valid GCS path: %s, should start with 'gs'", filePath) + } + bucketName := u.Host + ctx := context.Background() + client, err := storage.NewClient(ctx) + if err != nil { + return fmt.Errorf("failed to create GCS client") + } + defer client.Close() + bucket := client.Bucket(bucketName) + _, err = bucket.Attrs(ctx) + if err != nil { + return fmt.Errorf("bucket %s does not exist", bucketName) + } return nil } @@ -187,7 +211,7 @@ func LaunchStream(ctx context.Context, sourceProfile profiles.SourceProfile, pro SourceConfig: srcCfg, DestinationConfig: dstCfg, State: datastreampb.Stream_RUNNING, - BackfillStrategy: &datastreampb.Stream_BackfillNone{BackfillNone: &datastreampb.Stream_BackfillNoneStrategy{}}, + BackfillStrategy: &datastreampb.Stream_BackfillAll{BackfillAll: &datastreampb.Stream_BackfillAllStrategy{}}, } createStreamRequest := &datastreampb.CreateStreamRequest{ Parent: fmt.Sprintf("projects/%s/locations/%s", projectID, datastreamCfg.StreamLocation), @@ -229,8 +253,10 @@ func LaunchStream(ctx context.Context, sourceProfile profiles.SourceProfile, pro } // LaunchDataflowJob populates the 
parameters from the streaming config and triggers a Dataflow job. -func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile, datastreamCfg DatastreamCfg, dataflowCfg DataflowCfg) error { +func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile, streamingCfg StreamingCfg) error { project, instance, dbName, _ := targetProfile.GetResourceIds(ctx, time.Now(), "", nil) + dataflowCfg := streamingCfg.DataflowCfg + datastreamCfg := streamingCfg.DatastreamCfg fmt.Println("Launching dataflow job ", dataflowCfg.JobName, " in ", project, "-", dataflowCfg.Location) c, err := dataflow.NewFlexTemplatesClient(ctx) @@ -259,12 +285,13 @@ func LaunchDataflowJob(ctx context.Context, targetProfile profiles.TargetProfile launchParameter := &dataflowpb.LaunchFlexTemplateParameter{ JobName: dataflowCfg.JobName, - Template: &dataflowpb.LaunchFlexTemplateParameter_ContainerSpecGcsPath{ContainerSpecGcsPath: "gs://dataflow-templates/latest/flex/Cloud_Datastream_to_Spanner"}, + Template: &dataflowpb.LaunchFlexTemplateParameter_ContainerSpecGcsPath{ContainerSpecGcsPath: "gs://deepchowdhury-gsql/images/datastream-to-spanner-transform-image-spec.json"}, Parameters: map[string]string{ "inputFilePattern": inputFilePattern, "streamName": fmt.Sprintf("projects/%s/locations/%s/streams/%s", project, datastreamCfg.StreamLocation, datastreamCfg.StreamId), "instanceId": instance, "databaseId": dbName, + "sessionFile": streamingCfg.TmpDir + "session.json", }, } @@ -313,7 +340,7 @@ func StartDatastream(ctx context.Context, sourceProfile profiles.SourceProfile, } func StartDataflow(ctx context.Context, sourceProfile profiles.SourceProfile, targetProfile profiles.TargetProfile, streamingCfg StreamingCfg) error { - err := LaunchDataflowJob(ctx, targetProfile, streamingCfg.DatastreamCfg, streamingCfg.DataflowCfg) + err := LaunchDataflowJob(ctx, targetProfile, streamingCfg) if err != nil { return fmt.Errorf("error launching dataflow: %v", err) } diff --git 
a/test_data/mysqldump.test.out b/test_data/mysqldump.test.out index 3a44f3296e..d35207a876 100644 --- a/test_data/mysqldump.test.out +++ b/test_data/mysqldump.test.out @@ -1,65 +1,3 @@ --- MySQL dump 10.17 Distrib 10.3.23-MariaDB, for debian-linux-gnu (x86_64) --- --- Host: 127.0.0.1 Database: cart --- ------------------------------------------------------ --- Server version 5.7.25-google-log - -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; -/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; -/*!40101 SET NAMES utf8mb4 */; -/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; -/*!40103 SET TIME_ZONE='+00:00' */; -/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; - --- --- Table structure for table `cart` --- - -DROP TABLE IF EXISTS `cart`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `cart` ( - `user_id` varchar(20) NOT NULL, - `product_id` varchar(20) NOT NULL, - `quantity` bigint(20) DEFAULT NULL, - `last_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`user_id`,`product_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - -CREATE INDEX idx ON `cart` (`quantity`); - --- --- Dumping data for table `cart` --- - -LOCK TABLES `cart` WRITE; -/*!40000 ALTER TABLE `cart` DISABLE KEYS */; -INSERT INTO `cart` VALUES ('901e-a6cfc2b502dc','abc-123',1,'2020-07-20 05:10:26'),('901e-a6cfc2b502dc','axd-673',2,'2020-07-20 05:10:43'),('a86b-82493320a775','zxi-631',5,'2020-07-20 05:10:46'); -/*!40000 ALTER TABLE `cart` ENABLE KEYS */; -UNLOCK TABLES; - --- --- Table name 
`PRODUCTS` differs only case from the table `cart`. --- This was added to cover more cases in our integration tests. --- -DROP TABLE IF EXISTS `PRODUCTS`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `PRODUCTS` ( - `usr_id` varchar(20) NOT NULL, - `prod_id` varchar(20) NOT NULL, - PRIMARY KEY (`usr_id`,`prod_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `products` --- DROP TABLE IF EXISTS `products`; /*!40101 SET @saved_cs_client = @@character_set_client */; @@ -88,40 +26,3 @@ LOCK TABLES `products` WRITE; INSERT INTO `products` VALUES ('abc-123','Blue suede shoes',141.99,'2020-06-06'),('axd-673','Antique typewriter',99.99,'2020-06-07'),('zxi-631','Glass vase',55.50,'2020-06-10'); /*!40000 ALTER TABLE `products` ENABLE KEYS */; UNLOCK TABLES; - --- --- Table structure for table `customers` --- - -DROP TABLE IF EXISTS `customers`; -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE `customers` ( - `c_id` varchar(20) NOT NULL, - `customer_profile` json DEFAULT NULL, - PRIMARY KEY (`c_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Dumping data for table `customers` --- - -LOCK TABLES `customers` WRITE; -/*!40000 ALTER TABLE `customers` DISABLE KEYS */; -INSERT INTO `customers` VALUES -('svd-124','{"first_name": "Lola", "last_name": "Dog", "location": "NYC", "online" : true, "friends" : 547}'), -('tel-595','{"first_name": "Ernie", "status": "Looking for treats", "location" : "Brooklyn"}'); -/*!40000 ALTER TABLE `customers` ENABLE KEYS */; -UNLOCK TABLES; -/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; - -/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; -/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; -/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; -/*!40101 SET 
CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; -/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; -/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; -/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; - --- Dump completed on 2020-08-10 17:40:01 From 3bce69343c1795f6fe3b0288d8c868a68671d804 Mon Sep 17 00:00:00 2001 From: Deep1998 Date: Thu, 7 Jul 2022 15:32:27 +0530 Subject: [PATCH 08/15] Use STRING(50) type instead of INT64 for synthetic PK --- internal/convert.go | 2 +- internal/convert_test.go | 4 +- sources/mysql/data.go | 2 +- sources/mysql/data_test.go | 4 +- sources/mysql/infoschema_test.go | 6 +- sources/mysql/mysqldump_test.go | 56 +++++++-------- sources/oracle/data.go | 2 +- sources/oracle/data_test.go | 4 +- sources/postgres/data.go | 2 +- sources/postgres/data_test.go | 4 +- sources/postgres/infoschema.go | 2 +- sources/postgres/infoschema_test.go | 4 +- sources/postgres/pgdump_test.go | 102 ++++++++++++++-------------- sources/sqlserver/data.go | 2 +- sources/sqlserver/data_test.go | 4 +- 15 files changed, 100 insertions(+), 100 deletions(-) diff --git a/internal/convert.go b/internal/convert.go index 8f66667049..120be1bb54 100644 --- a/internal/convert.go +++ b/internal/convert.go @@ -309,7 +309,7 @@ func (conv *Conv) AddPrimaryKeys() { if !primaryKeyPopulated { k := conv.buildPrimaryKey(t) ct.ColNames = append(ct.ColNames, k) - ct.ColDefs[k] = ddl.ColumnDef{Name: k, T: ddl.Type{Name: ddl.Int64}} + ct.ColDefs[k] = ddl.ColumnDef{Name: k, T: ddl.Type{Name: ddl.String, Len: 50}} ct.Pks = []ddl.IndexKey{{Col: k}} conv.SyntheticPKeys[t] = SyntheticPKey{k, 0} } diff --git a/internal/convert_test.go b/internal/convert_test.go index 82832abd83..5ec5f77fa1 100644 --- a/internal/convert_test.go +++ b/internal/convert_test.go @@ -218,7 +218,7 @@ func TestAddPrimaryKeys(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "a": {Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": {Name: "b", T: ddl.Type{Name: ddl.Float64}}, - "synth_id": {Name: 
"synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{{Name: "", Table: "", Unique: false, Keys: []ddl.IndexKey{{Col: "b"}}}}, @@ -246,7 +246,7 @@ func TestAddPrimaryKeys(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "a": {Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": {Name: "b", T: ddl.Type{Name: ddl.Float64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}}, }, diff --git a/sources/mysql/data.go b/sources/mysql/data.go index 590e811900..33de4ee39f 100644 --- a/sources/mysql/data.go +++ b/sources/mysql/data.go @@ -84,7 +84,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/mysql/data_test.go b/sources/mysql/data_test.go index c48baae0ca..5e02712a2e 100644 --- a/sources/mysql/data_test.go +++ b/sources/mysql/data_test.go @@ -279,14 +279,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), float64(6.6), true, fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" diff --git a/sources/mysql/infoschema_test.go b/sources/mysql/infoschema_test.go 
index dc1e101140..9e21ce24eb 100644 --- a/sources/mysql/infoschema_test.go +++ b/sources/mysql/infoschema_test.go @@ -388,7 +388,7 @@ func TestProcessData_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.Float64}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}, } @@ -404,8 +404,8 @@ func TestProcessData_MultiCol(t *testing.T) { }) common.ProcessData(conv, isi) assert.Equal(t, []spannerData{ - {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), int64(0)}}, - {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), int64(-9223372036854775808)}}}, + {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), "0"}}, + {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), "-9223372036854775808"}}}, rows) assert.Equal(t, int64(0), conv.Unexpecteds()) } diff --git a/sources/mysql/mysqldump_test.go b/sources/mysql/mysqldump_test.go index 1111e81685..69378506c9 100644 --- a/sources/mysql/mysqldump_test.go +++ b/sources/mysql/mysqldump_test.go @@ -125,7 +125,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "productid": {Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": {Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": {Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}}}, }, @@ -175,7 +175,7 @@ func 
TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": {Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": {Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": {Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{{Col: "synth_id"}}, Fks: []ddl.Foreignkey{{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -209,7 +209,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "A_fk_test_2", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -235,7 +235,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -260,7 +260,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, 
Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}}}, @@ -294,7 +294,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -322,7 +322,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e", "f"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}}}, @@ -343,7 +343,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: 
[]ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -405,7 +405,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -468,7 +468,7 @@ func TestProcessMySQLDump_MultiCol(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -625,10 +625,10 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, "('a33','b',9),\n" + "('a3','b',7);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), 
bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT INTO with empty cols", @@ -639,10 +639,10 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, "('a33','b',NULL),\n" + "('a3','b',7);\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: 
"test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -657,7 +657,7 @@ CREATE TABLE test (a text PRIMARY KEY, b text);`, input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -809,14 +809,14 @@ func TestProcessMySQLDump_DataError(t *testing.T) { vals: []interface{}{int64(7), float64(42.1), true, getDate("2019-10-29"), []byte{0x89, 0x50}, []spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"b", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"c", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"d", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"e", "synth_id"}, vals: []interface{}{[]byte{0x89, 0x50}, bitReverse(5)}}, + fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"b", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"c", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: 
[]string{"d", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"e", "synth_id"}, vals: []interface{}{[]byte{0x89, 0x50}, fmt.Sprintf("%d", bitReverse(5))}}, spannerData{table: "test", cols: []string{"f", "synth_id"}, - vals: []interface{}{[]spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, bitReverse(6)}}, + vals: []interface{}{[]spanner.NullString{{StringVal: "42", Valid: true}, {StringVal: "6", Valid: true}}, fmt.Sprintf("%d", bitReverse(6))}}, }, }, } @@ -876,7 +876,7 @@ func TestProcessMySQLDump_AddPrimaryKeys(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -891,7 +891,7 @@ func TestProcessMySQLDump_AddPrimaryKeys(t *testing.T) { "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id0": ddl.ColumnDef{Name: "synth_id0", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id1": ddl.ColumnDef{Name: "synth_id1", T: ddl.Type{Name: ddl.Int64}}, - "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.Int64}}, + "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id2"}}}}, }, diff --git a/sources/oracle/data.go b/sources/oracle/data.go index ee251ba230..5c1dea44f7 100644 --- a/sources/oracle/data.go +++ b/sources/oracle/data.go @@ -80,7 +80,7 @@ func convertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := 
conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/oracle/data_test.go b/sources/oracle/data_test.go index 67390137a6..0e37ed04a6 100644 --- a/sources/oracle/data_test.go +++ b/sources/oracle/data_test.go @@ -132,14 +132,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "t"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), "t", int64(0)}, + evals: []interface{}{int64(6), float64(6.6), "t", fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" diff --git a/sources/postgres/data.go b/sources/postgres/data.go index a8392816f7..a64a3f5bbf 100644 --- a/sources/postgres/data.go +++ b/sources/postgres/data.go @@ -102,7 +102,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, vals [] } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/postgres/data_test.go b/sources/postgres/data_test.go index 89d567373b..9b9718d4ef 100644 --- a/sources/postgres/data_test.go +++ b/sources/postgres/data_test.go @@ -258,14 +258,14 @@ func TestConvertData(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), 
float64(6.6), true, "0"}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } conv := buildConv(spTable, srcTable) diff --git a/sources/postgres/infoschema.go b/sources/postgres/infoschema.go index bfbd6d6ebf..b1b267c611 100644 --- a/sources/postgres/infoschema.go +++ b/sources/postgres/infoschema.go @@ -156,7 +156,7 @@ func convertSQLRow(conv *internal.Conv, srcTable string, srcCols []string, srcSc } if aux, ok := conv.SyntheticPKeys[spTable]; ok { cs = append(cs, aux.Col) - vs = append(vs, int64(bits.Reverse64(uint64(aux.Sequence)))) + vs = append(vs, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/postgres/infoschema_test.go b/sources/postgres/infoschema_test.go index fe61a5c7ea..c1424d2caf 100644 --- a/sources/postgres/infoschema_test.go +++ b/sources/postgres/infoschema_test.go @@ -461,8 +461,8 @@ func TestConvertSqlRow_MultiCol(t *testing.T) { }) common.ProcessData(conv, InfoSchemaImpl{db}) assert.Equal(t, []spannerData{ - {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), int64(0)}}, - {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), int64(-9223372036854775808)}}}, + {table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"cat", float64(42.3), "0"}}, + {table: "test", cols: []string{"a", "c", "synth_id"}, vals: []interface{}{"dog", int64(22), "-9223372036854775808"}}}, rows) assert.Equal(t, int64(0), conv.Unexpecteds()) } diff --git a/sources/postgres/pgdump_test.go b/sources/postgres/pgdump_test.go index 38ec38b382..f0aceb49e8 100644 --- a/sources/postgres/pgdump_test.go +++ b/sources/postgres/pgdump_test.go @@ -131,7 +131,7 @@ func TestProcessPgDump(t *testing.T) { 
"productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -181,7 +181,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -207,7 +207,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -234,7 +234,7 @@ func TestProcessPgDump(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: 
ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"c", "d"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}, @@ -271,7 +271,7 @@ func TestProcessPgDump(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, "g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -293,7 +293,7 @@ func TestProcessPgDump(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -314,7 +314,7 @@ func TestProcessPgDump(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: 
[]ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: true}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -487,10 +487,10 @@ func TestProcessPgDump(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "COPY FROM with empty cols", @@ -502,10 +502,10 @@ func TestProcessPgDump(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - 
spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -538,7 +538,7 @@ func TestProcessPgDump(t *testing.T) { input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -690,15 +690,15 @@ COPY test (id, a, b, c, d, e, f, g) FROM stdin; vals: []interface{}{int64(7), float64(42.1), true, getTime(t, "2019-10-29T05:30:00Z"), getDate("2019-10-29"), []byte{0x0, 0x1, 0xbe, 0xef}, []spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, 
"2019-10-29T05:30:00Z"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), bitReverse(5)}}, - spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, bitReverse(6)}}, + fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), fmt.Sprintf("%d", bitReverse(5))}}, + spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, fmt.Sprintf("%d", bitReverse(6))}}, spannerData{table: "test", cols: []string{"arr", "synth_id"}, - vals: []interface{}{[]spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, bitReverse(7)}}, + vals: []interface{}{[]spanner.NullInt64{{Int64: 42, Valid: true}, {Int64: 6, Valid: true}}, fmt.Sprintf("%d", bitReverse(7))}}, }, }, } @@ -799,7 +799,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}, NotNull: true}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: 
ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -849,7 +849,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -875,7 +875,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"d"}, ReferTable: "test", ReferColumns: []string{"a"}}}, @@ -902,7 +902,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { ColDefs: map[string]ddl.ColumnDef{ "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.Int64}}, "d": ddl.ColumnDef{Name: "d", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"c", "d"}, ReferTable: "test", ReferColumns: []string{"a", "b"}}}, @@ -939,7 +939,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "e": ddl.ColumnDef{Name: "e", T: ddl.Type{Name: ddl.Int64}}, "f": ddl.ColumnDef{Name: "f", T: ddl.Type{Name: ddl.Int64}}, 
"g": ddl.ColumnDef{Name: "g", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Fks: []ddl.Foreignkey{ddl.Foreignkey{Name: "fk_test", Columns: []string{"e"}, ReferTable: "test", ReferColumns: []string{"a"}}, @@ -961,7 +961,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: false}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -982,7 +982,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a": ddl.ColumnDef{Name: "a", T: ddl.Type{Name: ddl.Int64}}, "b": ddl.ColumnDef{Name: "b", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "c": ddl.ColumnDef{Name: "c", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}, Indexes: []ddl.CreateIndex{ddl.CreateIndex{Name: "custom_index", Table: "test", Unique: false, Keys: []ddl.IndexKey{ddl.IndexKey{Col: "b", Desc: true}, ddl.IndexKey{Col: "c", Desc: false}}}}}}, @@ -1155,10 +1155,10 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: 
[]string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a1", "b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a22", "b99", int64(6), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a33", "b", int64(9), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "COPY FROM with empty cols", @@ -1170,10 +1170,10 @@ func TestProcessPgDumpPGTarget(t *testing.T) { "a3 b 7\n" + "\\.\n", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), bitReverse(0)}}, - spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", bitReverse(2)}}, - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), bitReverse(3)}}}, + spannerData{table: "test", cols: []string{"b", "n", "synth_id"}, vals: []interface{}{"b1", int64(42), fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"a", "n", "synth_id"}, vals: []interface{}{"a22", int64(6), 
fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"a", "b", "synth_id"}, vals: []interface{}{"a33", "b", fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a3", "b", int64(7), fmt.Sprintf("%d", bitReverse(3))}}}, }, { name: "INSERT", @@ -1206,7 +1206,7 @@ func TestProcessPgDumpPGTarget(t *testing.T) { input: "CREATE TABLE test (a text NOT NULL, b text NOT NULL, n bigint);\n" + "INSERT INTO test (a, b, n) VALUES ('a42', 'b6', 2);", expectedData: []spannerData{ - spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), bitReverse(0)}}}, + spannerData{table: "test", cols: []string{"a", "b", "n", "synth_id"}, vals: []interface{}{"a42", "b6", int64(2), fmt.Sprintf("%d", bitReverse(0))}}}, }, { name: "INSERT with spaces", @@ -1358,15 +1358,15 @@ COPY test (id, a, b, c, d, e) FROM stdin; vals: []interface{}{int64(7), float64(42.1), true, getTime(t, "2019-10-29T05:30:00Z"), getDate("2019-10-29"), []byte{0x0, 0x1, 0xbe, 0xef}, "{42,6}", - bitReverse(0)}}, - spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), bitReverse(1)}}, - spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), bitReverse(2)}}, - spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, bitReverse(3)}}, - spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), bitReverse(4)}}, - spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), bitReverse(5)}}, - spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, bitReverse(6)}}, - spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42,6}", bitReverse(7)}}, - spannerData{table: 
"test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42, 6}", bitReverse(8)}}, + fmt.Sprintf("%d", bitReverse(0))}}, + spannerData{table: "test", cols: []string{"int8", "synth_id"}, vals: []interface{}{int64(7), fmt.Sprintf("%d", bitReverse(1))}}, + spannerData{table: "test", cols: []string{"float8", "synth_id"}, vals: []interface{}{float64(42.1), fmt.Sprintf("%d", bitReverse(2))}}, + spannerData{table: "test", cols: []string{"bool", "synth_id"}, vals: []interface{}{true, fmt.Sprintf("%d", bitReverse(3))}}, + spannerData{table: "test", cols: []string{"timestamp", "synth_id"}, vals: []interface{}{getTime(t, "2019-10-29T05:30:00Z"), fmt.Sprintf("%d", bitReverse(4))}}, + spannerData{table: "test", cols: []string{"date", "synth_id"}, vals: []interface{}{getDate("2019-10-29"), fmt.Sprintf("%d", bitReverse(5))}}, + spannerData{table: "test", cols: []string{"bytea", "synth_id"}, vals: []interface{}{[]byte{0x0, 0x1, 0xbe, 0xef}, fmt.Sprintf("%d", bitReverse(6))}}, + spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42,6}", fmt.Sprintf("%d", bitReverse(7))}}, + spannerData{table: "test", cols: []string{"arr", "synth_id"}, vals: []interface{}{"{42, 6}", fmt.Sprintf("%d", bitReverse(8))}}, }, }, } @@ -1441,7 +1441,7 @@ func TestProcessPgDump_AddPrimaryKeys(t *testing.T) { "productid": ddl.ColumnDef{Name: "productid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "userid": ddl.ColumnDef{Name: "userid", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "quantity": ddl.ColumnDef{Name: "quantity", T: ddl.Type{Name: ddl.Int64}}, - "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.Int64}}, + "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id"}}}}, }, @@ -1456,7 +1456,7 @@ func TestProcessPgDump_AddPrimaryKeys(t *testing.T) { "synth_id": ddl.ColumnDef{Name: "synth_id", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, 
"synth_id0": ddl.ColumnDef{Name: "synth_id0", T: ddl.Type{Name: ddl.String, Len: ddl.MaxLength}}, "synth_id1": ddl.ColumnDef{Name: "synth_id1", T: ddl.Type{Name: ddl.Int64}}, - "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.Int64}}, + "synth_id2": ddl.ColumnDef{Name: "synth_id2", T: ddl.Type{Name: ddl.String, Len: 50}}, }, Pks: []ddl.IndexKey{ddl.IndexKey{Col: "synth_id2"}}}}, }, diff --git a/sources/sqlserver/data.go b/sources/sqlserver/data.go index 2a4fe3b97e..fd4963d58f 100644 --- a/sources/sqlserver/data.go +++ b/sources/sqlserver/data.go @@ -77,7 +77,7 @@ func ConvertData(conv *internal.Conv, srcTable string, srcCols []string, srcSche } if aux, ok := conv.SyntheticPKeys[spTable]; ok { c = append(c, aux.Col) - v = append(v, int64(bits.Reverse64(uint64(aux.Sequence)))) + v = append(v, fmt.Sprintf("%d", int64(bits.Reverse64(uint64(aux.Sequence))))) aux.Sequence++ conv.SyntheticPKeys[spTable] = aux } diff --git a/sources/sqlserver/data_test.go b/sources/sqlserver/data_test.go index 4e43c9be4f..9cfb351b43 100644 --- a/sources/sqlserver/data_test.go +++ b/sources/sqlserver/data_test.go @@ -237,14 +237,14 @@ func TestConvertsyntheticPKey(t *testing.T) { cols: []string{"a", "b", "c"}, vals: []string{"6", "6.6", "true"}, ecols: []string{"a", "b", "c", "synth_id"}, - evals: []interface{}{int64(6), float64(6.6), true, int64(0)}, + evals: []interface{}{int64(6), float64(6.6), true, fmt.Sprintf("%d", 0)}, }, { name: "Sequence 1", cols: []string{"a"}, vals: []string{"7"}, ecols: []string{"a", "synth_id"}, - evals: []interface{}{int64(7), int64(bits.Reverse64(1))}, + evals: []interface{}{int64(7), fmt.Sprintf("%d", int64(bits.Reverse64(1)))}, }, } tableName := "testtable" From c1e31822e44699043d68b141cc2d3ce862dae5fb Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Mon, 11 Jul 2022 10:42:04 +0530 Subject: [PATCH 09/15] data migration integrated with UI --- .../prepare-migration.component.html | 110 ++++++++++-------- 
.../prepare-migration.component.scss | 23 ++-- .../prepare-migration.component.ts | 26 ++++- .../target-details-form.component.ts | 1 - ui/src/app/model/conv.ts | 9 ++ ui/src/app/services/fetch/fetch.service.ts | 8 +- webv2/routes.go | 2 + webv2/session/types.go | 27 +++-- webv2/web.go | 91 +++++++++++++-- 9 files changed, 216 insertions(+), 81 deletions(-) diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.html b/ui/src/app/components/prepare-migration/prepare-migration.component.html index 773622d6af..bbc39225f0 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.html +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.html @@ -1,59 +1,71 @@ -
-
+
+
+

Source and destination Database definitions

- - - - Database engine type - - - - - - - - - - - -
TitleDatabase versionConnection detailsSourceDestination
+ + + + + + + + + + + + + + + +
Title + {{ element.title }} + Source{{ element.source }}Destination{{ element.target }}
-
-
-
+
+
+
- Prerequisites - Before we begin, please ensure you have done the following: -
-
-

- 1 - Connect Database via Datastream -

-

- 2 - Setup Target details -

-
+ Prerequisites + Before we begin, please ensure you have done the following: +
+
+

+ 1 + Set datastream connection profiles +

+

+ 2 + Setup Target details +

+

-
-
+
+

Target Details

- Target Database:{{targetDetails.TargetDB}}
- Spanner Dialect:{{targetDetails.Dialect}}
- Streaming Config:{{targetDetails.StreamingConfig}}
+ Target Database:{{ targetDetails.TargetDB }}
+ Spanner Dialect:{{ targetDetails.Dialect }}
+ Streaming Config:{{ targetDetails.StreamingConfig }}
+
+
- \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.scss b/ui/src/app/components/prepare-migration/prepare-migration.component.scss index 1be6d47489..6e2e59969b 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.scss +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.scss @@ -25,6 +25,7 @@ .breadcrumb_workspace { font-weight: 400; font-size: 14px; + color: rgba(0, 0, 0, 0.56); } .breadcrumb_prepare_migration { font-weight: 400; @@ -37,7 +38,7 @@ } } .prerequisites { - padding: 0 20px; + padding: 0; width: 600px; .mat-card-title { font-size: 13pt; @@ -50,12 +51,12 @@ p { color: #5c5c5c; .bullet { - position: absolute; + position: relative; display: inline-block; background-color: #e5e5e5; padding: 0px 5px 10px 5px; margin: 0; - border-radius: 10px; + border-radius: 12px; font-size: 9px; height: 9px; width: 9px; @@ -75,16 +76,24 @@ .definition-container { max-height: 500px; - margin-left: 20px; overflow: auto; th { font-size: 13px; } } - + .body { + margin-left: 20px; + } table { - width: 30%; + min-width: 30%; + max-width: 50%; + th { + width: 10%; + } } - .link-test { + .configure { color: #1967d2; + } + .migrate { + margin-top: 10px; } \ No newline at end of file diff --git a/ui/src/app/components/prepare-migration/prepare-migration.component.ts b/ui/src/app/components/prepare-migration/prepare-migration.component.ts index babf9c6261..00335c010e 100644 --- a/ui/src/app/components/prepare-migration/prepare-migration.component.ts +++ b/ui/src/app/components/prepare-migration/prepare-migration.component.ts @@ -5,6 +5,7 @@ import { TargetDetailsService } from 'src/app/services/target-details/target-det import { FetchService } from 'src/app/services/fetch/fetch.service' import { SnackbarService } from 'src/app/services/snackbar/snackbar.service' import ITargetDetails from 'src/app/model/target-details' +import { ISourceDestinationDetails } from 
'src/app/model/conv' @Component({ selector: 'app-prepare-migration', templateUrl: './prepare-migration.component.html', @@ -12,6 +13,7 @@ import ITargetDetails from 'src/app/model/target-details' }) export class PrepareMigrationComponent implements OnInit { displayedColumns = ['Title', 'Source', 'Destination'] + dataSource : any =[] constructor( private dialog: MatDialog, private fetch: FetchService, @@ -20,9 +22,26 @@ export class PrepareMigrationComponent implements OnInit { ) {} isTargetDetailSet: boolean = false; + isStreamingCfgSet: boolean = false; targetDetails: ITargetDetails = this.targetDetailService.getTargetDetails() - ngOnInit(): void {} + ngOnInit(): void { + this.fetch.getSourceDestinationSummary().subscribe({ + next: (res: ISourceDestinationDetails) => { + this.dataSource = [ + {title: 'Database driver', source:res.DatabaseType, target:'Spanner'}, + {title: 'Number of tables', source:res.SourceTableCount, target: res.SpannerTableCount}, + {title: 'Number of indexes', source:res.SourceIndexCount, target: res.SpannerIndexCount}, + ]; + console.log(this.dataSource) + }, + error: (err: any) => { + console.log(err.error) + // this.snackbar.openSnackBar(err.error, 'Close') + }, + }) + + } openTargetDetailsForm() { let dialogRef = this.dialog.open(TargetDetailsFormComponent, { width: '30vw', @@ -33,6 +52,9 @@ export class PrepareMigrationComponent implements OnInit { if (this.targetDetails.TargetDB != '') { this.isTargetDetailSet = true; } + if (this.targetDetails.StreamingConfig != '') { + this.isStreamingCfgSet = true; + } }); console.log(this.targetDetailService.getTargetDetails()) } @@ -43,7 +65,7 @@ export class PrepareMigrationComponent implements OnInit { this.snack.openSnackBar('Migration completed successfully', 'Close', 5) }, error: (err: any) => { - this.snack.openSnackBar(err.message, 'Close') + this.snack.openSnackBar(err.error, 'Close') }, }) } diff --git a/ui/src/app/components/target-details-form/target-details-form.component.ts 
b/ui/src/app/components/target-details-form/target-details-form.component.ts index 516d14f4aa..ec9a7dcc5d 100644 --- a/ui/src/app/components/target-details-form/target-details-form.component.ts +++ b/ui/src/app/components/target-details-form/target-details-form.component.ts @@ -18,7 +18,6 @@ export class TargetDetailsFormComponent implements OnInit { ) { this.targetDetailsForm = this.fb.group({ targetDb: ['', Validators.required], - streamingConfig: ['', Validators.required], dialect: ['',Validators.required], }) } diff --git a/ui/src/app/model/conv.ts b/ui/src/app/model/conv.ts index 4c735cfede..a15c7cd69c 100644 --- a/ui/src/app/model/conv.ts +++ b/ui/src/app/model/conv.ts @@ -156,3 +156,12 @@ export interface IPkColumnDefs { Desc: boolean Order: number } + +export interface ISourceDestinationDetails { + DatabaseType: string + ConnectionDetail: string + SourceTableCount: number + SpannerTableCount: number + SourceIndexCount: number + SpannerIndexCount: number +} \ No newline at end of file diff --git a/ui/src/app/services/fetch/fetch.service.ts b/ui/src/app/services/fetch/fetch.service.ts index 42081de794..dc319804f1 100644 --- a/ui/src/app/services/fetch/fetch.service.ts +++ b/ui/src/app/services/fetch/fetch.service.ts @@ -3,7 +3,7 @@ import { Injectable } from '@angular/core' import IDbConfig from 'src/app/model/db-config' import ISession, { ISaveSessionPayload } from '../../model/session' import IUpdateTable from '../../model/update-table' -import IConv, { ICreateIndex, IInterleaveStatus, IPrimaryKey } from '../../model/conv' +import IConv, { ICreateIndex, IInterleaveStatus, IPrimaryKey, ISourceDestinationDetails } from '../../model/conv' import IDumpConfig from '../../model/dump-config' import ISessionConfig from '../../model/session-config' import ISpannerConfig from '../../model/spanner-config' @@ -86,6 +86,10 @@ export class FetchService { }) } + getSourceDestinationSummary() { + return this.http.get(`${this.url}/GetSourceDestinationSummary`) + } + 
resumeSession(versionId: string) { return this.http.post(`${this.url}/ResumeSession/${versionId}`, {}) } @@ -103,7 +107,7 @@ export class FetchService { } migrate(payload: ITargetDetails) { - return this.http.post(`${this.url}/Migrate`,payload) + return this.http.post(`${this.url}/Migrate`,payload) } InitiateSession() { diff --git a/webv2/routes.go b/webv2/routes.go index 3849f01b40..44e3895947 100644 --- a/webv2/routes.go +++ b/webv2/routes.go @@ -70,5 +70,7 @@ func getRoutes() *mux.Router { // Run migration router.HandleFunc("/Migrate", migrate).Methods("POST") + router.HandleFunc("/GetSourceDestinationSummary", getSourceDestinationSummary).Methods("GET") + return router } diff --git a/webv2/session/types.go b/webv2/session/types.go index c9106e5417..19468116b2 100644 --- a/webv2/session/types.go +++ b/webv2/session/types.go @@ -25,6 +25,14 @@ type SessionMetadata struct { Tags []string } +type SourceDBConnDetails struct { + Host string + Port string + User string + Password string + Path string +} + type ConvWithMetadata struct { SessionMetadata internal.Conv @@ -32,15 +40,16 @@ type ConvWithMetadata struct { // SessionState stores information for the current migration session. 
type SessionState struct { - SourceDB *sql.DB // Connection to source database in case of direct connection - DbName string // Name of source database - Driver string // Name of HarbourBridge driver in use - Conv *internal.Conv // Current conversion state - SessionFile string // Path to session file - IsOffline bool // True if the connection to remote metadata database is invalid - GCPProjectID string - SpannerInstanceID string - SessionMetadata SessionMetadata + SourceDB *sql.DB // Connection to source database in case of direct connection + SourceDBConnDetails SourceDBConnDetails // Connection details for source database + DbName string // Name of source database + Driver string // Name of HarbourBridge driver in use + Conv *internal.Conv // Current conversion state + SessionFile string // Path to session file + IsOffline bool // True if the connection to remote metadata database is invalid + GCPProjectID string + SpannerInstanceID string + SessionMetadata SessionMetadata Counter } diff --git a/webv2/web.go b/webv2/web.go index ac1b424195..37eae916d5 100644 --- a/webv2/web.go +++ b/webv2/web.go @@ -92,7 +92,8 @@ type driverConfig struct { } type targetDetails struct { - TargetDB string `json:TargetDB` + TargetDB string `json:TargetDB` + StreamingCfg string `json:StreamingCfg` } // databaseConnection creates connection with database when using @@ -145,6 +146,12 @@ func databaseConnection(w http.ResponseWriter, r *http.Request) { } sessionState.Driver = config.Driver sessionState.SessionFile = "" + sessionState.SourceDBConnDetails = session.SourceDBConnDetails{ + Host: config.Host, + Port: config.Port, + User: config.User, + Password: config.Password, + } w.WriteHeader(http.StatusOK) } @@ -256,6 +263,9 @@ func convertSchemaDump(w http.ResponseWriter, r *http.Request) { sessionState.DbName = "" sessionState.SessionFile = "" sessionState.SourceDB = nil + sessionState.SourceDBConnDetails = session.SourceDBConnDetails{ + Path: dc.FilePath, + } convm := 
session.ConvWithMetadata{ SessionMetadata: sessionMetadata, @@ -311,6 +321,9 @@ func loadSession(w http.ResponseWriter, r *http.Request) { sessionState.SessionMetadata = sessionMetadata sessionState.Driver = s.Driver sessionState.SessionFile = s.FilePath + sessionState.SourceDBConnDetails = session.SourceDBConnDetails{ + Path: s.FilePath, + } convm := session.ConvWithMetadata{ SessionMetadata: sessionMetadata, @@ -956,6 +969,26 @@ func addIndexes(w http.ResponseWriter, r *http.Request) { json.NewEncoder(w).Encode(convm) } +func getSourceDestinationSummary(w http.ResponseWriter, r *http.Request) { + sessionState := session.GetSessionState() + var sourceDestinationDetails sourceDestinationDetails + sourceDestinationDetails.DatabaseType = sessionState.Driver + sourceDestinationDetails.SourceTableCount = len(sessionState.Conv.SrcSchema) + sourceDestinationDetails.SpannerTableCount = len(sessionState.Conv.SpSchema) + + sourceIndexCount, spannerIndexCount := 0, 0 + for _, spannerSchema := range sessionState.Conv.SpSchema { + spannerIndexCount = spannerIndexCount + len(spannerSchema.Indexes) + } + for _, sourceSchema := range sessionState.Conv.SrcSchema { + sourceIndexCount = sourceIndexCount + len(sourceSchema.Indexes) + } + sourceDestinationDetails.SourceIndexCount = sourceIndexCount + sourceDestinationDetails.SpannerIndexCount = spannerIndexCount + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(sourceDestinationDetails) +} + func migrate(w http.ResponseWriter, r *http.Request) { log.Println("request started", "method", r.Method, "path", r.URL.Path) @@ -978,40 +1011,67 @@ func migrate(w http.ResponseWriter, r *http.Request) { } sessionState := session.GetSessionState() + sourceDBConnectionDetails := sessionState.SourceDBConnDetails + sourceProfileString := fmt.Sprintf("host=%v,port=%v,user=%v,password=%v,dbName=%v", + sourceDBConnectionDetails.Host, sourceDBConnectionDetails.Port, sourceDBConnectionDetails.User, + sourceDBConnectionDetails.Password, 
sessionState.DbName) + if details.StreamingCfg != "" { + sourceProfileString = sourceProfileString + fmt.Sprintf(",streamingCfg=%v", details.StreamingCfg) + } + sourceProfile, err := profiles.NewSourceProfile(sourceProfileString, sessionState.Driver) + if err != nil { + log.Println("can't create source profile") + http.Error(w, fmt.Sprintf("Can't create source profile : %v", err), http.StatusBadRequest) + return + } + sourceProfile.Driver = sessionState.Driver + + targetProfileString := fmt.Sprintf("project=%v,instance=%v,dbName=%v", sessionState.GCPProjectID, sessionState.SpannerInstanceID, details.TargetDB) + targetProfile, err := profiles.NewTargetProfile(targetProfileString) + if err != nil { + log.Println("can't create target profile") + http.Error(w, fmt.Sprintf("Can't create target profile : %v", err), http.StatusBadRequest) + return + } + targetProfile.TargetDb = targetProfile.ToLegacyTargetDb() dbURI := fmt.Sprintf("projects/%s/instances/%s/databases/%s", sessionState.GCPProjectID, sessionState.SpannerInstanceID, details.TargetDB) ctx := context.Background() adminClient, err := utils.NewDatabaseAdminClient(ctx) if err != nil { log.Println("can't create admin client") - http.Error(w, fmt.Sprintf("can't create admin client : %v", err), http.StatusBadRequest) + http.Error(w, fmt.Sprintf("Can't create admin client : %v", err), http.StatusBadRequest) return } defer adminClient.Close() client, err := utils.GetClient(ctx, dbURI) if err != nil { log.Println("can't create client for db") - http.Error(w, fmt.Sprintf("can't create client for db %s: %v", dbURI, err), http.StatusBadRequest) + http.Error(w, fmt.Sprintf("Can't create client for db %s: %v", dbURI, err), http.StatusBadRequest) return } defer client.Close() err = conversion.CreateOrUpdateDatabase(ctx, adminClient, dbURI, sessionState.Driver, "spanner", sessionState.Conv, nil) if err != nil { - log.Println("can't create/update database]") - http.Error(w, fmt.Sprintf("can't create/update database: %v", err), 
http.StatusBadRequest) + log.Println("can't create/update database") + http.Error(w, fmt.Sprintf("Can't create/update database: %v", err), http.StatusBadRequest) + return + } + _, err = conversion.DataConv(ctx, sourceProfile, targetProfile, nil, client, sessionState.Conv, true, 40) + if err != nil { + log.Println("can't finish data migration") + http.Error(w, fmt.Sprintf("Can't finish data migration: %v", err), http.StatusBadRequest) return } - helpers.UpdateSessionFile() - - convm := session.ConvWithMetadata{ - SessionMetadata: sessionState.SessionMetadata, - Conv: *sessionState.Conv, + if err = conversion.UpdateDDLForeignKeys(ctx, adminClient, dbURI, sessionState.Conv, nil); err != nil { + log.Println("can't perform update schema on db") + http.Error(w, fmt.Sprintf("Can't perform update schema on db %s with foreign keys: %v", dbURI, err), http.StatusBadRequest) + return } w.WriteHeader(http.StatusOK) - json.NewEncoder(w).Encode(convm) log.Println("migration completed", "method", r.Method, "path", r.URL.Path, "remoteaddr", r.RemoteAddr) } @@ -1549,6 +1609,15 @@ type typeIssue struct { Brief string } +type sourceDestinationDetails struct { + DatabaseType string + ConnectionDetail string + SourceTableCount int + SpannerTableCount int + SourceIndexCount int + SpannerIndexCount int +} + func addTypeToList(convertedType string, spType string, issues []internal.SchemaIssue, l []typeIssue) []typeIssue { if convertedType == spType { if len(issues) > 0 { From e50c5474598322f8548bcfe3876e7e30bf9dba4d Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Mon, 11 Jul 2022 12:42:34 +0530 Subject: [PATCH 10/15] fixed config issue --- .../target-details-form/target-details-form.component.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/ui/src/app/components/target-details-form/target-details-form.component.ts b/ui/src/app/components/target-details-form/target-details-form.component.ts index ec9a7dcc5d..2d56812fc9 100644 --- 
a/ui/src/app/components/target-details-form/target-details-form.component.ts +++ b/ui/src/app/components/target-details-form/target-details-form.component.ts @@ -19,6 +19,7 @@ export class TargetDetailsFormComponent implements OnInit { this.targetDetailsForm = this.fb.group({ targetDb: ['', Validators.required], dialect: ['',Validators.required], + streamingConfig: [''], }) } From ea5c9bb8ae88126851d35c6d9fc889d4ceedc502 Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Mon, 11 Jul 2022 13:00:35 +0530 Subject: [PATCH 11/15] fixed config issue --- webv2/web.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/webv2/web.go b/webv2/web.go index 37eae916d5..82a2aa9e30 100644 --- a/webv2/web.go +++ b/webv2/web.go @@ -92,8 +92,8 @@ type driverConfig struct { } type targetDetails struct { - TargetDB string `json:TargetDB` - StreamingCfg string `json:StreamingCfg` + TargetDB string `json:"TargetDB"` + StreamingCfg string `json:"StreamingConfig"` } // databaseConnection creates connection with database when using @@ -1018,6 +1018,7 @@ func migrate(w http.ResponseWriter, r *http.Request) { if details.StreamingCfg != "" { sourceProfileString = sourceProfileString + fmt.Sprintf(",streamingCfg=%v", details.StreamingCfg) } + fmt.Println(sourceProfileString) sourceProfile, err := profiles.NewSourceProfile(sourceProfileString, sessionState.Driver) if err != nil { log.Println("can't create source profile") From a0a42a9be15808234fbfc8449c2eb23fcfd6cda9 Mon Sep 17 00:00:00 2001 From: Deep1998 Date: Mon, 11 Jul 2022 17:01:42 +0530 Subject: [PATCH 12/15] Minor fix --- common/utils/utils.go | 9 ++-- sources/mysql/infoschema.go | 3 ++ streaming/streaming.go | 6 +++ test_data/mysqldump.test.out | 99 ++++++++++++++++++++++++++++++++++++ 4 files changed, 112 insertions(+), 5 deletions(-) diff --git a/common/utils/utils.go b/common/utils/utils.go index 3181ffd5bf..8254521330 100644 --- a/common/utils/utils.go +++ b/common/utils/utils.go @@ -179,6 +179,9 @@ func 
WriteToGCS(filePath, fileName, data string) error { return err } defer client.Close() + if filePath[len(filePath)-1] != '/' { + filePath = filePath + "/" + } u, err := url.Parse(filePath) if err != nil { fmt.Printf("parseFilePath: unable to parse file path %s", filePath) @@ -189,12 +192,8 @@ func WriteToGCS(filePath, fileName, data string) error { return err } bucketName := u.Host - prefix := u.Path[1:] - if prefix[len(prefix)-1] != '/' { - prefix = prefix + "/" - } bucket := client.Bucket(bucketName) - obj := bucket.Object(prefix + fileName) + obj := bucket.Object(u.Path[1:] + fileName) w := obj.NewWriter(ctx) if _, err := fmt.Fprint(w, data); err != nil { diff --git a/sources/mysql/infoschema.go b/sources/mysql/infoschema.go index 6b88892441..97c5043558 100644 --- a/sources/mysql/infoschema.go +++ b/sources/mysql/infoschema.go @@ -357,11 +357,14 @@ func (isi InfoSchemaImpl) StartStreamingMigration(ctx context.Context, client *s err = fmt.Errorf("can't encode session state to JSON: %v", err) return err } + fmt.Printf("Writing session file to GCS...") err = utils.WriteToGCS(streamingCfg.TmpDir, "session.json", string(convJSON)) if err != nil { err = fmt.Errorf("error writing session file to GCS: %v", err) return err } + fmt.Println("Done") + err = streaming.StartDataflow(ctx, isi.SourceProfile, isi.TargetProfile, streamingCfg) if err != nil { err = fmt.Errorf("error starting dataflow: %v", err) diff --git a/streaming/streaming.go b/streaming/streaming.go index 3216323664..5f2d417210 100644 --- a/streaming/streaming.go +++ b/streaming/streaming.go @@ -19,6 +19,7 @@ import ( "fmt" "io/ioutil" "net/url" + "strings" "time" dataflow "cloud.google.com/go/dataflow/apiv1beta3" @@ -103,6 +104,7 @@ func VerifyAndUpdateCfg(streamingCfg *StreamingCfg, dbName string) error { if dfCfg.JobName == "" { // Update names to have more info like dbname. 
jobName, err := utils.GenerateName("hb-dataflow-" + dbName) + jobName = strings.Replace(jobName, "_", "-", -1) if err != nil { return fmt.Errorf("error generating stream name: %v", err) } @@ -110,6 +112,10 @@ func VerifyAndUpdateCfg(streamingCfg *StreamingCfg, dbName string) error { } filePath := streamingCfg.TmpDir + if filePath[len(filePath)-1] != '/' { + filePath = filePath + "/" + streamingCfg.TmpDir = filePath + } u, err := url.Parse(filePath) if err != nil { return fmt.Errorf("parseFilePath: unable to parse file path %s", filePath) diff --git a/test_data/mysqldump.test.out b/test_data/mysqldump.test.out index d35207a876..ac7b676b60 100644 --- a/test_data/mysqldump.test.out +++ b/test_data/mysqldump.test.out @@ -1,3 +1,65 @@ +-- MySQL dump 10.17 Distrib 10.3.23-MariaDB, for debian-linux-gnu (x86_64) +-- +-- Host: 127.0.0.1 Database: cart +-- ------------------------------------------------------ +-- Server version 5.7.25-google-log + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!40101 SET NAMES utf8mb4 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- +-- Table structure for table `cart` +-- + +DROP TABLE IF EXISTS `cart`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `cart` ( + `user_id` varchar(20) NOT NULL, + `product_id` varchar(20) NOT NULL, + `quantity` bigint(20) DEFAULT NULL, + `last_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY 
(`user_id`,`product_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; +/*!40101 SET character_set_client = @saved_cs_client */; + +CREATE INDEX idx ON `cart` (`quantity`); + +-- +-- Dumping data for table `cart` +-- + +LOCK TABLES `cart` WRITE; +/*!40000 ALTER TABLE `cart` DISABLE KEYS */; +INSERT INTO `cart` VALUES ('901e-a6cfc2b502dc','abc-123',1,'2020-07-20 05:10:26'),('901e-a6cfc2b502dc','axd-673',2,'2020-07-20 05:10:43'),('a86b-82493320a775','zxi-631',5,'2020-07-20 05:10:46'); +/*!40000 ALTER TABLE `cart` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table name `PRODUCTS` differs only case from the table `cart`. +-- This was added to cover more cases in our integration tests. +-- +DROP TABLE IF EXISTS `PRODUCTS`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `PRODUCTS` ( + `usr_id` varchar(20) NOT NULL, + `prod_id` varchar(20) NOT NULL, + PRIMARY KEY (`usr_id`,`prod_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `products` +-- DROP TABLE IF EXISTS `products`; /*!40101 SET @saved_cs_client = @@character_set_client */; @@ -26,3 +88,40 @@ LOCK TABLES `products` WRITE; INSERT INTO `products` VALUES ('abc-123','Blue suede shoes',141.99,'2020-06-06'),('axd-673','Antique typewriter',99.99,'2020-06-07'),('zxi-631','Glass vase',55.50,'2020-06-10'); /*!40000 ALTER TABLE `products` ENABLE KEYS */; UNLOCK TABLES; + +-- +-- Table structure for table `customers` +-- + +DROP TABLE IF EXISTS `customers`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `customers` ( + `c_id` varchar(20) NOT NULL, + `customer_profile` json DEFAULT NULL, + PRIMARY KEY (`c_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `customers` +-- + +LOCK TABLES `customers` WRITE; +/*!40000 ALTER 
TABLE `customers` DISABLE KEYS */; +INSERT INTO `customers` VALUES +('svd-124','{"first_name": "Lola", "last_name": "Dog", "location": "NYC", "online" : true, "friends" : 547}'), +('tel-595','{"first_name": "Ernie", "status": "Looking for treats", "location" : "Brooklyn"}'); +/*!40000 ALTER TABLE `customers` ENABLE KEYS */; +UNLOCK TABLES; +/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + +-- Dump completed on 2020-08-10 17:40:01 \ No newline at end of file From 4de0466c5e2f388dd34c1bce150c43670096a213 Mon Sep 17 00:00:00 2001 From: Shreya Khajanchi Date: Tue, 12 Jul 2022 15:36:09 +0530 Subject: [PATCH 13/15] replaced lorem ipsum and modified snackbar --- ui/src/app/components/home/home.component.html | 6 +++--- .../prepare-migration/prepare-migration.component.ts | 6 +++++- .../session-listing/session-listing.component.html | 6 ++++-- ui/src/app/components/workspace/workspace.component.html | 4 ---- ui/src/index.html | 2 +- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/ui/src/app/components/home/home.component.html b/ui/src/app/components/home/home.component.html index ed484fa76f..e5786b7ddd 100644 --- a/ui/src/app/components/home/home.component.html +++ b/ui/src/app/components/home/home.component.html @@ -1,9 +1,9 @@

Get started with HarbourBridge

- Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut - labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco - laboris nisi ut aliquip ex ea commodo consequat. + HarbourBridge is a stand-alone open source tool for Cloud Spanner evaluation and migration, + using data from an existing PostgreSQL, MySQL, SQL Server, Oracle or DynamoDB database. + Learn More.