diff --git a/cmd/data.go b/cmd/data.go index 4d31caab8f..a971b93698 100644 --- a/cmd/data.go +++ b/cmd/data.go @@ -26,10 +26,12 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/conversion" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/cloudspannerecosystem/harbourbridge/proto/migration" "github.com/google/subcommands" "github.com/google/uuid" + "go.uber.org/zap" ) // DataCmd struct with flags. @@ -42,6 +44,8 @@ type DataCmd struct { sessionJSON string filePrefix string // TODO: move filePrefix to global flags writeLimit int64 + dryRun bool + logLevel string } // Name returns the name of operation. @@ -75,6 +79,7 @@ func (cmd *DataCmd) SetFlags(f *flag.FlagSet) { f.BoolVar(&cmd.skipForeignKeys, "skip-foreign-keys", false, "Skip creating foreign keys after data migration is complete (ddl statements for foreign keys can still be found in the downloaded schema.ddl.txt file and the same can be applied separately)") f.StringVar(&cmd.filePrefix, "prefix", "", "File prefix for generated files") f.Int64Var(&cmd.writeLimit, "write-limit", defaultWritersLimit, "Write limit for writes to spanner") + f.BoolVar(&cmd.dryRun, "dry-run", false, "To validate the syntax of the command by running it in an air-gapped manner, such that no network calls are made.") } func (cmd *DataCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus { @@ -83,9 +88,20 @@ func (cmd *DataCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ...interface var err error defer func() { if err != nil { - fmt.Printf("FATAL error: %v\n", err) + logger.Log.Fatal("FATAL error", zap.Error(err)) } }() + if cmd.dryRun { + fmt.Print("--dry-run flag is not implemented") + return subcommands.ExitFailure + } + err = logger.InitializeLogger(cmd.logLevel) + if err != nil { + 
fmt.Println("Error initialising logger, did you specify a valid log-level? [DEBUG, INFO, WARN, ERROR, FATAL]", err) + return subcommands.ExitFailure + } + defer logger.Log.Sync() + conv := internal.MakeConv() sourceProfile, err := profiles.NewSourceProfile(cmd.sourceProfile, cmd.source) diff --git a/cmd/schema.go b/cmd/schema.go index eb020826dd..3afd93c8c3 100644 --- a/cmd/schema.go +++ b/cmd/schema.go @@ -26,8 +26,10 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/conversion" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/google/subcommands" + "go.uber.org/zap" ) // SchemaCmd struct with flags. @@ -37,6 +39,7 @@ type SchemaCmd struct { target string targetProfile string filePrefix string // TODO: move filePrefix to global flags + logLevel string } // Name returns the name of operation. 
@@ -67,6 +70,7 @@ func (cmd *SchemaCmd) SetFlags(f *flag.FlagSet) { f.StringVar(&cmd.target, "target", "Spanner", "Specifies the target DB, defaults to Spanner (accepted values: `Spanner`)") f.StringVar(&cmd.targetProfile, "target-profile", "", "Flag for specifying connection profile for target database e.g., \"dialect=postgresql\"") f.StringVar(&cmd.filePrefix, "prefix", "", "File prefix for generated files") + f.StringVar(&cmd.logLevel, "log-level", "INFO", "Configure the logging level for the command (INFO, DEBUG), defaults to INFO") } func (cmd *SchemaCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus { @@ -75,9 +79,15 @@ func (cmd *SchemaCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ...interfa var err error defer func() { if err != nil { - fmt.Printf("FATAL error: %v\n", err) + logger.Log.Fatal("FATAL error", zap.Error(err)) } }() + err = logger.InitializeLogger(cmd.logLevel) + if err != nil { + fmt.Println("Error initialising logger, did you specify a valid log-level? [DEBUG, INFO, WARN, ERROR, FATAL]", err) + return subcommands.ExitFailure + } + defer logger.Log.Sync() sourceProfile, err := profiles.NewSourceProfile(cmd.sourceProfile, cmd.source) if err != nil { diff --git a/cmd/schema_and_data.go b/cmd/schema_and_data.go index 4cb8ea239a..f40fd5a36d 100644 --- a/cmd/schema_and_data.go +++ b/cmd/schema_and_data.go @@ -26,10 +26,12 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/conversion" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/cloudspannerecosystem/harbourbridge/proto/migration" "github.com/google/subcommands" "github.com/google/uuid" + "go.uber.org/zap" ) // SchemaAndDataCmd struct with flags. 
@@ -41,6 +43,8 @@ type SchemaAndDataCmd struct { skipForeignKeys bool filePrefix string // TODO: move filePrefix to global flags writeLimit int64 + dryRun bool + logLevel string } // Name returns the name of operation. @@ -73,6 +77,8 @@ func (cmd *SchemaAndDataCmd) SetFlags(f *flag.FlagSet) { f.BoolVar(&cmd.skipForeignKeys, "skip-foreign-keys", false, "Skip creating foreign keys after data migration is complete (ddl statements for foreign keys can still be found in the downloaded schema.ddl.txt file and the same can be applied separately)") f.StringVar(&cmd.filePrefix, "prefix", "", "File prefix for generated files") f.Int64Var(&cmd.writeLimit, "write-limit", defaultWritersLimit, "Write limit for writes to spanner") + f.BoolVar(&cmd.dryRun, "dry-run", false, "To validate the syntax of the command by running it in an air-gapped manner, such that no network calls are made.") + f.StringVar(&cmd.logLevel, "log-level", "INFO", "Configure the logging level for the command (INFO, DEBUG), defaults to INFO") } func (cmd *SchemaAndDataCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus { @@ -81,9 +87,19 @@ func (cmd *SchemaAndDataCmd) Execute(ctx context.Context, f *flag.FlagSet, _ ... var err error defer func() { if err != nil { - fmt.Printf("FATAL error: %v\n", err) + logger.Log.Fatal("FATAL error", zap.Error(err)) } }() + if cmd.dryRun { + fmt.Print("--dry-run flag is not implemented") + return subcommands.ExitFailure + } + err = logger.InitializeLogger(cmd.logLevel) + if err != nil { + fmt.Println("Error initialising logger, did you specify a valid log-level? 
[DEBUG, INFO, WARN, ERROR, FATAL]", err) + return subcommands.ExitFailure + } + defer logger.Log.Sync() sourceProfile, err := profiles.NewSourceProfile(cmd.sourceProfile, cmd.source) if err != nil { diff --git a/common/utils/utils.go b/common/utils/utils.go index 4081b2d347..139ae8c771 100644 --- a/common/utils/utils.go +++ b/common/utils/utils.go @@ -237,6 +237,12 @@ func getInstances(ctx context.Context, project string) ([]string, error) { } func GetPassword() string { + calledFromGCloud := os.Getenv("GCLOUD_HB_PLUGIN") + if strings.EqualFold(calledFromGCloud, "true") { + fmt.Println("\n Please specify password in environment variables (recommended) or --source-profile " + + "(not recommended) while using HarbourBridge from gCloud CLI.") + return "" + } fmt.Print("Enter Password: ") bytePassword, err := terminal.ReadPassword(int(syscall.Stdin)) if err != nil { diff --git a/conversion/conversion.go b/conversion/conversion.go index 81e7ca5481..922865af30 100644 --- a/conversion/conversion.go +++ b/conversion/conversion.go @@ -43,14 +43,11 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" dydb "github.com/aws/aws-sdk-go/service/dynamodb" - adminpb "google.golang.org/genproto/googleapis/spanner/admin/database/v1" - "google.golang.org/grpc/metadata" - "google.golang.org/protobuf/proto" - "github.com/cloudspannerecosystem/harbourbridge/common/constants" "github.com/cloudspannerecosystem/harbourbridge/common/metrics" "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/profiles" "github.com/cloudspannerecosystem/harbourbridge/sources/common" "github.com/cloudspannerecosystem/harbourbridge/sources/csv" @@ -61,6 +58,10 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/sources/sqlserver" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl"
"github.com/cloudspannerecosystem/harbourbridge/spanner/writer" + "go.uber.org/zap" + adminpb "google.golang.org/genproto/googleapis/spanner/admin/database/v1" + "google.golang.org/grpc/metadata" + "google.golang.org/protobuf/proto" ) var ( @@ -368,6 +369,7 @@ func getSeekable(f *os.File) (*os.File, int64, error) { return f, n, err } internal.VerbosePrintln("Creating a tmp file with a copy of stdin because stdin is not seekable.") + logger.Log.Debug("Creating a tmp file with a copy of stdin because stdin is not seekable.") // Create file in os.TempDir. Its not clear this is a good idea e.g. if the // pg_dump/mysqldump output is large (tens of GBs) and os.TempDir points to a directory @@ -564,6 +566,8 @@ Recommended value is between 20-30.`) workers <- workerID }() internal.VerbosePrintf("Submitting new FK create request: %s\n", fkStmt) + logger.Log.Debug("Submitting new FK create request", zap.String("fkStmt", fkStmt)) + op, err := adminClient.UpdateDatabaseDdl(ctx, &adminpb.UpdateDatabaseDdlRequest{ Database: dbURI, Statements: []string{fkStmt}, @@ -579,6 +583,7 @@ Recommended value is between 20-30.`) return } internal.VerbosePrintln("Updated schema with statement: " + fkStmt) + logger.Log.Debug("Updated schema with statement", zap.String("fkStmt", fkStmt)) }(fkStmt, workerID) } // Wait for all the goroutines to finish. 
diff --git a/go.mod b/go.mod index 89058604ad..d86c32c51c 100644 --- a/go.mod +++ b/go.mod @@ -32,6 +32,7 @@ require ( github.com/pingcap/tidb/parser v0.0.0-20220411093434-32b9c14779c2 github.com/sijms/go-ora/v2 v2.2.17 github.com/stretchr/testify v1.7.0 + go.uber.org/zap v1.21.0 golang.org/x/crypto v0.0.0-20220214200702-86341886e292 golang.org/x/net v0.0.0-20220325170049-de3da57026de golang.org/x/tools v0.1.11-0.20220316014157-77aa08bb151a // indirect diff --git a/internal/convert.go b/internal/convert.go index 7ef775bcab..bc0a66ed3e 100644 --- a/internal/convert.go +++ b/internal/convert.go @@ -18,9 +18,11 @@ import ( "fmt" "time" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/proto/migration" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" + "go.uber.org/zap" ) // Conv contains all schema and data conversion state. @@ -204,6 +206,8 @@ func (conv *Conv) WriteRow(srcTable, spTable string, spCols []string, spVals []i if conv.dataSink == nil { msg := "Internal error: ProcessDataRow called but dataSink not configured" VerbosePrintf("%s\n", msg) + logger.Log.Debug("Internal error: ProcessDataRow called but dataSink not configured") + conv.Unexpected(msg) conv.StatsAddBadRow(srcTable, conv.DataMode()) } else { @@ -342,6 +346,8 @@ func (conv *Conv) buildPrimaryKey(spTable string) string { // because we process dump data twice. func (conv *Conv) Unexpected(u string) { VerbosePrintf("Unexpected condition: %s\n", u) + logger.Log.Debug("Unexpected condition", zap.String("condition", u)) + // Limit size of unexpected map. If over limit, then only // update existing entries. 
if _, ok := conv.Stats.Unexpected[u]; ok || len(conv.Stats.Unexpected) < 1000 { @@ -390,6 +396,7 @@ func (conv *Conv) getStatementStat(s string) *statementStat { func (conv *Conv) SkipStatement(stmtType string) { if conv.SchemaMode() { // Record statement stats on first pass only. VerbosePrintf("Skipping statement: %s\n", stmtType) + logger.Log.Debug("Skipping statement", zap.String("stmtType", stmtType)) conv.getStatementStat(stmtType).Skip++ } } @@ -398,6 +405,7 @@ func (conv *Conv) SkipStatement(stmtType string) { func (conv *Conv) ErrorInStatement(stmtType string) { if conv.SchemaMode() { // Record statement stats on first pass only. VerbosePrintf("Error processing statement: %s\n", stmtType) + logger.Log.Debug("Error processing statement", zap.String("stmtType", stmtType)) conv.getStatementStat(stmtType).Error++ } } diff --git a/internal/convert_test.go b/internal/convert_test.go index 82832abd83..24b76c5462 100644 --- a/internal/convert_test.go +++ b/internal/convert_test.go @@ -18,14 +18,18 @@ import ( "testing" "github.com/stretchr/testify/assert" + "go.uber.org/zap" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" ) // This file contains very basic tests of Conv API functionality. // Most of the Conv APIs are also tested in process_test.go (where // they are tested using data from schema/data conversion). 
- +func init() { + logger.Log = zap.NewNop() +} func TestSetSchemaMode(t *testing.T) { conv := MakeConv() conv.SetSchemaMode() diff --git a/internal/mapping.go b/internal/mapping.go index 386176aba8..48ff1789fa 100644 --- a/internal/mapping.go +++ b/internal/mapping.go @@ -19,6 +19,7 @@ import ( "strconv" "strings" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" ) @@ -46,6 +47,7 @@ func GetSpannerTable(conv *Conv, srcTable string) (string, error) { spTable := getSpannerID(conv, srcTable) if spTable != srcTable { VerbosePrintf("Mapping source DB table %s to Spanner table %s\n", srcTable, spTable) + logger.Log.Debug(fmt.Sprintf("Mapping source DB table %s to Spanner table %s\n", srcTable, spTable)) } conv.ToSpanner[srcTable] = NameAndCols{Name: spTable, Cols: make(map[string]string)} conv.ToSource[spTable] = NameAndCols{Name: srcTable, Cols: make(map[string]string)} @@ -118,6 +120,7 @@ func GetSpannerCol(conv *Conv, srcTable, srcCol string, mustExist bool) (string, } if spCol != srcCol { VerbosePrintf("Mapping source DB col %s (table %s) to Spanner col %s\n", srcCol, srcTable, spCol) + logger.Log.Debug(fmt.Sprintf("Mapping source DB col %s (table %s) to Spanner col %s\n", srcCol, srcTable, spCol)) } conv.ToSpanner[srcTable].Cols[srcCol] = spCol conv.ToSource[sp.Name].Cols[spCol] = srcCol diff --git a/internal/progress.go b/internal/progress.go index 627150f81c..f26829f5c5 100644 --- a/internal/progress.go +++ b/internal/progress.go @@ -20,6 +20,9 @@ import ( "fmt" "strconv" "strings" + + "github.com/cloudspannerecosystem/harbourbridge/logger" + "go.uber.org/zap" ) // Progress provides console progress functionality. i.e. 
it reports what @@ -87,6 +90,7 @@ func (p *Progress) reportPct(firstCall bool) { fmt.Printf("%s: %2d%%\n", p.message, p.pct) return } + logger.Log.Debug(p.message, zap.Int("Progress", p.pct)) if firstCall { fmt.Printf("%s: %2d%%", p.message, p.pct) } else { @@ -102,6 +106,7 @@ func (p *Progress) reportFraction(firstCall bool) { fmt.Printf("%s: %d/%d\n", p.message, p.progress, p.total) return } + logger.Log.Debug(p.message, zap.Float32("Progress", float32(p.progress)/float32(p.total))) if firstCall { fmt.Printf("%s: %d/%d", p.message, p.progress, p.total) } else { diff --git a/logger/logger.go b/logger/logger.go new file mode 100644 index 0000000000..7314ef949d --- /dev/null +++ b/logger/logger.go @@ -0,0 +1,36 @@ +package logger + +import ( + "os" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +const LOG_FILE_NAME = "harbourbridge.log" + +var Log *zap.Logger + +func InitializeLogger(inputLogLevel string) error { + config := zap.NewProductionEncoderConfig() + config.EncodeTime = zapcore.ISO8601TimeEncoder + fileEncoder := zapcore.NewJSONEncoder(config) + consoleEncoder := zapcore.NewConsoleEncoder(config) + logFile, err := os.OpenFile(LOG_FILE_NAME, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + writer := zapcore.AddSync(logFile) + zapLogLevel := new(zapcore.Level) + err = zapLogLevel.Set(inputLogLevel) + if err != nil { + return err + } + logLevel := zap.NewAtomicLevelAt(*zapLogLevel) + core := zapcore.NewTee( + zapcore.NewCore(fileEncoder, writer, logLevel), + zapcore.NewCore(consoleEncoder, zapcore.AddSync(os.Stdout), logLevel), + ) + Log = zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel)) + return nil +} diff --git a/main.go b/main.go index c53e04e9e6..8b9c6103eb 100644 --- a/main.go +++ b/main.go @@ -34,6 +34,7 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/constants" "github.com/cloudspannerecosystem/harbourbridge/common/utils" "github.com/cloudspannerecosystem/harbourbridge/internal" +
"github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/web" "github.com/cloudspannerecosystem/harbourbridge/webv2" "github.com/google/subcommands" @@ -119,6 +120,11 @@ func main() { fmt.Printf("\nWarning: Found usage of deprecated flags. Support for these " + "flags will be discontinued soon.\nIt is recommended to use Harbourbridge " + "using connection profiles. Checkout usage here: https://github.com/cloudspannerecosystem/harbourbridge/tree/master/cmd#command-line-flags\n\n") + err = logger.InitializeLogger("INFO") + if err != nil { + panic(fmt.Errorf("error initialising logger")) + } + defer logger.Log.Sync() // Running HB CLI in global command line mode. setupGlobalFlags() flag.Usage = usage diff --git a/sources/common/utils_test.go b/sources/common/utils_test.go index c259b8b04b..8937b15a49 100644 --- a/sources/common/utils_test.go +++ b/sources/common/utils_test.go @@ -18,9 +18,15 @@ import ( "testing" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/stretchr/testify/assert" + "go.uber.org/zap" ) +func init() { + logger.Log = zap.NewNop() +} + func TestToNotNull(t *testing.T) { conv := internal.MakeConv() assert.Equal(t, false, ToNotNull(conv, "YES")) diff --git a/sources/mysql/data_test.go b/sources/mysql/data_test.go index c48baae0ca..a736bc9039 100644 --- a/sources/mysql/data_test.go +++ b/sources/mysql/data_test.go @@ -23,12 +23,18 @@ import ( "cloud.google.com/go/civil" "cloud.google.com/go/spanner" "github.com/stretchr/testify/assert" + "go.uber.org/zap" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" ) +func init() { + logger.Log = zap.NewNop() +} + type spannerData struct { table string cols []string diff --git a/sources/mysql/mysqldump.go 
b/sources/mysql/mysqldump.go index 95a65aaa1c..95e7ca5710 100644 --- a/sources/mysql/mysqldump.go +++ b/sources/mysql/mysqldump.go @@ -22,6 +22,7 @@ import ( "strings" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/sources/common" "github.com/pingcap/tidb/parser" @@ -76,6 +77,7 @@ func processMySQLDump(conv *internal.Conv, r *internal.Reader) error { for _, stmt := range stmts { isInsert := processStatement(conv, stmt) internal.VerbosePrintf("Parsed SQL command at line=%d/fpos=%d: %d stmts (%d lines, %d bytes) Insert Statement=%v\n", startLine, startOffset, 1, r.LineNumber-startLine, len(b), isInsert) + logger.Log.Debug(fmt.Sprintf("Parsed SQL command at line=%d/fpos=%d: %d stmts (%d lines, %d bytes) Insert Statement=%v\n", startLine, startOffset, 1, r.LineNumber-startLine, len(b), isInsert)) } if r.EOF { break @@ -227,6 +229,8 @@ func processCreateTable(conv *internal.Conv, stmt *ast.CreateTableStmt) { } tableName, err := getTableName(stmt.Table) internal.VerbosePrintf("processing create table elem=%s stmt=%v\n", tableName, stmt) + logger.Log.Debug(fmt.Sprintf("processing create table elem=%s stmt=%v\n", tableName, stmt)) + if err != nil { logStmtError(conv, stmt, fmt.Errorf("can't get table name: %w", err)) return @@ -574,6 +578,8 @@ func handleParseError(conv *internal.Conv, chunk string, err error, l [][]byte) if conv.SchemaMode() { conv.Unexpected(fmt.Sprintf("Unsupported datatype '%s' encountered while parsing following statement at line number %d : \n%s", spatial, len(l), chunk)) internal.VerbosePrintf("Converting datatype '%s' to 'Text' and retrying to parse the statement\n", spatial) + logger.Log.Debug(fmt.Sprintf("Converting datatype '%s' to 'Text' and retrying to parse the statement\n", spatial)) + } return handleSpatialDatatype(conv, chunk, l) } diff --git 
a/sources/oracle/toddl_test.go b/sources/oracle/toddl_test.go index 137693abd4..d2e8e433e3 100644 --- a/sources/oracle/toddl_test.go +++ b/sources/oracle/toddl_test.go @@ -19,12 +19,18 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/constants" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/sources/common" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" "github.com/stretchr/testify/assert" + "go.uber.org/zap" ) +func init() { + logger.Log = zap.NewNop() +} + func TestToSpannerType(t *testing.T) { conv := internal.MakeConv() conv.SetSchemaMode() diff --git a/sources/postgres/infoschema_test.go b/sources/postgres/infoschema_test.go index fe61a5c7ea..66459f62ed 100644 --- a/sources/postgres/infoschema_test.go +++ b/sources/postgres/infoschema_test.go @@ -24,12 +24,18 @@ import ( "cloud.google.com/go/spanner" "github.com/DATA-DOG/go-sqlmock" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/sources/common" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" "github.com/stretchr/testify/assert" + "go.uber.org/zap" ) +func init() { + logger.Log = zap.NewNop() +} + type mockSpec struct { query string args []driver.Value // Query args. 
diff --git a/sources/postgres/pgdump.go b/sources/postgres/pgdump.go index f64cc1f24d..28c98abeda 100644 --- a/sources/postgres/pgdump.go +++ b/sources/postgres/pgdump.go @@ -24,6 +24,7 @@ import ( pg_query "github.com/pganalyze/pg_query_go/v2" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/schema" "github.com/cloudspannerecosystem/harbourbridge/sources/common" ) @@ -70,6 +71,7 @@ func processPgDump(conv *internal.Conv, r *internal.Reader) error { } ci := processStatements(conv, stmts) internal.VerbosePrintf("Parsed SQL command at line=%d/fpos=%d: %d stmts (%d lines, %d bytes) ci=%v\n", startLine, startOffset, len(stmts), r.LineNumber-startLine, len(b), ci != nil) + logger.Log.Debug(fmt.Sprintf("Parsed SQL command at line=%d/fpos=%d: %d stmts (%d lines, %d bytes) ci=%v\n", startLine, startOffset, len(stmts), r.LineNumber-startLine, len(b), ci != nil)) if ci != nil { switch ci.stmt { case copyFrom: @@ -131,10 +133,12 @@ func readAndParseChunk(conv *internal.Conv, r *internal.Reader) ([]byte, []*pg_q func processCopyBlock(conv *internal.Conv, srcTable string, srcCols []string, r *internal.Reader) { internal.VerbosePrintf("Parsing COPY-FROM stdin block starting at line=%d/fpos=%d\n", r.LineNumber, r.Offset) + logger.Log.Debug(fmt.Sprintf("Parsing COPY-FROM stdin block starting at line=%d/fpos=%d\n", r.LineNumber, r.Offset)) for { b := r.ReadLine() if string(b) == "\\.\n" || string(b) == "\\.\r\n" { internal.VerbosePrintf("Parsed COPY-FROM stdin block ending at line=%d/fpos=%d\n", r.LineNumber, r.Offset) + logger.Log.Debug(fmt.Sprintf("Parsed COPY-FROM stdin block ending at line=%d/fpos=%d\n", r.LineNumber, r.Offset)) return } if r.EOF { @@ -269,6 +273,7 @@ func processAlterTableStmt(conv *internal.Conv, n *pg_query.AlterTableStmt) { // in verbose mode, but otherwise we just skip these statements. 
conv.SkipStatement(printNodeType(n)) internal.VerbosePrintf("Processing %v statement: table %s not found", printNodeType(n), table) + logger.Log.Debug(fmt.Sprintf("Processing %v statement: table %s not found", printNodeType(n), table)) } } @@ -289,6 +294,8 @@ func processCreateStmt(conv *internal.Conv, n *pg_query.CreateStmt) { conv.SkipStatement(printNodeType(n)) conv.Unexpected(fmt.Sprintf("Found inherited table %s -- we do not currently handle inherited tables", table)) internal.VerbosePrintf("Processing %v statement: table %s is inherited table", printNodeType(n), table) + logger.Log.Debug(fmt.Sprintf("Processing %v statement: table %s is inherited table", printNodeType(n), table)) + return } var constraints []constraint @@ -355,6 +362,8 @@ func processInsertStmt(conv *internal.Conv, n *pg_query.InsertStmt) *copyOrInser // for a table is that it is an inherited table - we skip all inherited tables. conv.SkipStatement(printNodeType(n)) internal.VerbosePrintf("Processing %v statement: table %s not found", printNodeType(n), table) + logger.Log.Debug(fmt.Sprintf("Processing %v statement: table %s not found", printNodeType(n), table)) + return nil } conv.StatsAddRow(table, conv.SchemaMode()) @@ -399,6 +408,7 @@ func processCopyStmt(conv *internal.Conv, n *pg_query.CopyStmt) *copyOrInsert { // for a table is that it is an inherited table - we skip all inherited tables.
conv.SkipStatement(printNodeType(n)) internal.VerbosePrintf("Processing %v statement: table %s not found", printNodeType(n), table) + logger.Log.Debug(fmt.Sprintf("Processing %v statement: table %s not found", printNodeType(n), table)) return &copyOrInsert{stmt: copyFrom, table: table, cols: []string{}} } var cols []string diff --git a/sources/sqlserver/infoschema_test.go b/sources/sqlserver/infoschema_test.go index e392497a9d..fb2bc7446e 100644 --- a/sources/sqlserver/infoschema_test.go +++ b/sources/sqlserver/infoschema_test.go @@ -21,11 +21,17 @@ import ( "github.com/DATA-DOG/go-sqlmock" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/sources/common" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" "github.com/stretchr/testify/assert" + "go.uber.org/zap" ) +func init() { + logger.Log = zap.NewNop() +} + type mockSpec struct { query string args []driver.Value // Query args. diff --git a/spanner/writer/batchwriter.go b/spanner/writer/batchwriter.go index c82a78bd3a..2ad06fd4a9 100644 --- a/spanner/writer/batchwriter.go +++ b/spanner/writer/batchwriter.go @@ -25,6 +25,7 @@ import ( "unsafe" sp "cloud.google.com/go/spanner" + "github.com/cloudspannerecosystem/harbourbridge/logger" ) // Parameters used to control building batches to write to Spanner.
@@ -131,6 +132,9 @@ func (bw *BatchWriter) Flush() { fmt.Printf("Starting write of %d rows to Spanner (%d bytes, %d mutations) [%d in progress]\n", len(m), bytes, count, atomic.LoadInt64(&bw.async.writes)) } + logger.Log.Debug(fmt.Sprintf("Starting write of %d rows to Spanner (%d bytes, %d mutations) [%d in progress]\n", + len(m), bytes, count, atomic.LoadInt64(&bw.async.writes))) + bw.startWrite(m) } else { time.Sleep(10 * time.Millisecond) @@ -218,7 +222,7 @@ func (bw *BatchWriter) errorStats(rows []*row, err error, retry bool) { if bw.verbose { fmt.Printf("Error while writing %d rows to Spanner: %v\n", len(rows), err) } - + logger.Log.Debug(fmt.Sprintf("Error while writing %d rows to Spanner: %v\n", len(rows), err)) bw.async.lock.Lock() defer bw.async.lock.Unlock() @@ -258,6 +262,9 @@ func (bw *BatchWriter) doWriteAndHandleErrors(rows []*row) { if hitRetryLimit && bw.verbose { fmt.Printf("Have hit %d retries: will not do any more\n", atomic.LoadInt64(&bw.async.retries)) } + if hitRetryLimit { + logger.Log.Debug(fmt.Sprintf("Have hit %d retries: will not do any more\n", atomic.LoadInt64(&bw.async.retries))) + } return } // Split into 10 pieces and retry. 
This is useful @@ -306,6 +313,8 @@ func (bw *BatchWriter) writeData() { fmt.Printf("Starting write of %d rows to Spanner (%d bytes, %d mutations) [%d in progress]\n", len(m), bytes, count, atomic.LoadInt64(&bw.async.writes)) } + logger.Log.Debug(fmt.Sprintf("Starting write of %d rows to Spanner (%d bytes, %d mutations) [%d in progress]\n", + len(m), bytes, count, atomic.LoadInt64(&bw.async.writes))) bw.startWrite(m) } else { if bw.rBytes < bw.bytesLimit { diff --git a/spanner/writer/batchwriter_test.go b/spanner/writer/batchwriter_test.go index 4f8a503cb6..aba8d125a3 100644 --- a/spanner/writer/batchwriter_test.go +++ b/spanner/writer/batchwriter_test.go @@ -25,9 +25,15 @@ import ( "time" sp "cloud.google.com/go/spanner" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/stretchr/testify/assert" + "go.uber.org/zap" ) +func init() { + logger.Log = zap.NewNop() +} + // TestFlush tests NewBatchWriter, AddRow and Flush. func TestFlush(t *testing.T) { tests := []struct { diff --git a/testing/conversion/conversion_test.go b/testing/conversion/conversion_test.go index f3c0c57ff5..bf1fb467f7 100644 --- a/testing/conversion/conversion_test.go +++ b/testing/conversion/conversion_test.go @@ -30,8 +30,10 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/conversion" "github.com/cloudspannerecosystem/harbourbridge/internal" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/spanner/ddl" "github.com/stretchr/testify/assert" + "go.uber.org/zap" database "cloud.google.com/go/spanner/admin/database/apiv1" databasepb "google.golang.org/genproto/googleapis/spanner/admin/database/v1" @@ -45,6 +47,10 @@ var ( databaseAdmin *database.DatabaseAdminClient ) +func init() { + logger.Log = zap.NewNop() +} + func TestMain(m *testing.M) { cleanup := initTests() res := m.Run() diff --git a/testing/mysql/integration_test.go b/testing/mysql/integration_test.go index 1c1539a2f1..c090dd7719 100644 --- 
a/testing/mysql/integration_test.go +++ b/testing/mysql/integration_test.go @@ -29,8 +29,10 @@ import ( "github.com/cloudspannerecosystem/harbourbridge/common/constants" "github.com/cloudspannerecosystem/harbourbridge/common/utils" + "github.com/cloudspannerecosystem/harbourbridge/logger" "github.com/cloudspannerecosystem/harbourbridge/testing/common" "github.com/stretchr/testify/assert" + "go.uber.org/zap" "cloud.google.com/go/spanner" database "cloud.google.com/go/spanner/admin/database/apiv1" @@ -39,6 +41,10 @@ import ( databasepb "google.golang.org/genproto/googleapis/spanner/admin/database/v1" ) +func init() { + logger.Log = zap.NewNop() +} + var ( projectID string instanceID string