Skip to content

Commit

Permalink
Modify error message strings. (#2164)
Browse files Browse the repository at this point in the history
Modify error strings so that user-specific details appear only after a ":". This is because, as part of sending error messages to callhome, we sanitize them by keeping only the segment of the error message before the first ":".
  • Loading branch information
makalaaneesh authored Jan 9, 2025
1 parent 95cc498 commit 073fdac
Show file tree
Hide file tree
Showing 37 changed files with 174 additions and 174 deletions.
12 changes: 6 additions & 6 deletions yb-voyager/cmd/analyzeSchema.go
Original file line number Diff line number Diff line change
Expand Up @@ -329,14 +329,14 @@ func checkStmtsUsingParser(sqlInfoArr []sqlInfo, fpath string, objType string) {
}
err = parserIssueDetector.ParseRequiredDDLs(sqlStmtInfo.formattedStmt)
if err != nil {
utils.ErrExit("error parsing stmt[%s]: %v", sqlStmtInfo.formattedStmt, err)
utils.ErrExit("error parsing stmt: [%s]: %v", sqlStmtInfo.formattedStmt, err)
}
if parserIssueDetector.IsGinIndexPresentInSchema {
summaryMap["INDEX"].details[GIN_INDEX_DETAILS] = true
}
ddlIssues, err := parserIssueDetector.GetDDLIssues(sqlStmtInfo.formattedStmt, targetDbVersion)
if err != nil {
utils.ErrExit("error getting ddl issues for stmt[%s]: %v", sqlStmtInfo.formattedStmt, err)
utils.ErrExit("error getting ddl issues for stmt: [%s]: %v", sqlStmtInfo.formattedStmt, err)
}
for _, i := range ddlIssues {
schemaAnalysisReport.Issues = append(schemaAnalysisReport.Issues, convertIssueInstanceToAnalyzeIssue(i, fpath, false))
Expand Down Expand Up @@ -852,7 +852,7 @@ func parseSqlFileForObjectType(path string, objType string) []sqlInfo {
reportNextSql := 0
file, err := os.ReadFile(path)
if err != nil {
utils.ErrExit("Error while reading %q: %s", path, err)
utils.ErrExit("Error while reading file: %q: %s", path, err)
}

lines := strings.Split(string(file), "\n")
Expand Down Expand Up @@ -1098,7 +1098,7 @@ func checkConversions(sqlInfoArr []sqlInfo, filePath string) {
for _, sqlStmtInfo := range sqlInfoArr {
parseTree, err := pg_query.Parse(sqlStmtInfo.stmt)
if err != nil {
utils.ErrExit("failed to parse the stmt %v: %v", sqlStmtInfo.stmt, err)
utils.ErrExit("failed to parse the stmt: %v: %v", sqlStmtInfo.stmt, err)
}

createConvNode, ok := parseTree.Stmts[0].Stmt.Node.(*pg_query.Node_CreateConversionStmt)
Expand Down Expand Up @@ -1202,7 +1202,7 @@ func generateAnalyzeSchemaReport(msr *metadb.MigrationStatusRecord, reportFormat

file, err := os.OpenFile(reportPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
utils.ErrExit("Error while opening %q: %s", reportPath, err)
utils.ErrExit("Error while opening: %q: %s", reportPath, err)
}
defer func() {
if err := file.Close(); err != nil {
Expand All @@ -1212,7 +1212,7 @@ func generateAnalyzeSchemaReport(msr *metadb.MigrationStatusRecord, reportFormat

_, err = file.WriteString(finalReport)
if err != nil {
utils.ErrExit("failed to write report to %q: %s", reportPath, err)
utils.ErrExit("failed to write report to: %q: %s", reportPath, err)
}
fmt.Printf("-- find schema analysis report at: %s\n", reportPath)
return nil
Expand Down
4 changes: 2 additions & 2 deletions yb-voyager/cmd/archiveCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -59,12 +59,12 @@ func validateCommonArchiveFlags() {
func validateMoveToFlag() {
if moveDestination != "" {
if !utils.FileOrFolderExists(moveDestination) {
utils.ErrExit("move destination %q doesn't exists.\n", moveDestination)
utils.ErrExit("move destination doesn't exists: %q: \n", moveDestination)
} else {
var err error
moveDestination, err = filepath.Abs(moveDestination)
if err != nil {
utils.ErrExit("Failed to get absolute path for move destination %q: %v\n", moveDestination, err)
utils.ErrExit("Failed to get absolute path for move destination: %q: %v\n", moveDestination, err)
}
moveDestination = filepath.Clean(moveDestination)
fmt.Printf("Note: Using %q as move destination\n", moveDestination)
Expand Down
6 changes: 3 additions & 3 deletions yb-voyager/cmd/assessMigrationBulkCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ var assessMigrationBulkCmd = &cobra.Command{
PreRun: func(cmd *cobra.Command, args []string) {
err := validateFleetConfigFile(fleetConfigPath)
if err != nil {
utils.ErrExit("%s", err.Error())
utils.ErrExit("validating fleet config file: %s", err.Error())
}
},

Expand Down Expand Up @@ -458,15 +458,15 @@ func validateBulkAssessmentDirFlag() {
utils.ErrExit(`ERROR: required flag "bulk-assessment-dir" not set`)
}
if !utils.FileOrFolderExists(bulkAssessmentDir) {
utils.ErrExit("bulk-assessment-dir %q doesn't exists.\n", bulkAssessmentDir)
utils.ErrExit("bulk-assessment-dir doesn't exists: %q\n", bulkAssessmentDir)
} else {
if bulkAssessmentDir == "." {
fmt.Println("Note: Using current directory as bulk-assessment-dir")
}
var err error
bulkAssessmentDir, err = filepath.Abs(bulkAssessmentDir)
if err != nil {
utils.ErrExit("Failed to get absolute path for bulk-assessment-dir %q: %v\n", exportDir, err)
utils.ErrExit("Failed to get absolute path for bulk-assessment-dir: %q: %v\n", exportDir, err)
}
bulkAssessmentDir = filepath.Clean(bulkAssessmentDir)
}
Expand Down
4 changes: 2 additions & 2 deletions yb-voyager/cmd/assessMigrationCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -648,7 +648,7 @@ func checkStartCleanForAssessMigration(metadataDirPassedByUser bool) {
utils.ErrExit("failed to start clean: %v", err)
}
} else {
utils.ErrExit("assessment metadata or reports files already exist in the assessment directory at '%s'. Use the --start-clean flag to clear the directory before proceeding.", assessmentDir)
utils.ErrExit("assessment metadata or reports files already exist in the assessment directory: '%s'. Use the --start-clean flag to clear the directory before proceeding.", assessmentDir)
}
}
}
Expand Down Expand Up @@ -1713,7 +1713,7 @@ func validateSourceDBTypeForAssessMigration() {
func validateAssessmentMetadataDirFlag() {
if assessmentMetadataDirFlag != "" {
if !utils.FileOrFolderExists(assessmentMetadataDirFlag) {
utils.ErrExit("assessment metadata directory %q provided with `--assessment-metadata-dir` flag does not exist", assessmentMetadataDirFlag)
utils.ErrExit("assessment metadata directory: %q provided with `--assessment-metadata-dir` flag does not exist", assessmentMetadataDirFlag)
} else {
log.Infof("using provided assessment metadata directory: %s", assessmentMetadataDirFlag)
}
Expand Down
20 changes: 10 additions & 10 deletions yb-voyager/cmd/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -165,15 +165,15 @@ func getMappingForTableNameVsTableFileName(dataDirPath string, noWait bool) map[
fullTableName := fmt.Sprintf("%s.%s", schemaName, tableName)
table, err := namereg.NameReg.LookupTableName(fullTableName)
if err != nil {
utils.ErrExit("lookup table %s in name registry : %v", fullTableName, err)
utils.ErrExit("lookup table in name registry: %q: %v", fullTableName, err)
}
tableNameVsFileNameMap[table.ForKey()] = fileName
}
}

tocTextFileDataBytes, err := os.ReadFile(tocTextFilePath)
if err != nil {
utils.ErrExit("Failed to read file %q: %v", tocTextFilePath, err)
utils.ErrExit("Failed to read file: %q: %v", tocTextFilePath, err)
}

tocTextFileData := strings.Split(string(tocTextFileDataBytes), "\n")
Expand Down Expand Up @@ -208,7 +208,7 @@ func GetTableRowCount(filePath string) map[string]int64 {

fileBytes, err := os.ReadFile(filePath)
if err != nil {
utils.ErrExit("read file %q: %s", filePath, err)
utils.ErrExit("read file: %q: %s", filePath, err)
}

lines := strings.Split(strings.Trim(string(fileBytes), "\n"), "\n")
Expand Down Expand Up @@ -294,7 +294,7 @@ func displayExportedRowCountSnapshot(snapshotViaDebezium bool) {
for _, key := range keys {
table, err := namereg.NameReg.LookupTableName(key)
if err != nil {
utils.ErrExit("lookup table %s in name registry : %v", key, err)
utils.ErrExit("lookup table in name registry: %q: %v", key, err)
}
displayTableName := table.CurrentName.Unqualified.MinQuoted
//Using the ForOutput() as a key for leafPartitions map as we are populating the map in that way.
Expand Down Expand Up @@ -328,7 +328,7 @@ func displayExportedRowCountSnapshot(snapshotViaDebezium bool) {
}
table, err := namereg.NameReg.LookupTableName(fmt.Sprintf("%s.%s", tableStatus.SchemaName, tableStatus.TableName))
if err != nil {
utils.ErrExit("lookup table %s in name registry : %v", tableStatus.TableName, err)
utils.ErrExit("lookup table in name registry : %q: %v", tableStatus.TableName, err)
}
displayTableName := table.CurrentName.Unqualified.MinQuoted
partitions := leafPartitions[table.ForOutput()]
Expand Down Expand Up @@ -388,7 +388,7 @@ func displayImportedRowCountSnapshot(state *ImportDataState, tasks []*ImportFile
for _, tableName := range tableList {
tableRowCount, err := state.GetImportedSnapshotRowCountForTable(tableName)
if err != nil {
utils.ErrExit("could not fetch snapshot row count for table %q: %w", tableName, err)
utils.ErrExit("could not fetch snapshot row count for table: %q: %w", tableName, err)
}
snapshotRowCount.Put(tableName, tableRowCount)
}
Expand Down Expand Up @@ -441,7 +441,7 @@ func CreateMigrationProjectIfNotExists(dbType string, exportDir string) {
for _, subdir := range projectSubdirs {
err := exec.Command("mkdir", "-p", filepath.Join(projectDirPath, subdir)).Run()
if err != nil {
utils.ErrExit("couldn't create sub-directories under %q: %v", projectDirPath, err)
utils.ErrExit("couldn't create sub-directories under: %q: %v", projectDirPath, err)
}
}

Expand All @@ -458,7 +458,7 @@ func CreateMigrationProjectIfNotExists(dbType string, exportDir string) {

err := exec.Command("mkdir", "-p", filepath.Join(schemaDir, databaseObjectDirName)).Run()
if err != nil {
utils.ErrExit("couldn't create sub-directories under %q: %v", schemaDir, err)
utils.ErrExit("couldn't create sub-directories under: %q: %v", schemaDir, err)
}
}

Expand Down Expand Up @@ -929,15 +929,15 @@ func renameTableIfRequired(table string) (string, bool) {
}
defaultSchema, noDefaultSchema := GetDefaultPGSchema(schema, "|")
if noDefaultSchema && len(strings.Split(table, ".")) <= 1 {
utils.ErrExit("no default schema found to qualify table %s", table)
utils.ErrExit("no default schema found to qualify table: %s", table)
}
tableName := sqlname.NewSourceNameFromMaybeQualifiedName(table, defaultSchema)
fromTable := tableName.Qualified.Unquoted

if renameTablesMap[fromTable] != "" {
tableTup, err := namereg.NameReg.LookupTableName(renameTablesMap[fromTable])
if err != nil {
utils.ErrExit("lookup failed for the table %s", renameTablesMap[fromTable])
utils.ErrExit("lookup failed for the table: %s", renameTablesMap[fromTable])
}

return tableTup.ForMinOutput(), true
Expand Down
8 changes: 4 additions & 4 deletions yb-voyager/cmd/endMigrationCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -664,7 +664,7 @@ func cleanupExportDir() {
for _, subdir := range subdirs {
err := os.RemoveAll(filepath.Join(exportDir, subdir))
if err != nil {
utils.ErrExit("removing %s directory: %v", subdir, err)
utils.ErrExit("removing directory: %q: %v", subdir, err)
}
}
}
Expand Down Expand Up @@ -785,7 +785,7 @@ func stopVoyagerCommand(lockFile *lockfile.Lockfile, signal syscall.Signal) {
ongoingCmd := lockFile.GetCmdName()
ongoingCmdPID, err := lockFile.GetCmdPID()
if err != nil {
utils.ErrExit("getting PID of ongoing voyager command %q: %v", ongoingCmd, err)
utils.ErrExit("getting PID of ongoing voyager command: %q: %v", ongoingCmd, err)
}

fmt.Printf("stopping the ongoing command: %s\n", ongoingCmd)
Expand All @@ -811,7 +811,7 @@ func stopDataExportCommand(lockFile *lockfile.Lockfile) {
ongoingCmd := lockFile.GetCmdName()
ongoingCmdPID, err := lockFile.GetCmdPID()
if err != nil {
utils.ErrExit("getting PID of ongoing voyager command %q: %v", ongoingCmd, err)
utils.ErrExit("getting PID of ongoing voyager command: %q: %v", ongoingCmd, err)
}

fmt.Printf("stopping the ongoing command: %s\n", ongoingCmd)
Expand All @@ -830,7 +830,7 @@ func areOnDifferentFileSystems(path1 string, path2 string) bool {
err2 := syscall.Stat(path2, &stat2)

if err1 != nil || err2 != nil {
utils.ErrExit("getting file system info for %s and %s: %v, %v", path1, path2, err1, err2)
utils.ErrExit("getting file system info: for %s and %s: %v, %v", path1, path2, err1, err2)
}

return stat1.Dev != stat2.Dev
Expand Down
2 changes: 1 addition & 1 deletion yb-voyager/cmd/export.go
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ func validateSSLMode() {
if source.DBType == ORACLE || slices.Contains(validSSLModes[source.DBType], source.SSLMode) {
return
} else {
utils.ErrExit("Error: Invalid sslmode: %q. Valid SSL modes are %v", source.SSLMode, validSSLModes[source.DBType])
utils.ErrExit("Invalid sslmode: %q. Valid SSL modes are %v", source.SSLMode, validSSLModes[source.DBType])
}
}

Expand Down
22 changes: 11 additions & 11 deletions yb-voyager/cmd/exportData.go
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ func exportData() bool {

res := source.DB().CheckSchemaExists()
if !res {
utils.ErrExit("schema %q does not exist", source.Schema)
utils.ErrExit("schema does not exist : %q", source.Schema)
}

if source.RunGuardrailsChecks {
Expand Down Expand Up @@ -301,7 +301,7 @@ func exportData() bool {
//Fine to lookup directly as this will root table in case of partitions
tuple, err := namereg.NameReg.LookupTableName(renamedTable)
if err != nil {
utils.ErrExit("lookup table name %s: %v", renamedTable, err)
utils.ErrExit("lookup table name: %s: %v", renamedTable, err)
}
currPartitions, ok := leafPartitions.Get(tuple)
if !ok {
Expand Down Expand Up @@ -759,7 +759,7 @@ func getInitialTableList() (map[string]string, []sqlname.NameTuple) {
if parent == "" {
tuple, err = namereg.NameReg.LookupTableName(fmt.Sprintf("%s.%s", schema, table))
if err != nil {
utils.ErrExit("lookup for table name %s failed err: %v", table, err)
utils.ErrExit("lookup for table name failed err: %s: %v", table, err)
}
}
fullTableList = append(fullTableList, tuple)
Expand Down Expand Up @@ -868,8 +868,8 @@ func exportDataOffline(ctx context.Context, cancel context.CancelFunc, finalTabl
log.Infoln("Cancel() being called, within exportDataOffline()")
cancel() //will cancel/stop both dump tool and progress bar
time.Sleep(time.Second * 5) //give sometime for the cancel to complete before this function returns
utils.ErrExit("yb-voyager encountered internal error. "+
"Check %s/logs/yb-voyager-export-data.log for more details.", exportDir)
utils.ErrExit("yb-voyager encountered internal error: "+
"Check: %s/logs/yb-voyager-export-data.log for more details.", exportDir)
}
}()

Expand All @@ -896,7 +896,7 @@ func exportDataOffline(ctx context.Context, cancel context.CancelFunc, finalTabl
for _, seq := range sequenceList {
seqTuple, err := namereg.NameReg.LookupTableName(seq)
if err != nil {
utils.ErrExit("lookup for sequence %s failed err: %v", seq, err)
utils.ErrExit("lookup for sequence failed: %s: err: %v", seq, err)
}
finalTableList = append(finalTableList, seqTuple)
}
Expand Down Expand Up @@ -1019,7 +1019,7 @@ func clearMigrationStateIfRequired() {
dbzm.IsMigrationInStreamingMode(exportDir) {
utils.PrintAndLog("Continuing streaming from where we left off...")
} else {
utils.ErrExit("%s/data directory is not empty, use --start-clean flag to clean the directories and start", exportDir)
utils.ErrExit("data directory is not empty, use --start-clean flag to clean the directories and start: %s", exportDir)
}
}
}
Expand Down Expand Up @@ -1047,7 +1047,7 @@ func extractTableListFromString(fullTableList []sqlname.NameTuple, flagTableList
result := lo.Filter(fullTableList, func(tableName sqlname.NameTuple, _ int) bool {
ok, err := tableName.MatchesPattern(pattern)
if err != nil {
utils.ErrExit("Invalid table name pattern %q: %s", pattern, err)
utils.ErrExit("Invalid table name pattern: %q: %s", pattern, err)
}
return ok
})
Expand All @@ -1064,7 +1064,7 @@ func extractTableListFromString(fullTableList []sqlname.NameTuple, flagTableList
}
if len(unknownTableNames) > 0 {
utils.PrintAndLog("Unknown table names %v in the %s list", unknownTableNames, listName)
utils.ErrExit("Valid table names are %v", lo.Map(fullTableList, func(tableName sqlname.NameTuple, _ int) string {
utils.ErrExit("Valid table names are: %v", lo.Map(fullTableList, func(tableName sqlname.NameTuple, _ int) string {
return tableName.ForOutput()
}))
}
Expand Down Expand Up @@ -1143,13 +1143,13 @@ func startFallBackSetupIfRequired() {
utils.PrintAndLog("Starting import data to source with command:\n %s", color.GreenString(cmdStr))
binary, lookErr := exec.LookPath(os.Args[0])
if lookErr != nil {
utils.ErrExit("could not find yb-voyager - %w", err)
utils.ErrExit("could not find yb-voyager: %w", err)
}
env := os.Environ()
env = slices.Insert(env, 0, "SOURCE_DB_PASSWORD="+source.Password)
execErr := syscall.Exec(binary, cmd, env)
if execErr != nil {
utils.ErrExit("failed to run yb-voyager import data to source - %w\n Please re-run with command :\n%s", err, cmdStr)
utils.ErrExit("failed to run yb-voyager import data to source: %w\n Please re-run with command :\n%s", err, cmdStr)
}
}

Expand Down
2 changes: 1 addition & 1 deletion yb-voyager/cmd/exportDataStatus.go
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ func startExportPB(progressContainer *mpb.Progress, mapKey string, quitChan chan
time.Sleep(100 * time.Millisecond)
break
} else if err != nil { //error other than EOF
utils.ErrExit("Error while reading file %s: %v", tableDataFile.Name(), err)
utils.ErrExit("Error while reading file: %s: %v", tableDataFile.Name(), err)
}
if isDataLine(line, source.DBType, &insideCopyStmt) {
tableMetadata.CountLiveRows += 1
Expand Down
8 changes: 4 additions & 4 deletions yb-voyager/cmd/exportDataStatusCommand.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ var exportDataStatusCmd = &cobra.Command{
reportFile := jsonfile.NewJsonFile[[]*exportTableMigStatusOutputRow](reportFilePath)
err := reportFile.Create(&rows)
if err != nil {
utils.ErrExit("creating into json file %s: %v", reportFilePath, err)
utils.ErrExit("creating into json file: %s: %v", reportFilePath, err)
}
fmt.Print(color.GreenString("Export data status report is written to %s\n", reportFilePath))
return
Expand Down Expand Up @@ -123,7 +123,7 @@ func runExportDataStatusCmdDbzm(streamChanges bool, leafPartitions map[string][]
exportStatusFilePath := filepath.Join(exportDir, "data", "export_status.json")
status, err := dbzm.ReadExportStatus(exportStatusFilePath)
if err != nil {
utils.ErrExit("Failed to read export status file %s: %v", exportStatusFilePath, err)
utils.ErrExit("Failed to read export status file: %s: %v", exportStatusFilePath, err)
}
if status == nil {
return nil, fmt.Errorf("export data has not started yet. Try running after export has started")
Expand All @@ -142,7 +142,7 @@ func runExportDataStatusCmdDbzm(streamChanges bool, leafPartitions map[string][]
func getSnapshotExportStatusRow(tableStatus *dbzm.TableExportStatus, leafPartitions map[string][]string, msr *metadb.MigrationStatusRecord) *exportTableMigStatusOutputRow {
nt, err := namereg.NameReg.LookupTableName(fmt.Sprintf("%s.%s", tableStatus.SchemaName, tableStatus.TableName))
if err != nil {
utils.ErrExit("lookup %s in name registry: %v", tableStatus.TableName, err)
utils.ErrExit("lookup in name registry: %s: %v", tableStatus.TableName, err)
}
//Using the ForOutput() as a key for leafPartitions map as we are populating the map in that way.
displayTableName := getDisplayName(nt, leafPartitions[nt.ForOutput()], msr.IsExportTableListSet)
Expand Down Expand Up @@ -183,7 +183,7 @@ func runExportDataStatusCmd(msr *metadb.MigrationStatusRecord, leafPartitions ma
if errors.Is(err, fs.ErrNotExist) {
return nil, fmt.Errorf("export data has not started yet. Try running after export has started")
}
utils.ErrExit("Failed to read export status file %s: %v", exportSnapshotStatusFilePath, err)
utils.ErrExit("Failed to read export status file: %s: %v", exportSnapshotStatusFilePath, err)
}

exportedSnapshotRow, exportedSnapshotStatus, err := getExportedSnapshotRowsMap(exportStatusSnapshot)
Expand Down
Loading

0 comments on commit 073fdac

Please sign in to comment.