From b9174eaa741cb7ac6c237ab8cd3c3cc63066fca5 Mon Sep 17 00:00:00 2001 From: Manjunath Davanam Date: Tue, 3 Sep 2024 12:40:38 +0530 Subject: [PATCH] V2 APIs changes (#21) * #OBS-I116: Dataset CRUD APIs test cases and fixes * #OBS-I116: Dataset update API Dedupe and denorm test cases fixes * #OBS-I116: Dataset Create api test case fixes * #OBS-I116: Dataset update extraction config api test case fixes * #OBS-I116: Dataset update api test cases * #OBS-I116: fix: linting fixes * #OBS-I116: lint fixes * #OBS-I116: Dataset status transition test cases * #OBS-I116: feat: Test cases and linting fixes * #OBS-I116: feat: Dataset status transition test cases fix * #OBS-I141: added a new metric to sum the response time * #OBS-I141: modified the url variable and access dataset_id from params * #OBS-I141: added helper function to get dataset_id for error cases * #OBS-I141: added telemetry for v2 api's * #OBS-I141: added a new metric to sum the response time * #OBS-I141: modified the url variable and access dataset_id from params * #OBS-I141: added helper function to get dataset_id for error cases * #OBS-I141: added telemetry for v2 api's * #OBS-I141: added telemetry for v2 api's * #OBS-I143: feat: dataset publish changes to deploy flink connectors * #OBS-I141: removed metric for sum of response time * #OBS-I141: removed usage of builtin kafka methods from telemetry file * #OBS-I146: feat: Retire fix * Issue #SBCOSS-12 fix: convert all SQL raw queries to prepared statements * Issue #SBCOSS-12 fix: tags is an array, so requires empty json for null case; dataset draft deletion requires deletion of transformation and source config drafts * #SBCOSS-23: feat: dataset publish changes for redeployment * #OBS-I146: fix: Test case fix for read api * #OBS-I146: fix: Test case fix for read api changes * #OBS-I146: fix: status transition test cases * #OBS-I146: fix: Test case script fix * #OBS-I146: fix: Type error fix * #OBS-I146: fix: Dataset read api test cases fixes * #OBS-I146: fix: Hudi spec generation test cases * #OBS-I146: fix: Test case and linting fix * #OBS-I173: fix: Dataset update changes to accept type changes * #OBS-I146: fix: linting fix * #OBS-I146: fix: linting fix * #OBS-I143: dataset publish changes fixes * #OBS-I143: inswert query fix * Issue #OBS-I144 fix: icon data as string; check default version * #OBS-143: fix: dataset publish fixes * #OBS-I181 - Updated the event structure --------- Co-authored-by: JeraldJF Co-authored-by: Rakshitha-D Co-authored-by: SurabhiAngadi Co-authored-by: Harish Kumar Gangula Co-authored-by: Aniket Sakinala Co-authored-by: Ravi Mula --- .github/workflows/pull_request.yaml | 1 - api-service/.eslintignore | 2 +- api-service/.eslintrc | 3 +- api-service/package.json | 5 +- api-service/src/app.ts | 3 + api-service/src/configs/Config.ts | 2 +- .../src/connections/grafanaConnection.ts | 2 +- api-service/src/controllers/Alerts/Alerts.ts | 16 +- api-service/src/controllers/Alerts/Metric.ts | 20 +- api-service/src/controllers/Alerts/Silence.ts | 18 +- .../DataIngestion/DataIngestionController.ts | 23 +- .../controllers/DatasetCopy/DatasetCopy.ts | 6 +- .../DatasetCopy/DatasetCopyHelper.ts | 6 +- .../DatasetImport/DatasetImport.ts | 6 +- .../DatasetImport/DatasetImportHelper.ts | 4 +- .../controllers/DatasetRead/DatasetRead.ts | 21 +- .../DatasetStatusTransition.ts | 28 +- .../DatasetUpdate/DatasetUpdate.ts | 5 +- .../DatasetUpdateValidationSchema.json | 6 +- .../GenerateDataSchema/GenerateDataSchema.ts | 26 +- .../NotificationChannel/Notification.ts | 20 +- 
.../exceptions/SchemaGenerationException.ts | 2 +- api-service/src/helpers/ResponseHandler.ts | 6 +- api-service/src/metrics/prometheus/helpers.ts | 23 +- api-service/src/metrics/prometheus/index.ts | 2 +- api-service/src/middlewares/errors.ts | 6 +- api-service/src/models/Alert.ts | 2 +- api-service/src/models/Metric.ts | 2 +- api-service/src/models/Notification.ts | 2 +- api-service/src/models/Silence.ts | 2 +- api-service/src/routes/AlertsRouter.ts | 2 +- api-service/src/routes/Router.ts | 22 +- api-service/src/services/CipherService.ts | 10 +- .../CloudServices/AWSStorageService.ts | 2 +- .../CloudServices/AzureStorageService.ts | 2 +- .../CloudServices/GCPStorageService.ts | 2 +- .../src/services/DatasetHealthService.ts | 6 +- api-service/src/services/DatasetService.ts | 104 ++--- api-service/src/services/DatasourceService.ts | 339 --------------- api-service/src/services/HealthService.ts | 4 +- .../SchemaGenerateService/ConfigSuggester.ts | 8 +- .../DataSchemaService.ts | 68 +-- .../SchemaGenerateService/SchemaAnalyser.ts | 50 +-- .../SchemaArrayValidator.ts | 14 +- .../SchemaGeneratorUtils.ts | 6 +- .../SchemaGenerateService/SchemaHandler.ts | 48 +-- .../SuggestionTemplate.ts | 4 +- .../SchemaGenerateService/Template.ts | 2 +- api-service/src/services/TableGenerator.ts | 109 +++-- api-service/src/services/WrapperService.ts | 1 - api-service/src/services/fs.ts | 7 +- .../managers/grafana/alert/helpers/index.ts | 18 +- .../services/managers/grafana/alert/index.ts | 26 +- .../src/services/managers/grafana/index.ts | 6 +- .../grafana/notification/channels/email.ts | 8 +- .../grafana/notification/channels/index.ts | 8 +- .../grafana/notification/channels/slack.ts | 7 +- .../grafana/notification/channels/teams.ts | 7 +- .../grafana/notification/helpers/index.ts | 20 +- .../managers/grafana/notification/index.ts | 3 +- .../grafana/silences/helpers/index.ts | 2 +- api-service/src/services/managers/index.ts | 14 +- .../managers/prometheus/alert/index.ts | 12 +- .../src/services/managers/prometheus/index.ts | 6 +- .../managers/prometheus/notification/index.ts | 8 +- .../managers/prometheus/silences/index.ts | 10 +- api-service/src/services/telemetry.ts | 12 +- api-service/src/telemetry/telemetryActions.ts | 5 +- .../DataIngestTest/DataIngestionTest.spec.ts | 19 +- .../DataOutTest/DataQueryTest.spec.ts | 8 +- .../DatasetManagement/DataOutTest/Fixtures.ts | 22 +- .../DatasetCreate/DatasetCreate.spec.ts | 62 +-- .../DatasetCreate/Fixtures.ts | 288 +++++++------ .../DatasetList/DatasetList.spec.ts | 85 +--- .../DatasetManagement/DatasetList/Fixtures.ts | 192 ++------- .../DatasetRead/DatasetRead.spec.ts | 230 +++++++---- .../DatasetManagement/DatasetRead/Fixtures.ts | 213 ++++------ .../DatasetDelete.spec.ts | 6 +- .../DatasetLive.spec.ts | 241 ++++++++++- .../DatasetReadyToPublish.spec.ts | 48 ++- .../DatasetRetire.spec.ts | 104 +---- .../DatasetStatusTransition.spec.ts | 17 +- .../DatasetStatusTransition/Fixtures.ts | 337 +++++++-------- .../DatasetUpdate/DatasetConnectors.spec.ts | 74 ++++ .../DatasetUpdate/DatasetDedup.spec.ts | 23 +- .../DatasetUpdate/DatasetDenorm.spec.ts | 108 +---- .../DatasetUpdate/DatasetExtraction.spec.ts | 36 +- .../DatasetUpdate/DatasetTags.spec.ts | 98 +---- .../DatasetTransformation.spec.ts | 201 +-------- .../DatasetUpdate/DatasetUpdate.spec.ts | 123 ++---- .../DatasetUpdate/DatasetValidation.spec.ts | 24 +- .../DatasetUpdate/Fixtures.ts | 258 ++++-------- .../GenerateSignedURL/Fixtures.ts | 16 +- .../GenerateSignedURL.spec.ts | 4 +- 
.../CreateTemplate/CreateTemplate.spec.ts | 6 +- .../DeleteTemplate/DeleteTemplate.spec.ts | 8 +- .../ListTemplates/ListTemplates.spec.ts | 6 +- .../ReadTemplate/ReadTemplate.spec.ts | 18 +- .../TemplateQuerying/TemplateQuerying.spec.ts | 42 +- .../UpdateTemplate/UpdateTemplate.spec.ts | 6 +- .../SqlWrapper/SqlWrapper.spec.ts | 4 +- api-service/src/types/ConfigModels.ts | 2 +- command-service/Dockerfile | 2 +- .../{flink => flink-connector}/Chart.lock | 0 .../{flink => flink-connector}/Chart.yaml | 0 .../charts/.helmignore | 0 .../charts/common/Chart.yaml | 0 .../charts/common/templates/_affinities.tpl | 0 .../charts/common/templates/_capabilities.tpl | 0 .../charts/common/templates/_configs.tpl | 0 .../charts/common/templates/_errors.tpl | 0 .../charts/common/templates/_images.tpl | 0 .../charts/common/templates/_ingress.tpl | 0 .../charts/common/templates/_labels.tpl | 0 .../charts/common/templates/_names.tpl | 0 .../charts/common/templates/_secrets.tpl | 0 .../charts/common/templates/_storage.tpl | 0 .../charts/common/templates/_tplvalues.tpl | 0 .../charts/common/templates/_utils.tpl | 0 .../charts/common/templates/_variables.tpl | 0 .../charts/common/templates/_warnings.tpl | 0 .../templates/validations/_cassandra.tpl | 0 .../common/templates/validations/_mariadb.tpl | 0 .../common/templates/validations/_mongodb.tpl | 0 .../common/templates/validations/_mysql.tpl | 0 .../templates/validations/_postgresql.tpl | 0 .../common/templates/validations/_redis.tpl | 0 .../templates/validations/_validations.tpl | 0 .../charts/common/values.yaml | 0 .../templates/NOTES.txt | 0 .../templates/_base_serviceAccount.tpl | 0 .../templates/_helpers.tpl | 0 .../templates/_image_flink.tpl | 7 +- .../templates/_namespace.tpl | 0 .../templates/configmap.yaml | 0 .../templates/deployment.yaml | 95 ++--- .../templates/hpa.yaml | 0 .../templates/ingress.yaml | 0 .../templates/service.yaml | 0 .../templates/serviceaccount.yaml | 0 .../templates/servicemonitor.yaml | 0 .../{flink => flink-connector}/values.yaml | 38 +- .../spark-connector-cron/Chart.yaml | 2 +- .../src/command/alert_manager_command.py | 10 +- .../src/command/connector_command.py | 276 ++++++++++--- .../src/command/connector_registry.py | 149 +++++-- .../src/command/dataset_command.py | 17 +- command-service/src/command/db_command.py | 387 ++++++++++++------ command-service/src/command/druid_command.py | 3 +- command-service/src/config/service_config.yml | 2 +- command-service/src/service/db_service.py | 16 +- 151 files changed, 2368 insertions(+), 2817 deletions(-) create mode 100644 api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetConnectors.spec.ts rename command-service/helm-charts/{flink => flink-connector}/Chart.lock (100%) rename command-service/helm-charts/{flink => flink-connector}/Chart.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/.helmignore (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/Chart.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_affinities.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_capabilities.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_configs.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_errors.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_images.tpl (100%) rename 
command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_ingress.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_labels.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_names.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_secrets.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_storage.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_tplvalues.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_utils.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_variables.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/_warnings.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_cassandra.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_mariadb.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_mongodb.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_mysql.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_postgresql.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_redis.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/templates/validations/_validations.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/charts/common/values.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/NOTES.txt (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/_base_serviceAccount.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/_helpers.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/_image_flink.tpl (57%) rename command-service/helm-charts/{flink => flink-connector}/templates/_namespace.tpl (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/configmap.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/deployment.yaml (76%) rename command-service/helm-charts/{flink => flink-connector}/templates/hpa.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/ingress.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/service.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/serviceaccount.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/templates/servicemonitor.yaml (100%) rename command-service/helm-charts/{flink => flink-connector}/values.yaml (90%) diff --git a/.github/workflows/pull_request.yaml b/.github/workflows/pull_request.yaml index 12e967e2..8b4fc54d 100644 --- a/.github/workflows/pull_request.yaml +++ b/.github/workflows/pull_request.yaml @@ -24,5 +24,4 @@ jobs: cd api-service npm install npm run actions:test - npm run actions:test:v2 npm run lint \ No newline at end of file diff --git a/api-service/.eslintignore b/api-service/.eslintignore index f56aadad..e528fbc0 100644 --- 
a/api-service/.eslintignore +++ b/api-service/.eslintignore @@ -4,6 +4,6 @@ docs coverage @types .nyc_output -src/v2/tests +src/tests src/v1 dist \ No newline at end of file diff --git a/api-service/.eslintrc b/api-service/.eslintrc index 598451ab..ca799240 100644 --- a/api-service/.eslintrc +++ b/api-service/.eslintrc @@ -9,7 +9,8 @@ "plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/recommended" ], - "rules": { + "rules": { + "@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_" }], "@typescript-eslint/no-explicit-any": ["off"], "@typescript-eslint/no-useless-escape": ["off"], "@typescript-eslint/quotes": [ diff --git a/api-service/package.json b/api-service/package.json index f760df95..1d49c029 100644 --- a/api-service/package.json +++ b/api-service/package.json @@ -5,9 +5,8 @@ "main": "dist/app.js", "scripts": { "start": "ts-node ./src/app.ts", - "test": "source .env.test && nyc mocha ./src/v1/test/*.spec.ts --exit && nyc mocha ./src/v2/tests/**/*.spec.ts --exit", - "actions:test": "nyc mocha ./src/v1/test/*.spec.ts --exit", - "actions:test:v2": "nyc mocha ./src/v2/tests/**/*.spec.ts --exit", + "test": "source .env.test && nyc mocha ./src/tests/**/*.spec.ts --exit", + "actions:test": "nyc mocha ./src/tests/**/*.spec.ts --exit", "build": "rm -rf dist && tsc --declaration -P . && cp package.json ./dist/package.json", "package": "npm run build && cd dist && npm pack . && cd ..", "lint": "eslint . --ext .ts", diff --git a/api-service/src/app.ts b/api-service/src/app.ts index ec2a10cd..962331e9 100644 --- a/api-service/src/app.ts +++ b/api-service/src/app.ts @@ -8,13 +8,16 @@ import { errorHandler, obsrvErrorHandler } from "./middlewares/errors"; import { ResponseHandler } from "./helpers/ResponseHandler"; import { config } from "./configs/Config"; import { alertsRouter } from "./routes/AlertsRouter"; +import { interceptAuditEvents } from "./services/telemetry"; const app: Application = express(); + app.use(bodyParser.json({ limit: config.body_parser_limit})); app.use(express.text()); app.use(express.json()); app.use(errorHandler) +app.use(interceptAuditEvents()); app.use("/v2/", v2Router); app.use("/", druidProxyRouter); app.use("/alerts/v1", alertsRouter); diff --git a/api-service/src/configs/Config.ts b/api-service/src/configs/Config.ts index 28f83b1e..38fe624c 100644 --- a/api-service/src/configs/Config.ts +++ b/api-service/src/configs/Config.ts @@ -109,7 +109,7 @@ export const config = { "encryption_algorithm": process.env.encryption_algorithm || "aes-256-ecb", }, "grafana_config": { - "dialect": process.env.dialet || 'postgres', + "dialect": process.env.dialet || "postgres", "url": process.env.grafana_url || "http://localhost:8000", "access_token": process.env.grafana_token || "" } diff --git a/api-service/src/connections/grafanaConnection.ts b/api-service/src/connections/grafanaConnection.ts index 76df0cea..b70e6009 100644 --- a/api-service/src/connections/grafanaConnection.ts +++ b/api-service/src/connections/grafanaConnection.ts @@ -5,6 +5,6 @@ const grafanaHttpClient = axios.create({ baseURL: config.grafana_config.url }); -grafanaHttpClient.defaults.headers.common['Authorization'] = config.grafana_config.access_token; +grafanaHttpClient.defaults.headers.common["Authorization"] = config.grafana_config.access_token; export { grafanaHttpClient }; \ No newline at end of file diff --git a/api-service/src/controllers/Alerts/Alerts.ts b/api-service/src/controllers/Alerts/Alerts.ts index 056a2475..278ac586 100644 --- 
a/api-service/src/controllers/Alerts/Alerts.ts +++ b/api-service/src/controllers/Alerts/Alerts.ts @@ -17,9 +17,9 @@ const createAlertHandler = async (req: Request, res: Response, next: NextFunctio updateTelemetryAuditEvent({ request: req, object: { id: response?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: response.dataValues.id } }); } catch (error: any) { - let errorMessage = _.get(error, 'message') - if (_.get(error, 'name') == "SequelizeUniqueConstraintError") { - errorMessage = _.get(error, 'parent.detail') + let errorMessage = _.get(error, "message") + if (_.get(error, "name") == "SequelizeUniqueConstraintError") { + errorMessage = _.get(error, "parent.detail") } next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } @@ -44,7 +44,7 @@ const publishAlertHandler = async (req: Request, res: Response, next: NextFuncti const transformAlerts = async (alertModel: any) => { const alert = alertModel?.toJSON(); - const status = _.get(alert, 'status'); + const status = _.get(alert, "status"); if (status !== "live") return alert; return getAlertsMetadata(alert); } @@ -108,13 +108,13 @@ const updateAlertHandler = async (req: Request, res: Response, next: NextFunctio await retireAlertSilence(alertId); } const updatedPayload = getAlertPayload({ ...req.body, manager: rulePayload?.manager }); - await Alert.update({ ...updatedPayload, status: 'draft' }, { where: { id: alertId } }); + await Alert.update({ ...updatedPayload, status: "draft" }, { where: { id: alertId } }); updateTelemetryAuditEvent({ request: req, currentRecord: rulePayload, object: { id: alertId, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: alertId } }); } catch (error: any) { - let errorMessage = _.get(error, 'message') - if (_.get(error, 'name') == "SequelizeUniqueConstraintError") { - errorMessage = _.get(error, 'parent.detail') + let errorMessage = _.get(error, "message") + if (_.get(error, "name") == "SequelizeUniqueConstraintError") { + errorMessage = _.get(error, "parent.detail") } next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } diff --git a/api-service/src/controllers/Alerts/Metric.ts b/api-service/src/controllers/Alerts/Metric.ts index 212d5b13..6f80f16f 100644 --- a/api-service/src/controllers/Alerts/Metric.ts +++ b/api-service/src/controllers/Alerts/Metric.ts @@ -10,15 +10,15 @@ const telemetryObject = { type: "metric", ver: "1.0.0" }; const createMetricHandler = async (req: Request, res: Response, next: NextFunction) => { try { - const { component } = req?.body; + const { component } = req.body; const transformComponent = _.toLower(component); const metricsBody = await Metrics.create({ ...(req.body), component: transformComponent }); updateTelemetryAuditEvent({ request: req, object: { id: metricsBody?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id: metricsBody.dataValues.id } }); } catch (error: any) { - let errorMessage = _.get(error, 'message') - if (_.get(error, 'name') == "SequelizeUniqueConstraintError") { - errorMessage = _.get(error, 'parent.detail') + let errorMessage = _.get(error, "message") + if (_.get(error, "name") == "SequelizeUniqueConstraintError") { + errorMessage = _.get(error, "parent.detail") } next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } @@ -30,7 +30,7 @@ const listMetricsHandler = async (req: 
Request, res: Response, next: NextFunctio const metricsPayload = await Metrics.findAll({ limit: limit, offset: offset, ...(filters && { where: filters }) }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { metrics: metricsPayload, count: metricsPayload.length } }); } catch (error) { - const errorMessage = _.get(error, 'message') + const errorMessage = _.get(error, "message") next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } } @@ -50,9 +50,9 @@ const updateMetricHandler = async (req: Request, res: Response, next: NextFuncti }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id } }); } catch (error) { - let errorMessage = _.get(error, 'message') - if (_.get(error, 'name') == "SequelizeUniqueConstraintError") { - errorMessage = _.get(error, 'parent.detail') + let errorMessage = _.get(error, "message") + if (_.get(error, "name") == "SequelizeUniqueConstraintError") { + errorMessage = _.get(error, "parent.detail") } next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } @@ -66,7 +66,7 @@ const deleteMetricHandler = async (req: Request, res: Response, next: NextFuncti await record.destroy(); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: { id } }); } catch (error) { - const errorMessage = _.get(error, 'message') + const errorMessage = _.get(error, "message") next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } } @@ -78,7 +78,7 @@ const deleteMultipleMetricHandler = async (req: Request, res: Response, next: Ne await Metrics.destroy({ where: filters }); ResponseHandler.successResponse(req, res, { status: httpStatus.OK, data: {} }); } catch (error) { - const errorMessage = _.get(error, 'message') + const errorMessage = _.get(error, "message") next(errorResponse((httpStatus.INTERNAL_SERVER_ERROR, { message: errorMessage }))) } } diff --git a/api-service/src/controllers/Alerts/Silence.ts b/api-service/src/controllers/Alerts/Silence.ts index c578dcbc..32e2c531 100644 --- a/api-service/src/controllers/Alerts/Silence.ts +++ b/api-service/src/controllers/Alerts/Silence.ts @@ -18,8 +18,8 @@ const createHandler = async (request: Request, response: Response, next: NextFun const grafanaResponse = await createSilence(payload); if (!grafanaResponse) return next({ message: httpStatus[httpStatus.INTERNAL_SERVER_ERROR], statusCode: httpStatus.INTERNAL_SERVER_ERROR }) - let start_date = new Date(startDate); - let end_date = new Date(endDate); + const start_date = new Date(startDate); + const end_date = new Date(endDate); const silenceBody = { id: grafanaResponse.silenceId, manager: grafanaResponse.manager, @@ -31,7 +31,7 @@ const createHandler = async (request: Request, response: Response, next: NextFun updateTelemetryAuditEvent({ request, object: { id: sileneResponse?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id: sileneResponse.dataValues.id } }) } catch (err) { - const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -45,11 +45,11 @@ const transformSilences = async (silenceModel: any) => { const listHandler = async (request: Request, response: Response, next: NextFunction) => { try { const silences = await Silence.findAll(); - 
const count = _.get(silences, 'length'); + const count = _.get(silences, "length"); const transformedSilences = await Promise.all(silences.map(transformSilences)); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { transformedSilences, ...(count && { count }) } }); } catch (err) { - const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -62,7 +62,7 @@ const fetchHandler = async (request: Request, response: Response, next: NextFunc if (!silenceModel) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: transformedSilence }); } catch (err) { - const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -86,7 +86,7 @@ const updateHandler = async (request: Request, response: Response, next: NextFun const silenceResponse = await Silence.update(updatedSilence, { where: { id } }) ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { silenceResponse } }) } catch (err) { - const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -97,13 +97,13 @@ const deleteHandler = async (request: Request, response: Response, next: NextFun const silenceModel = await Silence.findOne({ where: { id } }); if (!silenceModel) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); const silenceObject = silenceModel?.toJSON(); - if (silenceObject?.status === 'expired') return next({ message: "Silence is already expired", statusCode: httpStatus.BAD_REQUEST }); + if (silenceObject?.status === "expired") return next({ message: "Silence is already expired", statusCode: httpStatus.BAD_REQUEST }); await deleteSilence(silenceObject); await silenceModel.destroy(); updateTelemetryAuditEvent({ request, object: { id, ...telemetryObject }, currentRecord: silenceObject }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id } }) } catch (err) { - const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = errorResponse(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } diff --git a/api-service/src/controllers/DataIngestion/DataIngestionController.ts b/api-service/src/controllers/DataIngestion/DataIngestionController.ts index 49e1ebf6..415849fe 100644 --- a/api-service/src/controllers/DataIngestion/DataIngestionController.ts +++ b/api-service/src/controllers/DataIngestion/DataIngestionController.ts @@ -29,7 +29,7 @@ const dataIn = async (req: Request, res: Response) => { try { const requestBody = req.body; const datasetId = req.params.datasetId.trim(); - + const isValidSchema = schemaValidation(requestBody, validationSchema) if 
(!isValidSchema?.isValid) { logger.error({ apiId, message: isValidSchema?.message, code: "DATA_INGESTION_INVALID_INPUT" }) @@ -68,18 +68,23 @@ const addMetadataToEvents = (datasetId: string, payload: any) => { const obsrvMeta = { syncts: now, flags: {}, timespans: {}, error: {}, source: source }; if (Array.isArray(validData)) { const payloadRef = validData.map((event: any) => { - event = _.set(event, "obsrv_meta", obsrvMeta); - event = _.set(event, "dataset", datasetId); - event = _.set(event, "msgid", mid); - return event + const payload = { + event, + "obsrv_meta": obsrvMeta, + "dataset": datasetId, + "msgid": mid + } + return payload; }) return payloadRef; } else { - _.set(validData, "msgid", mid); - _.set(validData, "obsrv_meta", obsrvMeta); - _.set(validData, "dataset", datasetId); - return validData + return ({ + "event": validData, + "obsrv_meta": obsrvMeta, + "dataset": datasetId, + "msgid": mid + }); } } diff --git a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts index b33b614e..9308dd71 100644 --- a/api-service/src/controllers/DatasetCopy/DatasetCopy.ts +++ b/api-service/src/controllers/DatasetCopy/DatasetCopy.ts @@ -18,7 +18,7 @@ const validateRequest = (req: Request) => { } } -const fetchDataset = async (req: Request, newDatasetId: string) => { +const fetchDataset = async (req: Request) => { const datasetId = _.get(req, "body.request.source.datasetId"); const isLive = _.get(req, "body.request.source.isLive"); @@ -39,10 +39,10 @@ const datasetCopy = async (req: Request, res: Response) => { validateRequest(req); const newDatasetId = _.get(req, "body.request.destination.datasetId"); - const dataset = await fetchDataset(req, newDatasetId); + const dataset = await fetchDataset(req); updateRecords(dataset, newDatasetId) const response = await datasetService.createDraftDataset(dataset).catch(err => { - if (err?.name === 'SequelizeUniqueConstraintError') { + if (err?.name === "SequelizeUniqueConstraintError") { throw obsrvError(newDatasetId, "DATASET_ALREADY_EXISTS", `Dataset with id ${newDatasetId} already exists`, "BAD_REQUEST", 400); } throw obsrvError(newDatasetId, "DATASET_COPY_FAILURE", `Failed to clone dataset`, "INTERNAL_SERVER_ERROR", 500); diff --git a/api-service/src/controllers/DatasetCopy/DatasetCopyHelper.ts b/api-service/src/controllers/DatasetCopy/DatasetCopyHelper.ts index de4c8087..7b50626c 100644 --- a/api-service/src/controllers/DatasetCopy/DatasetCopyHelper.ts +++ b/api-service/src/controllers/DatasetCopy/DatasetCopyHelper.ts @@ -6,13 +6,13 @@ const version = defaultDatasetConfig.version; export const updateRecords = (datasetRecord: Record, newDatasetId: string): void => { const dataset_id = newDatasetId; - _.set(datasetRecord, 'api_version', "v2") - _.set(datasetRecord, 'status', DatasetStatus.Draft) + _.set(datasetRecord, "api_version", "v2") + _.set(datasetRecord, "status", DatasetStatus.Draft) _.set(datasetRecord, "dataset_id", dataset_id) _.set(datasetRecord, "id", dataset_id) _.set(datasetRecord, "name", dataset_id) _.set(datasetRecord, "version_key", Date.now().toString()) - _.set(datasetRecord, 'version', version); + _.set(datasetRecord, "version", version); _.set(datasetRecord, "entry_topic", config.telemetry_service_config.kafka.topics.createDataset) _.set(datasetRecord, "router_config", { topic: newDatasetId }) } diff --git a/api-service/src/controllers/DatasetImport/DatasetImport.ts b/api-service/src/controllers/DatasetImport/DatasetImport.ts index d916ed2b..e390d8bd 100644 --- 
a/api-service/src/controllers/DatasetImport/DatasetImport.ts +++ b/api-service/src/controllers/DatasetImport/DatasetImport.ts @@ -25,9 +25,9 @@ const datasetImport = async (req: Request, res: Response) => { const importDataset = async (dataset: Record, overwrite: string | any) => { const dataset_id = _.get(dataset,"dataset_id") const response = await datasetService.createDraftDataset(dataset).catch(err => { return err }) - if (response?.name === 'SequelizeUniqueConstraintError') { + if (response?.name === "SequelizeUniqueConstraintError") { if (overwrite === "true") { - const overwriteRes = await datasetService.updateDraftDataset(dataset).catch(err=>{ + const overwriteRes = await datasetService.updateDraftDataset(dataset).catch(()=>{ throw obsrvError(dataset_id, "DATASET_IMPORT_FAILURE", `Failed to import dataset: ${dataset_id} as overwrite failed`, "INTERNAL_SERVER_ERROR", 500); }) return _.omit(overwriteRes, ["message"]) @@ -44,7 +44,7 @@ const importDataset = async (dataset: Record, overwrite: string | a const getResponseData = (ignoredConfigs: Record) => { const { ignoredConnectors, ignoredTransformations, ignoredDenorms } = ignoredConfigs; let successMsg = "Dataset is imported successfully"; - let partialIgnored: Record = {}; + const partialIgnored: Record = {}; if (ignoredConnectors.length || ignoredTransformations.length || ignoredDenorms.length) { successMsg = "Dataset is partially imported"; diff --git a/api-service/src/controllers/DatasetImport/DatasetImportHelper.ts b/api-service/src/controllers/DatasetImport/DatasetImportHelper.ts index 849e4fdd..7d06a196 100644 --- a/api-service/src/controllers/DatasetImport/DatasetImportHelper.ts +++ b/api-service/src/controllers/DatasetImport/DatasetImportHelper.ts @@ -34,7 +34,7 @@ export const datasetImportValidation = async (payload: Record): Pro throw obsrvError("", "DATASET_IMPORT_INVALID_CONFIGS", isRequestValid.message, "BAD_REQUEST", 400) } - let datasetConfig = payload.request; + const datasetConfig = payload.request; const connectors = _.get(datasetConfig, "connectors_config", []); const transformations = _.get(datasetConfig, "transformations_config", []); @@ -100,7 +100,7 @@ export const migrateExportedDatasetV1 = (requestPayload: Record) => const { dataset_id, timestamp_key = "", data_key = "", type: datasetType } = _.get(datasetPayload, "data.metadata") const type = datasetType === "master-dataset" ? 
DatasetType.master : DatasetType.event - let dataset: Record = { + const dataset: Record = { dataset_id, id: dataset_id, name: dataset_id, type, version_key: Date.now().toString(), api_version: "v2", diff --git a/api-service/src/controllers/DatasetRead/DatasetRead.ts b/api-service/src/controllers/DatasetRead/DatasetRead.ts index 28909ff3..2822e87b 100644 --- a/api-service/src/controllers/DatasetRead/DatasetRead.ts +++ b/api-service/src/controllers/DatasetRead/DatasetRead.ts @@ -11,16 +11,13 @@ export const apiId = "api.datasets.read"; export const errorCode = "DATASET_READ_FAILURE" // TODO: Move this to a config -const defaultFields = ["dataset_id", "name", "type", "status", "tags", "version", "api_version", "dataset_config"] +export const defaultFields = ["dataset_id", "name", "type", "status", "tags", "version", "api_version", "dataset_config"] const validateRequest = (req: Request) => { const { dataset_id } = req.params; - const fields = req.query.fields; - if (fields && typeof fields !== 'string') { - throw obsrvError(dataset_id, "DATASET_INVALID_FIELDS_VAL", `The specified fields [${fields}] in the query param is not a string.`, "BAD_REQUEST", 400); - } - const fieldValues = fields ? _.split(fields, ",") : []; + const { fields } = req.query; + const fieldValues = fields ? _.split(fields as string, ",") : []; const invalidFields = _.difference(fieldValues, Object.keys(DatasetDraft.getAttributes())); if (!_.isEmpty(invalidFields)) { throw obsrvError(dataset_id, "DATASET_INVALID_FIELDS", `The specified fields [${invalidFields}] in the dataset cannot be found.`, "BAD_REQUEST", 400); @@ -48,21 +45,21 @@ const datasetRead = async (req: Request, res: Response) => { } const readDraftDataset = async (datasetId: string, attributes: string[]): Promise => { - + const attrs = _.union(attributes, ["dataset_config", "api_version", "type", "id"]) const draftDataset = await datasetService.getDraftDataset(datasetId, attrs); - if(draftDataset) { // Contains a draft + if (draftDataset) { // Contains a draft const apiVersion = _.get(draftDataset, ["api_version"]); const dataset: any = (apiVersion === "v2") ? 
draftDataset : await datasetService.migrateDraftDataset(datasetId, draftDataset) - return _.pick(dataset, attributes); + return _.pick(dataset, attributes); } const liveDataset = await datasetService.getDataset(datasetId, undefined, true); - if(liveDataset) { + if (liveDataset) { const dataset = await datasetService.createDraftDatasetFromLive(liveDataset) - return _.pick(dataset, attributes); + return _.pick(dataset, attributes); } - + return null; } diff --git a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts index 375bb58b..8506afd3 100644 --- a/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts +++ b/api-service/src/controllers/DatasetStatusTransition/DatasetStatusTransition.ts @@ -92,10 +92,10 @@ const deleteDataset = async (dataset: Record) => { const readyForPublish = async (dataset: Record) => { - let draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id) + const draftDataset: any = await datasetService.getDraftDataset(dataset.dataset_id) let defaultConfigs: any = _.cloneDeep(defaultDatasetConfig) defaultConfigs = _.omit(defaultConfigs, ["router_config"]) - if (draftDataset?.type === 'master') { + if (draftDataset?.type === "master") { defaultConfigs = _.omit(defaultConfigs, "dataset_config.keys_config.data_key"); } _.mergeWith(draftDataset,defaultConfigs,draftDataset, (objValue, srcValue) => { @@ -112,7 +112,7 @@ const readyForPublish = async (dataset: Record) => { statusCode: 400 } } - _.set(draftDataset, 'status', DatasetStatus.ReadyToPublish) + _.set(draftDataset, "status", DatasetStatus.ReadyToPublish) await datasetService.updateDraftDataset(draftDataset) } @@ -139,7 +139,7 @@ const validateAndUpdateDenormConfig = async (draftDataset: Record) // 1. Check if there are denorm fields and dependent master datasets are published const denormConfig = _.get(draftDataset, "denorm_config") if(denormConfig && !_.isEmpty(denormConfig.denorm_fields)) { - const datasetIds = _.map(denormConfig.denorm_fields, 'dataset_id') + const datasetIds = _.map(denormConfig.denorm_fields, "dataset_id") if(_.includes(datasetIds, draftDataset.id)) { throw { code: "SELF_REFERENCING_MASTER_DATA", @@ -151,7 +151,7 @@ const validateAndUpdateDenormConfig = async (draftDataset: Record) const masterDatasets = await datasetService.findDatasets({id: datasetIds, type: "master"}, ["id", "status", "dataset_config", "api_version"]) const masterDatasetsStatus = _.map(denormConfig.denorm_fields, (denormField) => { const md = _.find(masterDatasets, (master) => { return denormField.dataset_id === master.id }) - let datasetStatus : Record = { + const datasetStatus : Record = { dataset_id: denormField.dataset_id, exists: (md) ? true : false, isLive: (md) ? 
md.status === "Live" : false, @@ -159,16 +159,16 @@ const validateAndUpdateDenormConfig = async (draftDataset: Record) } if(!_.isEmpty(md)){ if(md.api_version === "v2") - datasetStatus['denorm_field'] = _.merge(denormField, {redis_db: md.dataset_config.cache_config.redis_db}); + datasetStatus["denorm_field"] = _.merge(denormField, {redis_db: md.dataset_config.cache_config.redis_db}); else - datasetStatus['denorm_field'] = _.merge(denormField, {redis_db: md.dataset_config.redis_db}); + datasetStatus["denorm_field"] = _.merge(denormField, {redis_db: md.dataset_config.redis_db}); } return datasetStatus; }) const invalidMasters = _.filter(masterDatasetsStatus, {isLive: false}) if(_.size(invalidMasters) > 0) { - const invalidIds = _.map(invalidMasters, 'dataset_id') + const invalidIds = _.map(invalidMasters, "dataset_id") throw { code: "DEPENDENT_MASTER_DATA_NOT_LIVE", message: `The datasets with id:${invalidIds} are not in published status`, @@ -181,14 +181,14 @@ const validateAndUpdateDenormConfig = async (draftDataset: Record) draftDataset["denorm_config"] = { redis_db_host: defaultDatasetConfig.denorm_config.redis_db_host, redis_db_port: defaultDatasetConfig.denorm_config.redis_db_port, - denorm_fields: _.map(masterDatasetsStatus, 'denorm_field') + denorm_fields: _.map(masterDatasetsStatus, "denorm_field") } } } const updateMasterDataConfig = async (draftDataset: Record) => { - if (draftDataset.type === 'master') { - let dataset_config = _.get(draftDataset, "dataset_config") + if (draftDataset.type === "master") { + const dataset_config = _.get(draftDataset, "dataset_config") const datasetCacheConfig = _.get(defaultDatasetConfig, "dataset_config.cache_config") draftDataset.dataset_config = { ...dataset_config, cache_config: datasetCacheConfig } if (draftDataset.dataset_config.cache_config.redis_db === 0) { @@ -202,7 +202,7 @@ const updateMasterDataConfig = async (draftDataset: Record) => { } } const nextRedisDB = parseInt(_.get(results, "[0].nextval")) || 3; - _.set(draftDataset, 'dataset_config.cache_config.redis_db', nextRedisDB) + _.set(draftDataset, "dataset_config.cache_config.redis_db", nextRedisDB) } } } @@ -223,13 +223,13 @@ const canRetireIfMasterDataset = async (dataset: Record) => { const draftDatasets = await datasetService.findDraftDatasets({ status: [DatasetStatus.ReadyToPublish, DatasetStatus.Draft] }, ["denorm_config", "id", "status"]) || [] const allDatasets = _.union(liveDatasets, draftDatasets) const extractDenormFields = _.map(allDatasets, function(depDataset) { - return {dataset_id: _.get(depDataset, 'id'), status: _.get(depDataset, 'status'), denorm_datasets: _.map(_.get(depDataset, 'denorm_config.denorm_fields'), 'dataset_id')} + return {dataset_id: _.get(depDataset, "id"), status: _.get(depDataset, "status"), denorm_datasets: _.map(_.get(depDataset, "denorm_config.denorm_fields"), "dataset_id")} }) const deps = _.filter(extractDenormFields, function(depDS) { return _.includes(depDS.denorm_datasets, dataset.id)}) if (_.size(deps) > 0) { const denormErrMsg = `Failed to retire dataset as it is in use. 
Please retire or delete dependent datasets before retiring this dataset` - throw obsrvError(dataset.id, "DATASET_IN_USE", denormErrMsg, "BAD_REQUEST", 400, undefined, _.map(deps, function(o) { return _.omit(o, 'denorm_datasets')})) + throw obsrvError(dataset.id, "DATASET_IN_USE", denormErrMsg, "BAD_REQUEST", 400, undefined, _.map(deps, function(o) { return _.omit(o, "denorm_datasets")})) } } } diff --git a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts index 25c9a633..274ad3c4 100644 --- a/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts +++ b/api-service/src/controllers/DatasetUpdate/DatasetUpdate.ts @@ -9,6 +9,7 @@ import { datasetService } from "../../services/DatasetService"; import { schemaValidation } from "../../services/ValidationService"; import DatasetUpdate from "./DatasetUpdateValidationSchema.json"; import { obsrvError } from "../../types/ObsrvError"; +import logger from "../../logger"; export const apiId = "api.datasets.update"; export const invalidInputErrCode = "DATASET_UPDATE_INPUT_INVALID" @@ -25,6 +26,7 @@ const validateRequest = async (req: Request) => { const datasetBody = req.body.request const { dataset_id, version_key, ...rest } = datasetBody if (_.isEmpty(rest)) { + logger.error({ apiId, message: `Provide atleast one field in addition to the dataset_id:${dataset_id} and version_key:${version_key} to update the dataset` }) throw obsrvError(datasetId, "DATASET_UPDATE_NO_FIELDS", "Provide atleast one field in addition to the dataset_id to update the dataset", "BAD_REQUEST", 400) } @@ -63,7 +65,7 @@ const datasetUpdate = async (req: Request, res: Response) => { const mergeDraftDataset = (datasetModel: Model | null, datasetReq: any): Record => { - let dataset: Record = { + const dataset: Record = { version_key: Date.now().toString(), name: datasetReq.name || _.get(datasetModel, ["name"]), id: _.get(datasetModel, ["id"]) @@ -79,6 +81,7 @@ const mergeDraftDataset = (datasetModel: Model | null, datasetReq: any if(datasetReq.connectors_config) dataset["connectors_config"] = mergeConnectorsConfig(_.get(datasetModel, ["connectors_config"]), datasetReq.connectors_config) if(datasetReq.tags) dataset["tags"] = mergeTags(_.get(datasetModel, ["tags"]), datasetReq.tags) if(datasetReq.sample_data) dataset["sample_data"] = datasetReq.sample_data + if(datasetReq.type) dataset["type"] = datasetReq.type return dataset; } diff --git a/api-service/src/controllers/DatasetUpdate/DatasetUpdateValidationSchema.json b/api-service/src/controllers/DatasetUpdate/DatasetUpdateValidationSchema.json index 4dbc3e89..b31fe674 100644 --- a/api-service/src/controllers/DatasetUpdate/DatasetUpdateValidationSchema.json +++ b/api-service/src/controllers/DatasetUpdate/DatasetUpdateValidationSchema.json @@ -32,6 +32,10 @@ "version_key": { "type": "string" }, + "type": { + "type": "string", + "enum": ["event", "transaction", "master"] + }, "name": { "type": "string", "minLength": 1 @@ -199,7 +203,7 @@ } }, "additionalProperties": false - }, + }, "keys_config": { "type": "object", "properties": { diff --git a/api-service/src/controllers/GenerateDataSchema/GenerateDataSchema.ts b/api-service/src/controllers/GenerateDataSchema/GenerateDataSchema.ts index 309dfe39..a37e5d13 100644 --- a/api-service/src/controllers/GenerateDataSchema/GenerateDataSchema.ts +++ b/api-service/src/controllers/GenerateDataSchema/GenerateDataSchema.ts @@ -41,7 +41,7 @@ const schemaGenerate = (sample: Map[], config: Record) const schemaInference = new 
SchemaInference(); const schemaArrayValidator = new SchemaArrayValidator(); if (isJsonSchema) { - let result = process(sample, dataset) + const result = process(sample, dataset) result.schema = removeNonIndexColumns(result.schema) result.schema = removeFormats(result.schema) return result @@ -50,7 +50,7 @@ const schemaGenerate = (sample: Map[], config: Record) schema = schemaArrayValidator.validate(schema) const schemaCardinalityAnalyser = new SchemaCardinalityAnalyser(sample, schema) rollupInfo = schemaCardinalityAnalyser.analyse() - let result = process(schema, dataset) + const result = process(schema, dataset) result.schema = removeNonIndexColumns(result.schema) result.schema = removeFormats(result.schema) return result @@ -80,34 +80,34 @@ const checkJsonSchema = (sample: Map): boolean => { const removeNonIndexColumns = (schema: any) => { if (schema.properties) { - Object.entries(schema.properties).map(([key, property]: any) => { - _.unset(schema, 'required'); + Object.entries(schema.properties).map(([, property]: any) => { + _.unset(schema, "required"); removeNonIndexColumns(property) }); } else if (schema.items) { removeNonIndexColumns(schema.items) } if (Array.isArray(schema.required) && schema.required.length === 0) { - _.unset(schema, 'required'); + _.unset(schema, "required"); } return schema } const removeFormats = (schema: any) => { if (schema.properties) { - Object.entries(schema.properties).map(([key, property]: any) => { + Object.entries(schema.properties).map(([, property]: any) => { // Removing format to avoid schema validation issues - const isDateTypeField = ['date-time', 'date', 'epoch'].includes((property as any).format); - if (isDateTypeField && _.get(property, 'data_type') === 'string') { - _.set(property, 'data_type', _.get(property, 'format')); - } else if (isDateTypeField && _.get(property, 'data_type') === 'integer') { - _.set(property, 'data_type', 'epoch'); + const isDateTypeField = ["date-time", "date", "epoch"].includes((property as any).format); + if (isDateTypeField && _.get(property, "data_type") === "string") { + _.set(property, "data_type", _.get(property, "format")); + } else if (isDateTypeField && _.get(property, "data_type") === "integer") { + _.set(property, "data_type", "epoch"); } - _.unset(property, 'format'); + _.unset(property, "format"); removeFormats(property) }); } else if (schema.items) { - _.unset(schema.items, 'format'); + _.unset(schema.items, "format"); removeFormats(schema.items) } return schema diff --git a/api-service/src/controllers/NotificationChannel/Notification.ts b/api-service/src/controllers/NotificationChannel/Notification.ts index 93c5a875..f6db4fd4 100644 --- a/api-service/src/controllers/NotificationChannel/Notification.ts +++ b/api-service/src/controllers/NotificationChannel/Notification.ts @@ -4,7 +4,7 @@ import httpStatus from "http-status"; import createError from "http-errors"; import { ResponseHandler } from "../../helpers/ResponseHandler"; import { publishNotificationChannel, testNotificationChannel, updateNotificationChannel } from "../../services/managers"; -import _ from 'lodash'; +import _ from "lodash"; import { updateTelemetryAuditEvent } from "../../services/telemetry"; const telemetryObject = { type: "notificationChannel", ver: "1.0.0" }; @@ -16,7 +16,7 @@ const createHandler = async (request: Request, response: Response, next: NextFun updateTelemetryAuditEvent({ request, object: { id: notificationBody?.dataValues?.id, ...telemetryObject } }); ResponseHandler.successResponse(request, response, { status: 
httpStatus.OK, data: { id: notificationBody.dataValues.id } }) } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -29,13 +29,13 @@ const updateHandler = async (request: Request, response: Response, next: NextFun const notificationPayload = notificationPayloadModel?.toJSON(); if (!notificationPayload) return next({ message: httpStatus[httpStatus.NOT_FOUND], statusCode: httpStatus.NOT_FOUND }); updateTelemetryAuditEvent({ request, object: { id, ...telemetryObject }, currentRecord: notificationPayload }); - if (_.get(notificationPayload, 'status') === "live") { + if (_.get(notificationPayload, "status") === "live") { await updateNotificationChannel(notificationPayload); } await Notification.update({ ...updatedPayload, status: "draft" }, { where: { id } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id } }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -44,10 +44,10 @@ const listHandler = async (request: Request, response: Response, next: NextFunct try { const { limit, filters, offset } = request.body?.request || {}; const notifications = await Notification.findAll({ limit: limit, offset: offset, ...(filters && { where: filters }) }); - const count = _.get(notifications, 'length'); + const count = _.get(notifications, "length"); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { notifications, ...(count && { count }) } }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -61,7 +61,7 @@ const fetchHandler = async (request: Request, response: Response, next: NextFunc updateTelemetryAuditEvent({ request, object: { id, ...telemetryObject }, currentRecord: notificationPayload }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: notificationPayloadModel?.toJSON() }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -77,7 +77,7 @@ const retireHandler = async (request: Request, response: Response, next: NextFun await Notification.update({ status: "retired" }, { where: { id } }) ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id } }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -94,7 +94,7 @@ const publishHandler = async (request: Request, response: Response, next: NextFu Notification.update({ status: "live" }, { 
where: { id } }); ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id, status: "published" } }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } @@ -114,7 +114,7 @@ const testNotifationChannelHandler = async (request: Request, response: Response } ResponseHandler.successResponse(request, response, { status: httpStatus.OK, data: { id, status: "Notification Sent" } }); } catch (err) { - const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, 'message') || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) + const error = createError(httpStatus.INTERNAL_SERVER_ERROR, _.get(err, "message") || httpStatus[httpStatus.INTERNAL_SERVER_ERROR]) next(error); } } diff --git a/api-service/src/exceptions/SchemaGenerationException.ts b/api-service/src/exceptions/SchemaGenerationException.ts index 44805da4..fc545e81 100644 --- a/api-service/src/exceptions/SchemaGenerationException.ts +++ b/api-service/src/exceptions/SchemaGenerationException.ts @@ -2,7 +2,7 @@ export class SchemaGenerationException extends Error { statusCode: number; constructor(message: string, code: number) { super(message); - this.name = 'SchemaGenerationException'; + this.name = "SchemaGenerationException"; this.statusCode = code; } } \ No newline at end of file diff --git a/api-service/src/helpers/ResponseHandler.ts b/api-service/src/helpers/ResponseHandler.ts index 3bac60b7..99aee816 100644 --- a/api-service/src/helpers/ResponseHandler.ts +++ b/api-service/src/helpers/ResponseHandler.ts @@ -1,7 +1,7 @@ import { NextFunction, Request, Response } from "express"; import httpStatus from "http-status"; import { IResponse, Result } from "../types/DatasetModels"; -import { onFailure, onSuccess } from "../metrics/prometheus/helpers"; +import { onFailure, onObsrvFailure, onSuccess } from "../metrics/prometheus/helpers"; import moment from "moment"; import _ from "lodash"; import { ObsrvError } from "../types/ObsrvError"; @@ -42,7 +42,7 @@ const ResponseHandler = { const resmsgid = _.get(res, "resmsgid") const response = ResponseHandler.refactorResponse({ id, msgid, params: { status: "FAILED" }, responseCode: errCode || httpStatus["500_NAME"], resmsgid, result: data }) res.status(statusCode || httpStatus.INTERNAL_SERVER_ERROR).json({ ...response, error: { code, message } }); - entity && onFailure(req, res) + entity && onObsrvFailure(req,res,error) }, setApiId: (id: string) => (req: Request, res: Response, next: NextFunction) => { @@ -57,7 +57,7 @@ const ResponseHandler = { }, goneResponse: (req: Request, res: Response) => { - const { id, entity } = req as any; + const { id } = req as any; res.status(httpStatus.GONE).json({ id: id, ver: "v1", ts: Date.now(), params: { status: "FAILED", errmsg: "v1 APIs have been replace by /v2 APIs. 
Please refer to this link for more information" }, responseCode: httpStatus["410_NAME"] }) } } diff --git a/api-service/src/metrics/prometheus/helpers.ts b/api-service/src/metrics/prometheus/helpers.ts index 5e0442c1..05051461 100644 --- a/api-service/src/metrics/prometheus/helpers.ts +++ b/api-service/src/metrics/prometheus/helpers.ts @@ -2,6 +2,7 @@ import { NextFunction, Response } from "express"; import { incrementApiCalls, incrementFailedApiCalls, incrementSuccessfulApiCalls, setQueryResponseTime } from "."; import _ from "lodash"; import { Entity, Metric } from "../../types/MetricModel"; +import { ObsrvError } from "../../types/ObsrvError"; export const onRequest = ({ entity = Entity.Management }: any) => (req: any, res: Response, next: NextFunction) => { const startTime = Date.now(); @@ -19,7 +20,7 @@ export const onSuccess = (req: any, res: Response) => { const { duration = 0, metricLabels }: Metric = getMetricLabels(req, res) const { statusCode = 200 } = res const labels = { ...metricLabels, status: statusCode } - duration && setQueryResponseTime({ duration, labels }); + duration && setQueryResponseTime({ duration, labels }) incrementApiCalls({ labels }) incrementSuccessfulApiCalls({ labels }) } @@ -28,7 +29,7 @@ export const onFailure = (req: any, res: Response) => { const { duration = 0, metricLabels }: Metric = getMetricLabels(req, res) const { statusCode = 500 } = res const labels = { ...metricLabels, status: statusCode } - duration && setQueryResponseTime({ duration, labels }); + duration && setQueryResponseTime({ duration, labels }) incrementApiCalls({ labels }) incrementFailedApiCalls({ labels }); } @@ -37,18 +38,28 @@ export const onGone = (req: any, res: Response) => { const { duration = 0, metricLabels }: Metric = getMetricLabels(req, res) const { statusCode = 410 } = res const labels = { ...metricLabels, status: statusCode } - duration && setQueryResponseTime({ duration, labels }); + duration && setQueryResponseTime({ duration, labels }) + incrementApiCalls({ labels }) + incrementFailedApiCalls({ labels }); +} + +export const onObsrvFailure = (req: any, res: Response,error: ObsrvError) => { + const { duration = 0, metricLabels }: Metric = getMetricLabels(req, res) + metricLabels.dataset_id = error.datasetId + const { statusCode = 404 } = res + const labels = { ...metricLabels, status: statusCode } + duration && setQueryResponseTime({ duration, labels }) incrementApiCalls({ labels }) incrementFailedApiCalls({ labels }); } const getMetricLabels = (req: any, res: Response) => { - const { id, entity, url, startTime } = req; + const { id, entity, originalUrl, startTime } = req; const { statusCode = 200 } = res const request_size = req.socket.bytesRead const response_size = res.getHeader("content-length"); - const dataset_id = _.get(req, "dataset_id") || null + const dataset_id = _.get(req, ["body", "request", "dataset_id"]) || _.get(req, ["params", "dataset_id"]) || null const duration = getDuration(startTime); - const metricLabels = { entity, id, endpoint: url, dataset_id, status: statusCode, request_size, response_size } + const metricLabels = { entity, id, endpoint: originalUrl, dataset_id, status: statusCode, request_size, response_size } return { duration, metricLabels } } diff --git a/api-service/src/metrics/prometheus/index.ts b/api-service/src/metrics/prometheus/index.ts index da143f9f..173527ad 100644 --- a/api-service/src/metrics/prometheus/index.ts +++ b/api-service/src/metrics/prometheus/index.ts @@ -31,5 +31,5 @@ const metricsScrapeHandler = async (req: any, 
res: any, next: NextFunction) => { } } -export { metricsScrapeHandler, incrementApiCalls, incrementFailedApiCalls, setQueryResponseTime, incrementSuccessfulApiCalls }; +export { metricsScrapeHandler, incrementApiCalls, incrementFailedApiCalls, setQueryResponseTime, incrementSuccessfulApiCalls}; diff --git a/api-service/src/middlewares/errors.ts b/api-service/src/middlewares/errors.ts index 568aaa67..ce3200ab 100644 --- a/api-service/src/middlewares/errors.ts +++ b/api-service/src/middlewares/errors.ts @@ -4,15 +4,15 @@ import { ResponseHandler } from "../helpers/ResponseHandler"; import _ from "lodash"; import { ObsrvError } from "../types/ObsrvError"; -export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => { +export const errorHandler = (err: Error, req: Request, res: Response, _next: NextFunction) => { logger.error({ path: req.url, req: req.body , ...err }) - let errorMessage = {name: err.name, message: err.message}; + const errorMessage = {name: err.name, message: err.message}; ResponseHandler.errorResponse(errorMessage, req, res); }; -export const obsrvErrorHandler = (obsrvErr: ObsrvError, req: Request, res: Response, next: NextFunction) => { +export const obsrvErrorHandler = (obsrvErr: ObsrvError, req: Request, res: Response, _next: NextFunction) => { logger.error({ path: req.url, req: req.body, resmsgid: _.get(res, "resmsgid") , ...obsrvErr }) ResponseHandler.obsrvErrorResponse(obsrvErr, req, res); diff --git a/api-service/src/models/Alert.ts b/api-service/src/models/Alert.ts index 7f98cde5..e52952b8 100644 --- a/api-service/src/models/Alert.ts +++ b/api-service/src/models/Alert.ts @@ -1,6 +1,6 @@ import { DataTypes } from "sequelize"; import { sequelize } from "../connections/databaseConnection"; -import { v4 as uuidv4 } from 'uuid'; +import { v4 as uuidv4 } from "uuid"; export const Alert = sequelize.define("alerts", { id: { diff --git a/api-service/src/models/Metric.ts b/api-service/src/models/Metric.ts index 605ef291..ecc4cb27 100644 --- a/api-service/src/models/Metric.ts +++ b/api-service/src/models/Metric.ts @@ -1,6 +1,6 @@ import { DataTypes } from "sequelize"; import { sequelize } from "../connections/databaseConnection"; -import { v4 as uuidv4 } from 'uuid'; +import { v4 as uuidv4 } from "uuid"; export const Metrics = sequelize.define("metrics", { id: { diff --git a/api-service/src/models/Notification.ts b/api-service/src/models/Notification.ts index dc2bee5a..6203032c 100644 --- a/api-service/src/models/Notification.ts +++ b/api-service/src/models/Notification.ts @@ -1,6 +1,6 @@ import { DataTypes } from "sequelize"; import { sequelize } from "../connections/databaseConnection"; -import { v4 } from 'uuid'; +import { v4 } from "uuid"; export const Notification = sequelize.define("notificationchannel", { id: { diff --git a/api-service/src/models/Silence.ts b/api-service/src/models/Silence.ts index 3c272db1..d78d3f89 100644 --- a/api-service/src/models/Silence.ts +++ b/api-service/src/models/Silence.ts @@ -1,6 +1,6 @@ import { DataTypes } from "sequelize"; import { sequelize } from "../connections/databaseConnection"; -import { v4 as uuid } from 'uuid'; +import { v4 as uuid } from "uuid"; export const Silence = sequelize.define("silences", { id: { diff --git a/api-service/src/routes/AlertsRouter.ts b/api-service/src/routes/AlertsRouter.ts index 9efab170..01c843dd 100644 --- a/api-service/src/routes/AlertsRouter.ts +++ b/api-service/src/routes/AlertsRouter.ts @@ -1,5 +1,5 @@ import express from "express"; -import notificationHandler from 
'../controllers/NotificationChannel/Notification'; +import notificationHandler from "../controllers/NotificationChannel/Notification"; import { setDataToRequestObject } from "../middlewares/setDataToRequestObject"; import customAlertHandler from "../controllers/Alerts/Alerts"; import metricAliasHandler from "../controllers/Alerts/Metric"; diff --git a/api-service/src/routes/Router.ts b/api-service/src/routes/Router.ts index 3f2d623a..1faaafb0 100644 --- a/api-service/src/routes/Router.ts +++ b/api-service/src/routes/Router.ts @@ -27,16 +27,18 @@ import DatasetCopy from "../controllers/DatasetCopy/DatasetCopy"; import ConnectorsList from "../controllers/ConnectorsList/ConnectorsList"; import ConnectorsRead from "../controllers/ConnectorsRead/ConnectorsRead"; import DatasetImport from "../controllers/DatasetImport/DatasetImport"; +import {OperationType, telemetryAuditStart} from "../services/telemetry"; +import telemetryActions from "../telemetry/telemetryActions"; export const router = express.Router(); -router.post("/data/in/:datasetId", setDataToRequestObject("api.data.in"), onRequest({ entity: Entity.Data_in }), dataIn); +router.post("/data/in/:datasetId", setDataToRequestObject("api.data.in"), onRequest({ entity: Entity.Data_in }), telemetryAuditStart({action: telemetryActions.createDataset, operationType: OperationType.CREATE}), dataIn); router.post("/data/query/:datasetId", setDataToRequestObject("api.data.out"), onRequest({ entity: Entity.Data_out }), dataOut); -router.post("/datasets/create", setDataToRequestObject("api.datasets.create"), onRequest({ entity: Entity.Management }), DatasetCreate) -router.patch("/datasets/update", setDataToRequestObject("api.datasets.update"), onRequest({ entity: Entity.Management }), DatasetUpdate) -router.get("/datasets/read/:dataset_id", setDataToRequestObject("api.datasets.read"), onRequest({ entity: Entity.Management }), DatasetRead) -router.post("/datasets/list", setDataToRequestObject("api.datasets.list"), onRequest({ entity: Entity.Management }), DatasetList) -router.get("/data/exhaust/:datasetId", setDataToRequestObject("api.data.exhaust"), onRequest({ entity: Entity.Management }), dataExhaust); +router.post("/datasets/create", setDataToRequestObject("api.datasets.create"), onRequest({ entity: Entity.Management }),telemetryAuditStart({action: telemetryActions.createDataset, operationType: OperationType.CREATE}), DatasetCreate) +router.patch("/datasets/update", setDataToRequestObject("api.datasets.update"), onRequest({ entity: Entity.Management }),telemetryAuditStart({action: telemetryActions.updateDataset, operationType: OperationType.UPDATE}), DatasetUpdate) +router.get("/datasets/read/:dataset_id", setDataToRequestObject("api.datasets.read"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.readDataset, operationType: OperationType.GET}), DatasetRead) +router.post("/datasets/list", setDataToRequestObject("api.datasets.list"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.listDatasets, operationType: OperationType.LIST}), DatasetList) +router.get("/data/exhaust/:datasetId", setDataToRequestObject("api.data.exhaust"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.datasetExhaust, operationType: OperationType.GET}), dataExhaust); router.post("/template/create", setDataToRequestObject("api.query.template.create"), createQueryTemplate); router.get("/template/read/:templateId", setDataToRequestObject("api.query.template.read"), 
readQueryTemplate); router.delete("/template/delete/:templateId", setDataToRequestObject("api.query.template.delete"), deleteQueryTemplate); @@ -45,14 +47,14 @@ router.patch("/template/update/:templateId", setDataToRequestObject("api.query.t router.post("/schema/validate", setDataToRequestObject("api.schema.validator"), eventValidation); router.post("/template/query/:templateId", setDataToRequestObject("api.query.template.query"), queryTemplate); router.post("/files/generate-url", setDataToRequestObject("api.files.generate-url"), onRequest({ entity: Entity.Management }), GenerateSignedURL); -router.post("/datasets/status-transition", setDataToRequestObject("api.datasets.status-transition"), onRequest({ entity: Entity.Management }), DatasetStatusTansition); +router.post("/datasets/status-transition", setDataToRequestObject("api.datasets.status-transition"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.createTransformation, operationType: OperationType.CREATE}), DatasetStatusTansition); router.post("/datasets/health", setDataToRequestObject("api.dataset.health"), onRequest({ entity: Entity.Management }), datasetHealth); router.post("/datasets/reset/:datasetId", setDataToRequestObject("api.dataset.reset"), onRequest({ entity: Entity.Management }), datasetReset); router.post("/datasets/dataschema", setDataToRequestObject("api.datasets.dataschema"), onRequest({ entity: Entity.Management }), DataSchemaGenerator); router.get("/datasets/export/:dataset_id", setDataToRequestObject("api.datasets.export"), onRequest({ entity: Entity.Management }), DatasetExport); -router.post("/datasets/copy", setDataToRequestObject("api.datasets.copy"), onRequest({ entity: Entity.Management }), DatasetCopy); -router.post("/connectors/list", setDataToRequestObject("api.connectors.list"), onRequest({ entity: Entity.Management }), ConnectorsList); -router.get("/connectors/read/:id", setDataToRequestObject("api.connectors.read"), onRequest({entity: Entity.Management }), ConnectorsRead); +router.post("/datasets/copy", setDataToRequestObject("api.datasets.copy"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.copyDataset, operationType: OperationType.CREATE}), DatasetCopy); +router.post("/connectors/list", setDataToRequestObject("api.connectors.list"), onRequest({ entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.listConnectors, operationType: OperationType.GET}), ConnectorsList); +router.get("/connectors/read/:id", setDataToRequestObject("api.connectors.read"), onRequest({entity: Entity.Management }), telemetryAuditStart({action: telemetryActions.readConnectors, operationType: OperationType.GET}), ConnectorsRead); router.post("/datasets/import", setDataToRequestObject("api.datasets.import"), onRequest({ entity: Entity.Management }), DatasetImport); //Wrapper Service diff --git a/api-service/src/services/CipherService.ts b/api-service/src/services/CipherService.ts index b0b86c25..5f85198d 100644 --- a/api-service/src/services/CipherService.ts +++ b/api-service/src/services/CipherService.ts @@ -1,5 +1,5 @@ -import crypto from 'crypto'; -import { config } from '../configs/Config'; +import crypto from "crypto"; +import { config } from "../configs/Config"; class CipherService { public encrypt(data: string) { @@ -8,10 +8,10 @@ class CipherService { config.encryption_config.encryption_key, "", ) - const toEncrypt = Buffer.from(data, 'utf8'); + const toEncrypt = Buffer.from(data, "utf8"); let encryptedString = 
cipher.update(toEncrypt); encryptedString = Buffer.concat([encryptedString, cipher.final()]) - return encryptedString.toString('base64'); + return encryptedString.toString("base64"); } public decrypt(data: string) { @@ -20,7 +20,7 @@ class CipherService { config.encryption_config.encryption_key, "", ) - const encryptedText = Buffer.from(data, 'base64'); + const encryptedText = Buffer.from(data, "base64"); let decryptedString = decipher.update(encryptedText); decryptedString = Buffer.concat([decryptedString, decipher.final()]) return decryptedString.toString(); diff --git a/api-service/src/services/CloudServices/AWSStorageService.ts b/api-service/src/services/CloudServices/AWSStorageService.ts index e538298f..d83b65b1 100644 --- a/api-service/src/services/CloudServices/AWSStorageService.ts +++ b/api-service/src/services/CloudServices/AWSStorageService.ts @@ -67,7 +67,7 @@ export class AWSStorageService implements ICloudService { async getSignedUrls(container: any, filesList: any) { const signedUrlsPromises = this.generateSignedURLs(container, filesList) const signedUrlsList = await Promise.all(signedUrlsPromises); - const periodWiseFiles: any = {}; + const periodWiseFiles: { [key: string]: string[] } = {}; const files: any[] = []; // Formatting response signedUrlsList.map(async (fileObject) => { diff --git a/api-service/src/services/CloudServices/AzureStorageService.ts b/api-service/src/services/CloudServices/AzureStorageService.ts index c90af51e..b4472aac 100644 --- a/api-service/src/services/CloudServices/AzureStorageService.ts +++ b/api-service/src/services/CloudServices/AzureStorageService.ts @@ -130,7 +130,7 @@ export class AzureStorageService implements ICloudService { const signedUrlsPromises = this.generateSignedURLs(container, filesList) const signedUrlsList = await Promise.all(signedUrlsPromises); const files: any[] = [] - const periodWiseFiles: any = {}; + const periodWiseFiles: { [key: string]: string[] } = {}; // Formatting response signedUrlsList.map(async (fileObject) => { const fileDetails = _.keys(fileObject); diff --git a/api-service/src/services/CloudServices/GCPStorageService.ts b/api-service/src/services/CloudServices/GCPStorageService.ts index 90fcaeb0..b07e3e4e 100644 --- a/api-service/src/services/CloudServices/GCPStorageService.ts +++ b/api-service/src/services/CloudServices/GCPStorageService.ts @@ -74,7 +74,7 @@ export class GCPStorageService implements ICloudService { return generateSignedUrls() .then(signedUrlList => { - const periodWiseFiles: any = {}; + const periodWiseFiles: { [key: string]: string[] } = {}; const files: any = []; const signedUrls = _.flattenDeep(_.map(signedUrlList, url => { const values = _.values(url) diff --git a/api-service/src/services/DatasetHealthService.ts b/api-service/src/services/DatasetHealthService.ts index 92672c47..5ea8b6df 100644 --- a/api-service/src/services/DatasetHealthService.ts +++ b/api-service/src/services/DatasetHealthService.ts @@ -19,9 +19,9 @@ const prometheusQueries = { dedupFailure: "sum(sum_over_time(flink_taskmanager_job_task_operator_PipelinePreprocessorJob_DATASETID_dedup_failed_count[1d]))", denormFailure: "sum(sum_over_time(flink_taskmanager_job_task_operator_DenormalizerJob_DATASETID_denorm_failed[1d]))", transformationFailure: "sum(sum_over_time(flink_taskmanager_job_task_operator_TransformerJob_DATASETID_transform_failed_count[1d]))", - queriesCount: 'sum(sum_over_time(node_total_api_calls{entity="data-out", dataset_id="DATASETID"}[1d]))', - avgQueryResponseTimeInSec: 
'avg(avg_over_time(node_query_response_time{entity="data-out", dataset_id="DATASETID"}[1d]))/1000', - queriesFailedCount: 'sum(sum_over_time(node_failed_api_calls{entity="data-out", dataset_id="DATASETID"}[1d]))' + queriesCount: "sum(sum_over_time(node_total_api_calls{entity=\"data-out\", dataset_id=\"DATASETID\"}[1d]))", + avgQueryResponseTimeInSec: "avg(avg_over_time(node_query_response_time{entity=\"data-out\", dataset_id=\"DATASETID\"}[1d]))/1000", + queriesFailedCount: "sum(sum_over_time(node_failed_api_calls{entity=\"data-out\", dataset_id=\"DATASETID\"}[1d]))" } export const getDatasetHealth = async (categories: any, dataset: any) => { diff --git a/api-service/src/services/DatasetService.ts b/api-service/src/services/DatasetService.ts index 25b120f2..50b42668 100644 --- a/api-service/src/services/DatasetService.ts +++ b/api-service/src/services/DatasetService.ts @@ -25,7 +25,7 @@ class DatasetService { } findDatasets = async (where?: Record, attributes?: string[], order?: any): Promise => { - return Dataset.findAll({where, attributes, order, raw: true}) + return Dataset.findAll({ where, attributes, order, raw: true }) } getDuplicateDenormKey = (denormConfig: Record): Array => { @@ -39,9 +39,9 @@ class DatasetService { } checkDatasetExists = async (dataset_id: string): Promise => { - const draft = await DatasetDraft.findOne({ where: { dataset_id }, attributes:["id"], raw: true }); + const draft = await DatasetDraft.findOne({ where: { dataset_id }, attributes: ["id"], raw: true }); if (draft === null) { - const live = await Dataset.findOne({ where: { id: dataset_id }, attributes:["id"], raw: true }); + const live = await Dataset.findOne({ where: { id: dataset_id }, attributes: ["id"], raw: true }); return !(live === null) } else { return true; @@ -53,7 +53,7 @@ class DatasetService { } findDraftDatasets = async (where?: Record, attributes?: string[], order?: any): Promise => { - return DatasetDraft.findAll({where, attributes, order, raw: true}) + return DatasetDraft.findAll({ where, attributes, order, raw: true }) } getDraftTransformations = async (dataset_id: string, attributes?: string[]) => { @@ -68,7 +68,7 @@ class DatasetService { return DatasetSourceConfig.findAll({ where: { dataset_id }, attributes, raw: true }); } - getConnectors = async (dataset_id: string, attributes?: string[]): Promise> => { + getConnectors = async (dataset_id: string, attributes?: string[]): Promise> => { return ConnectorInstances.findAll({ where: { dataset_id }, attributes, raw: true }); } @@ -78,7 +78,7 @@ class DatasetService { updateDraftDataset = async (draftDataset: Record): Promise> => { - await DatasetDraft.update(draftDataset, { where: { id: draftDataset.id }}); + await DatasetDraft.update(draftDataset, { where: { id: draftDataset.id } }); const responseData = { message: "Dataset is updated successfully", id: draftDataset.id, version_key: draftDataset.version_key }; logger.info({ draftDataset, message: `Dataset updated successfully with id:${draftDataset.id}`, response: responseData }); return responseData; @@ -96,7 +96,7 @@ class DatasetService { const dataset_id = _.get(dataset, "id") const draftDataset = await this.migrateDatasetV1(dataset_id, dataset); const transaction = await sequelize.transaction(); - + try { await DatasetDraft.update(draftDataset, { where: { id: dataset_id }, transaction }); await DatasetTransformationsDraft.destroy({ where: { dataset_id }, transaction }); @@ -112,7 +112,7 @@ class DatasetService { migrateDatasetV1 = async (dataset_id: string, dataset: Record): Promise => 
{ const status = _.get(dataset, "status") - let draftDataset: Record = { + const draftDataset: Record = { api_version: "v2", version_key: Date.now().toString() } @@ -150,9 +150,9 @@ class DatasetService { return draftDataset; } - getTransformationCategory = (section: string):string => { + getTransformationCategory = (section: string): string => { - switch(section) { + switch (section) { case "pii": return "pii"; case "additionalFields": @@ -163,15 +163,15 @@ class DatasetService { } createDraftDatasetFromLive = async (dataset: Model) => { - - let draftDataset:any = _.omit(dataset, ["created_date", "updated_date", "published_date"]); - const dataset_config:any = _.get(dataset, "dataset_config"); - const api_version:any = _.get(dataset, "api_version"); - if(api_version === "v1") { + + const draftDataset: any = _.omit(dataset, ["created_date", "updated_date", "published_date"]); + const dataset_config: any = _.get(dataset, "dataset_config"); + const api_version: any = _.get(dataset, "api_version"); + if (api_version === "v1") { draftDataset["dataset_config"] = { - indexing_config: {olap_store_enabled: true, lakehouse_enabled: false, cache_enabled: (_.get(dataset, "type") === "master")}, - keys_config: {data_key: dataset_config.data_key, timestamp_key: dataset_config.timestamp_key}, - cache_config: {redis_db_host: dataset_config.redis_db_host, redis_db_port: dataset_config.redis_db_port, redis_db: dataset_config.redis_db} + indexing_config: { olap_store_enabled: true, lakehouse_enabled: false, cache_enabled: (_.get(dataset, "type") === "master") }, + keys_config: { data_key: dataset_config.data_key, timestamp_key: dataset_config.timestamp_key }, + cache_config: { redis_db_host: dataset_config.redis_db_host, redis_db_port: dataset_config.redis_db_port, redis_db: dataset_config.redis_db } } const connectors = await this.getConnectorsV1(draftDataset.dataset_id, ["id", "connector_type", "connector_config"]); draftDataset["connectors_config"] = _.map(connectors, (config) => { @@ -205,14 +205,14 @@ class DatasetService { } const denormConfig = _.get(draftDataset, "denorm_config") if (denormConfig && !_.isEmpty(denormConfig.denorm_fields)) { - const masterDatasets = await datasetService.findDatasets({ status: DatasetStatus.Live, type: "master" }, ["id","dataset_id", "status", "dataset_config", "api_version"]) + const masterDatasets = await datasetService.findDatasets({ status: DatasetStatus.Live, type: "master" }, ["id", "dataset_id", "status", "dataset_config", "api_version"]) if (_.isEmpty(masterDatasets)) { throw { code: "DEPENDENT_MASTER_DATA_NOT_FOUND", message: `The dependent dataset not found`, errCode: "NOT_FOUND", statusCode: 404 } } const updatedDenormFields = _.map(denormConfig.denorm_fields, field => { const { redis_db, denorm_out_field, denorm_key } = field let masterConfig = _.find(masterDatasets, data => _.get(data, "dataset_config.cache_config.redis_db") === redis_db) - if(!masterConfig){ + if (!masterConfig) { masterConfig = _.find(masterDatasets, data => _.get(data, "dataset_config.redis_db") === redis_db) } if (_.isEmpty(masterConfig)) { @@ -225,8 +225,8 @@ class DatasetService { draftDataset["version_key"] = Date.now().toString() draftDataset["version"] = _.add(_.get(dataset, ["version"]), 1); // increment the dataset version draftDataset["status"] = DatasetStatus.Draft - await DatasetDraft.create(draftDataset); - return await this.getDraftDataset(draftDataset.dataset_id); + const result = await DatasetDraft.create(draftDataset); + return _.get(result, "dataValues") } 
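Note on the hunk above: when copying a live v1 dataset into a draft, createDraftDatasetFromLive folds the flat v1 dataset_config into the v2 indexing/keys/cache layout before saving. A minimal standalone sketch of that mapping follows, assuming a hypothetical LegacyDatasetConfig type in place of the untyped Record the service actually passes around; it is illustrative only and not part of the patch.

// Illustrative sketch — mirrors the v1 -> v2 dataset_config mapping applied in createDraftDatasetFromLive.
// LegacyDatasetConfig and toV2DatasetConfig are hypothetical names introduced here for clarity.
interface LegacyDatasetConfig {
    data_key: string;
    timestamp_key: string;
    redis_db_host: string;
    redis_db_port: number;
    redis_db: number;
}

const toV2DatasetConfig = (config: LegacyDatasetConfig, isMasterDataset: boolean) => ({
    // Migrated v1 datasets keep the OLAP store enabled and the lakehouse disabled;
    // the cache is enabled only for master datasets.
    indexing_config: { olap_store_enabled: true, lakehouse_enabled: false, cache_enabled: isMasterDataset },
    keys_config: { data_key: config.data_key, timestamp_key: config.timestamp_key },
    cache_config: { redis_db_host: config.redis_db_host, redis_db_port: config.redis_db_port, redis_db: config.redis_db }
});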
getNextRedisDBIndex = async () => { @@ -238,12 +238,12 @@ class DatasetService { const { id } = dataset const transaction = await sequelize.transaction() try { - await DatasetTransformationsDraft.destroy({ where: { dataset_id: id } , transaction}) - await DatasetSourceConfigDraft.destroy({ where: { dataset_id: id } , transaction}) - await DatasourceDraft.destroy({ where: { dataset_id: id } , transaction}) - await DatasetDraft.destroy({ where: { id } , transaction}) + await DatasetTransformationsDraft.destroy({ where: { dataset_id: id }, transaction }) + await DatasetSourceConfigDraft.destroy({ where: { dataset_id: id }, transaction }) + await DatasourceDraft.destroy({ where: { dataset_id: id }, transaction }) + await DatasetDraft.destroy({ where: { id }, transaction }) await transaction.commit() - } catch (err:any) { + } catch (err: any) { await transaction.rollback() throw obsrvError(dataset.id, "FAILED_TO_DELETE_DATASET", err.message, "SERVER_ERROR", 500, err) } @@ -255,18 +255,18 @@ class DatasetService { try { await Dataset.update({ status: DatasetStatus.Retired }, { where: { id: dataset.id }, transaction }); await DatasetSourceConfig.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); - await Datasource.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id } , transaction}); - await DatasetTransformations.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id } , transaction}); + await Datasource.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); + await DatasetTransformations.update({ status: DatasetStatus.Retired }, { where: { dataset_id: dataset.id }, transaction }); await transaction.commit(); await this.deleteDruidSupervisors(dataset); - } catch(err:any) { + } catch (err: any) { await transaction.rollback(); throw obsrvError(dataset.id, "FAILED_TO_RETIRE_DATASET", err.message, "SERVER_ERROR", 500, err); } } findDatasources = async (where?: Record, attributes?: string[], order?: any): Promise => { - return Datasource.findAll({where, attributes, order, raw: true}) + return Datasource.findAll({ where, attributes, order, raw: true }) } private deleteDruidSupervisors = async (dataset: Record) => { @@ -290,26 +290,26 @@ class DatasetService { const indexingConfig = draftDataset.dataset_config.indexing_config; const transaction = await sequelize.transaction() try { - await DatasetDraft.update(draftDataset, { where: { id: draftDataset.id } , transaction}) - if(indexingConfig.olap_store_enabled) { + await DatasetDraft.update(draftDataset, { where: { id: draftDataset.id }, transaction }) + if (indexingConfig.olap_store_enabled) { await this.createDruidDataSource(draftDataset, transaction); } - if(indexingConfig.lakehouse_enabled) { + if (indexingConfig.lakehouse_enabled) { const liveDataset = await this.getDataset(draftDataset.dataset_id, ["id", "api_version"], true); - if(liveDataset && liveDataset.api_version === "v2") { + if (liveDataset && liveDataset.api_version === "v2") { await this.updateHudiDataSource(draftDataset, transaction) } else { await this.createHudiDataSource(draftDataset, transaction) } } await transaction.commit() - } catch(err:any) { + } catch (err: any) { await transaction.rollback() throw obsrvError(draftDataset.id, "FAILED_TO_PUBLISH_DATASET", err.message, "SERVER_ERROR", 500, err); } await executeCommand(draftDataset.id, "PUBLISH_DATASET"); - + } private createDruidDataSource = async (draftDataset: Record, transaction: Transaction) => { 
@@ -317,8 +317,8 @@ class DatasetService { const allFields = await tableGenerator.getAllFields(draftDataset, "druid"); const draftDatasource = this.createDraftDatasource(draftDataset, "druid"); const ingestionSpec = tableGenerator.getDruidIngestionSpec(draftDataset, allFields, draftDatasource.datasource_ref); - _.set(draftDatasource, 'ingestion_spec', ingestionSpec) - await DatasourceDraft.create(draftDatasource, {transaction}) + _.set(draftDatasource, "ingestion_spec", ingestionSpec) + await DatasourceDraft.create(draftDatasource, { transaction }) } private createHudiDataSource = async (draftDataset: Record, transaction: Transaction) => { @@ -326,26 +326,26 @@ class DatasetService { const allFields = await tableGenerator.getAllFields(draftDataset, "hudi"); const draftDatasource = this.createDraftDatasource(draftDataset, "hudi"); const ingestionSpec = tableGenerator.getHudiIngestionSpecForCreate(draftDataset, allFields, draftDatasource.datasource_ref); - _.set(draftDatasource, 'ingestion_spec', ingestionSpec) - await DatasourceDraft.create(draftDatasource, {transaction}) + _.set(draftDatasource, "ingestion_spec", ingestionSpec) + await DatasourceDraft.create(draftDatasource, { transaction }) } private updateHudiDataSource = async (draftDataset: Record, transaction: Transaction) => { const allFields = await tableGenerator.getAllFields(draftDataset, "hudi"); const draftDatasource = this.createDraftDatasource(draftDataset, "hudi"); - const dsId = _.join([draftDataset.dataset_id,"events","hudi"], "_") - const liveDatasource = await Datasource.findOne({where: {id: dsId}, attributes: ["ingestion_spec"], raw: true}) as unknown as Record + const dsId = _.join([draftDataset.dataset_id, "events", "hudi"], "_") + const liveDatasource = await Datasource.findOne({ where: { id: dsId }, attributes: ["ingestion_spec"], raw: true }) as unknown as Record const ingestionSpec = tableGenerator.getHudiIngestionSpecForUpdate(draftDataset, liveDatasource?.ingestion_spec, allFields, draftDatasource?.datasource_ref); - _.set(draftDatasource, 'ingestion_spec', ingestionSpec) - await DatasourceDraft.create(draftDatasource, {transaction}) + _.set(draftDatasource, "ingestion_spec", ingestionSpec) + await DatasourceDraft.create(draftDatasource, { transaction }) } - private createDraftDatasource = (draftDataset: Record, type: string) : Record => { + private createDraftDatasource = (draftDataset: Record, type: string): Record => { - const datasource = _.join([draftDataset.dataset_id,"events"], "_") + const datasource = _.join([draftDataset.dataset_id, "events"], "_") return { - id: _.join([datasource,type], '_'), + id: _.join([datasource, type], "_"), datasource: draftDataset.dataset_id, dataset_id: draftDataset.dataset_id, datasource_ref: datasource, @@ -356,15 +356,15 @@ class DatasetService { } export const getLiveDatasetConfigs = async (dataset_id: string) => { - - let datasetRecord = await datasetService.getDataset(dataset_id, undefined, true) + + const datasetRecord = await datasetService.getDataset(dataset_id, undefined, true) const transformations = await datasetService.getTransformations(dataset_id, ["field_key", "transformation_function", "mode"]) const connectors = await datasetService.getConnectors(dataset_id, ["id", "connector_id", "connector_config", "operations_config"]) - if(!_.isEmpty(transformations)){ + if (!_.isEmpty(transformations)) { datasetRecord["transformations_config"] = transformations } - if(!_.isEmpty(connectors)){ + if (!_.isEmpty(connectors)) { datasetRecord["connectors_config"] = 
connectors } return datasetRecord; diff --git a/api-service/src/services/DatasourceService.ts b/api-service/src/services/DatasourceService.ts index 96c8f294..812a5611 100644 --- a/api-service/src/services/DatasourceService.ts +++ b/api-service/src/services/DatasourceService.ts @@ -1,37 +1,4 @@ -import _ from "lodash"; -import { ingestionConfig } from "../configs/IngestionConfig"; import { Datasource } from "../models/Datasource"; -import { DatasourceDraft } from "../models/DatasourceDraft"; -import { DatasetTransformationsDraft } from "../models/TransformationDraft"; -import { DatasetTransformations } from "../models/Transformation"; -import { DatasetStatus } from "../types/DatasetModels"; -import { Dataset } from "../models/Dataset"; - -export const DEFAULT_TIMESTAMP = { - indexValue: "obsrv_meta.syncts", - rootPath: "obsrv_meta", - label: "syncts", - path: "obsrv_meta.properties.syncts", -} - -const defaultTsObject = [ - { - "column": DEFAULT_TIMESTAMP.rootPath, - "type": "object", - "key": `properties.${DEFAULT_TIMESTAMP.rootPath}`, - "ref": `properties.${DEFAULT_TIMESTAMP.rootPath}`, - "isModified": true, - "required": false, - }, - { - "column": DEFAULT_TIMESTAMP.label, - "type": "integer", - "key": `properties.${DEFAULT_TIMESTAMP.path}`, - "ref": `properties.${DEFAULT_TIMESTAMP.path}`, - "isModified": true, - "required": false, - } -] export const getDatasourceList = async (datasetId: string, raw = false) => { const dataSource = await Datasource.findAll({ @@ -43,320 +10,14 @@ export const getDatasourceList = async (datasetId: string, raw = false) => { return dataSource } -export const getDraftDatasourceList = async (datasetId: string, raw = false) => { - const dataSource = await DatasourceDraft.findAll({ - where: { - dataset_id: datasetId, - }, - raw: raw - }); - return dataSource -} - -export const getDatasource = async (datasetId: string) => { - const dataSource = await Datasource.findOne({ - where: { - dataset_id: datasetId, - }, - }); - return dataSource -} - -export const getUpdatedSchema = async (configs: Record) => { - const { id, transformation_config, denorm_config, data_schema, action, indexCol = ingestionConfig.indexCol["Event Arrival Time"] } = configs - const existingTransformations = await DatasetTransformationsDraft.findAll({ where: { dataset_id: id }, raw: true }) - let resultantTransformations: any[] = [] - if (action === "edit") { - const toDeleteTransformations = _.compact(_.map(transformation_config, config => { - if (_.includes(["update", "remove"], _.get(config, "action"))) { - return _.get(config, ["value", "field_key"]) - } - })) - const updatedExistingTransformations = _.compact(_.map(existingTransformations, configs => { - if (!_.includes(toDeleteTransformations, _.get(configs, "field_key"))) { - return configs - } - })) || [] - const newTransformations = _.compact(_.map(transformation_config, config => { - if (_.includes(["update", "add"], _.get(config, "action"))) { - return config - } - })) || [] - resultantTransformations = [...updatedExistingTransformations, ...newTransformations] - } - if (action === "create") { - resultantTransformations = transformation_config || [] - } - let denormFields = _.get(denorm_config, "denorm_fields") - let updatedColumns = flattenSchema(data_schema) - const transformedFields = _.filter(resultantTransformations, field => _.get(field, ["metadata", "section"]) === "transformation") - let additionalFields = _.filter(resultantTransformations, field => _.get(field, ["metadata", "section"]) === "additionalFields") - 
updatedColumns = _.map(updatedColumns, (item) => { - const transformedData = _.find(transformedFields, { field_key: item.column }); - if (transformedData) { - const data = _.get(transformedData, "metadata") - return { - ...item, - type: _.get(data, "_transformedFieldSchemaType") || "string", - isModified: true, - ...data - }; - } - return item; - }); - denormFields = _.size(denormFields) ? await formatDenormFields(denormFields) : [] - additionalFields = formatNewFields(additionalFields, null); - let ingestionPayload = { schema: [...updatedColumns, ...denormFields, ...additionalFields] }; - if (indexCol === ingestionConfig.indexCol["Event Arrival Time"]) - ingestionPayload = { schema: [...updatedColumns, ...defaultTsObject, ...denormFields, ...additionalFields] }; - const updatedIngestionPayload = updateJSONSchema(data_schema, ingestionPayload) - return updatedIngestionPayload -} - -export const updateJSONSchema = (schema: Record, updatePayload: Record) => { - const clonedOriginal = _.cloneDeep(schema); - const modifiedRows = _.filter(_.get(updatePayload, "schema"), ["isModified", true]); - _.forEach(modifiedRows, modifiedRow => { - const { isDeleted = false, required = false, key, type, description = null, arrival_format, data_type, isModified = false } = modifiedRow; - if (isDeleted) { - deleteItemFromSchema(clonedOriginal, `${key}`, false); - } else { - updateTypeInSchema(clonedOriginal, `${key}`, type, true); - updateFormatInSchema(clonedOriginal, `${key}`, arrival_format); - updateDataTypeInSchema(clonedOriginal, `${key}`, data_type, isModified); - descriptionInSchema(clonedOriginal, `${key}`, description); - changeRequiredPropertyInSchema(clonedOriginal, `${key}`, required); - } - }); - return clonedOriginal; -} - -const updateDataTypeInSchema = (schema: Record, schemaPath: string, data_type: string, isModified: boolean) => { - const existing = _.get(schema, schemaPath); - if (isModified) { - const validDateFormats = ["date-time", "date", "epoch"] - if (!_.includes(validDateFormats, data_type)) { - _.unset(existing, "format"); - } else { - data_type === "epoch" ? 
_.set(existing, "format", "date-time") : _.set(existing, "format", data_type) - } - } - _.set(schema, schemaPath, { ...existing, data_type }); -} - - -const descriptionInSchema = (schema: Record, schemaPath: string, description: string) => { - const existing = _.get(schema, schemaPath); - if (description) _.set(schema, schemaPath, { ...existing, description }); -} - -const updateFormatInSchema = (schema: Record, schemaPath: string, arrival_format: string) => { - const existing = _.get(schema, schemaPath); - _.set(schema, schemaPath, { ...existing, arrival_format }); -} - -const updateTypeInSchema = (schema: Record, schemaPath: string, type: string, removeSuggestions: boolean = false) => { - const existing = _.get(schema, schemaPath); - if (removeSuggestions) { - _.unset(existing, "suggestions"); - _.unset(existing, "oneof"); - _.unset(existing, "arrivalOneOf") - } - _.set(schema, schemaPath, { ...existing, type }); -} - - -const deleteItemFromSchema = (schema: Record, schemaKeyPath: string, required: boolean) => { - if (_.has(schema, schemaKeyPath)) { - _.unset(schema, schemaKeyPath); - changeRequiredPropertyInSchema(schema, schemaKeyPath, required); - } -} - -const getPathToRequiredKey = (schema: Record, schemaKeyPath: string, schemaKey: string) => { - const regExStr = `properties.${schemaKey}`; - const regex = `(.${regExStr})`; - const [pathToRequiredKey] = _.split(schemaKeyPath, new RegExp(regex, "g")); - if (pathToRequiredKey === schemaKeyPath) return "required" - return `${pathToRequiredKey}.required` -} - -const changeRequiredPropertyInSchema = (schema: Record, schemaKeyPath: string, required: boolean) => { - const schemaKey = _.last(_.split(schemaKeyPath, ".")); - if (schemaKey) { - const pathToRequiredProperty = getPathToRequiredKey(schema, schemaKeyPath, schemaKey); - const existingRequiredKeys = _.get(schema, pathToRequiredProperty) || []; - if (required) { - // add to required property. - const updatedRequiredKeys = _.includes(existingRequiredKeys, schemaKey) ? existingRequiredKeys : [...existingRequiredKeys, schemaKey]; - _.set(schema, pathToRequiredProperty, updatedRequiredKeys); - } else { - // remove from required property. 
- const updatedRequiredKeys = _.difference(existingRequiredKeys, [schemaKey]); - if (_.size(updatedRequiredKeys) > 0) - _.set(schema, pathToRequiredProperty, updatedRequiredKeys); - } - } -} -export const formatNewFields = (newFields: Record, dataMappings: any) => { - if (newFields.length > 0) { - const final = _.map(newFields, (item: any) => { - const columnKey = _.join(_.map(_.split(_.get(item, "field_key"), "."), payload => `properties.${payload}`), ".") - return { - "column": item.field_key, - "type": _.get(item, ["metadata", "_transformedFieldSchemaType"]) || "string", - "key": columnKey, - "ref": columnKey, - "isModified": true, - "required": false, - "data_type": _.get(item, ["metadata", "_transformedFieldDataType"]), - ...(dataMappings && { "arrival_format": getArrivalFormat(_.get(item, "_transformedFieldSchemaType"), dataMappings) || _.get(item, "arrival_format") }) - } - }); - return final; - } - else return []; -} -const getArrivalFormat = (data_type: string | undefined, dataMappings: Record) => { - let result = null; - if (data_type) { - _.forEach(dataMappings, (value, key) => { - if (_.includes(_.get(value, "arrival_format"), data_type)) { - result = key; - } - }); - } - return result; -} -export const updateDenormDerived = (schemaColumns: any, columns: any, fixedPrefix: string): any[] => { - const result = _.map(columns, (column: any) => { - const isExistingColumn = _.find(schemaColumns, ["column", column.field_key]); - if (isExistingColumn) { - return { - ...isExistingColumn, - "type": _.get(column, "metadata._transformedFieldSchemaType"), - "data_type": _.get(column, "metadata._transformedFieldDataType"), - "required": false, - "isModified": true, - ..._.get(column, "metadata"), - }; - } else { - const columnKey = _.join(_.map(_.split(_.get(column, "field_key"), "."), payload => `properties.${payload}`), ".") - return { - "column": `${fixedPrefix}.${column.field_key}`, - "type": _.get(column, "metadata._transformedFieldSchemaType"), - "key": `properties.${fixedPrefix}.${columnKey}`, - "ref": `properties.${fixedPrefix}.${columnKey}`, - "required": false, - "isModified": true, - "data_type": _.get(column, "metadata._transformedFieldDataType"), - ..._.get(column, "metadata"), - }; - } - }); - return _.concat(schemaColumns, result); -} -const processDenormConfigurations = async (item: any) => { - const denormFieldsData: any = []; - const redis_db = _.get(item, "redis_db"); - const denorm_out_field = _.get(item, "denorm_out_field"); - const dataset: any = await Dataset.findOne({ where: { "dataset_config": { "redis_db": redis_db } }, raw: true }) || [] - const transformations = _.size(dataset) ? 
await DatasetTransformations.findAll({ where: { status: DatasetStatus.Live, dataset_id: _.get(dataset, "dataset_id") }, raw: true }) : [] - let schema = flattenSchema(_.get(dataset, "data_schema"), denorm_out_field, true); - schema = updateDenormDerived(schema, _.get(transformations, "data.result"), denorm_out_field,); - denormFieldsData.push({ - "column": denorm_out_field, - "type": "object", - "key": `properties.${denorm_out_field}`, - "ref": `properties.${denorm_out_field}`, - "isModified": true, - "required": false, - "arrival_format": "object", - "data_type": "object", - }); - denormFieldsData.push(...schema); - return denormFieldsData; -} -export const formatDenormFields = async (denormFields: any) => { - if (denormFields.length > 0) { - const final = _.map(denormFields, (item: any) => { - return processDenormConfigurations(item); - }); - return Promise.all(final).then((data) => _.flatten(data)); - } - else return []; -} -const addRequiredFields = ( - type: string, - result: Record, - schemaObject: Record, - required: string[], -) => { - const requiredFields = _.get(schemaObject, "required") || []; - _.map(result, (item) => { - if (type === "array" || type === "object") { - if (required && required.includes(item.key.replace("properties.", ""))) item.required = true; - else if (requiredFields.includes(item.key.replace("properties.", ""))) item.required = true; - else item.required = false; - } - else if (requiredFields.includes(item.key.replace("properties.", ""))) item.required = true; - else item.required = false; - }) -} -const flatten = (schemaObject: Record, rollup: boolean = false) => { - const schemaObjectData = schemaObject; - const result: Record = {}; - const getKeyName = (prefix: string, key: string) => prefix ? `${prefix}.${key}` : key; - const flattenHelperFn = (propertySchema: Record, prefix: string, ref: string, arrayChild = false) => { - const { type, properties, items, required = false, ...rest } = propertySchema || {}; - if (type === "object" && properties) { - if (prefix !== "" && !arrayChild) result[prefix] = { type, key: ref, ref, properties, items, parent: true, ...rest }; - for (const [key, value] of Object.entries(properties)) { - flattenHelperFn(value as Record, getKeyName(prefix, key), getKeyName(ref, `properties.${key}`)); - } - } else if (type === "array" && items && !rollup) { - if (prefix !== "") result[prefix] = { type, key: ref, ref, properties, items, parent: true, ...rest }; - if (["array", "object"].includes(items?.type)) { - flattenHelperFn(items, prefix, getKeyName(ref, `items`), true) - } else { - result[prefix] = { type, key: ref, ref, properties, items, ...rest }; - } - } else { - result[prefix] = { type, key: ref, ref, properties, items, ...rest }; - } - addRequiredFields(type, result, schemaObjectData, required); - } - flattenHelperFn(schemaObjectData, "", ""); - return result; -} -export const flattenSchema = (schema: Record, fixedPrefix?: string | undefined, modified?: boolean, rollup: boolean = false) => { - const flattend = flatten(schema, rollup); - if (fixedPrefix) - return _.map(flattend, (value, key) => { - const { key: propertyKey, ref } = value; - const keySplit = _.split(propertyKey, "."); - const refSplit = _.split(ref, "."); - keySplit.splice(1, 0, fixedPrefix, "properties"); - refSplit.splice(1, 0, fixedPrefix, "properties"); - const data = { - column: `${fixedPrefix}.${key}`, - ...value, - key: keySplit.join("."), - ref: refSplit.join("."), - }; - if (modified) { data.isModified = true; data.required = false; } - return data; - 
}); - return _.map(flattend, (value, key) => ({ column: key, ...value })); -} \ No newline at end of file diff --git a/api-service/src/services/HealthService.ts b/api-service/src/services/HealthService.ts index 273ceb00..c10a9ea6 100644 --- a/api-service/src/services/HealthService.ts +++ b/api-service/src/services/HealthService.ts @@ -1,9 +1,9 @@ -import { Request, Response, NextFunction } from "express" +import { Request, Response } from "express" import { ResponseHandler } from "../helpers/ResponseHandler" class HealthService { - async checkDruidHealth(req: Request, res: Response, next: NextFunction) { + async checkDruidHealth(req: Request, res: Response) { ResponseHandler.successResponse(req, res, { status: 200, data: {} }) } diff --git a/api-service/src/services/SchemaGenerateService/ConfigSuggester.ts b/api-service/src/services/SchemaGenerateService/ConfigSuggester.ts index 14a006d4..8834129d 100644 --- a/api-service/src/services/SchemaGenerateService/ConfigSuggester.ts +++ b/api-service/src/services/SchemaGenerateService/ConfigSuggester.ts @@ -31,20 +31,20 @@ export class ConfigSuggestor { private analyzeConflicts(conflicts: ConflictTypes[]): DataSetConfig { const typeFormatsConflict: ConflictTypes[] = _.filter(conflicts, (o) => !_.isEmpty(o.formats)); - const ingestionConfig: IngestionConfig = this.ingestionConfig(typeFormatsConflict) + const ingestionConfig: IngestionConfig = this.ingestionConfig() const processingConfig: DatasetProcessing = this.processingConfig(typeFormatsConflict) return { "indexConfiguration": ingestionConfig, "processing": processingConfig } } - private ingestionConfig(conflicts: ConflictTypes[]): any { + private ingestionConfig(): any { return { "index": Object.assign(ingestionConfig.indexCol), "rollupSuggestions": this.rollupInfo }; } private processingConfig(conflicts: ConflictTypes[]): any { let dedupKeys = _.filter(conflicts, (o) => _.upperCase(o.formats.resolution["type"]) === "DEDUP").map(v => v.formats.property) let matchedDedupFields = [] - let dedupOrderProperty: string = "cardinality" - let dedupOrder: any = "desc" + const dedupOrderProperty: string = "cardinality" + const dedupOrder: any = "desc" if (!_.isUndefined(this.rollupInfo.summary)) { for (const key of Object.keys(this.rollupInfo.summary)) { if (!this.rollupInfo.summary[key].index) { diff --git a/api-service/src/services/SchemaGenerateService/DataSchemaService.ts b/api-service/src/services/SchemaGenerateService/DataSchemaService.ts index 8a77344d..67bde9e7 100644 --- a/api-service/src/services/SchemaGenerateService/DataSchemaService.ts +++ b/api-service/src/services/SchemaGenerateService/DataSchemaService.ts @@ -5,34 +5,34 @@ import moment from "moment"; import { SchemaGenerationException } from "../../exceptions/SchemaGenerationException"; const DATE_FORMATS = [ - 'MM/DD/YYYY','DD/MM/YYYY', 'YYYY-MM-DD', 'YYYY-DD-MM', 'YYYY/MM/DD', - 'DD-MM-YYYY', 'MM-DD-YYYY', 'MM-DD-YYYY HH:mm:ss', 'YYYY/MM/DD HH:mm:ss', - 'YYYY-MM-DD HH:mm:ss', 'YYYY-DD-MM HH:mm:ss', 'DD/MM/YYYY HH:mm:ss', - 'DD-MM-YYYY HH:mm:ss', 'MM-DD-YYYY HH:mm:ss.SSS', 'YYYY-MM-DD HH:mm:ss.SSS', - 'YYYY-DD-MM HH:mm:ss.SSS', 'YYYY/MM/DD HH:mm:ss.SSS', 'DD/MM/YYYY HH:mm:ss.SSS', - 'DD-MM-YYYY HH:mm:ss.SSS', 'DD-MM-YYYYTHH:mm:ss.SSSZ', 'YYYY-MM-DDTHH:mm:ss.SSSZ', - 'YYYY-DD-MMTHH:mm:ss.SSSZ', 'YYYY/MM/DDTHH:mm:ss.SSSZ', 'DD/MM/YYYYTHH:mm:ss.SSSZ', - 'YYYY-DD-MMTHH:mm:ss.SSS', 'YYYY/MM/DDTHH:mm:ss.SSS', 'DD/MM/YYYYTHH:mm:ss.SSS', - 'MM-DD-YYYYTHH:mm:ss.SSSZ', 'DD-MM-YYYYTHH:mm:ssZ', 'YYYY-MM-DDTHH:mm:ssZ', - 'YYYY-DD-MMTHH:mm:ssZ', 
'YYYY/MM/DDTHH:mm:ssZ', 'DD/MM/YYYYTHH:mm:ssZ', 'MM-DD-YYYYTHH:mm:ssZ', - 'MM-DD-YYYYTHH:mm:ss', 'DD-MM-YYYYTHH:mm:ss', 'YYYY-MM-DDTHH:mm:ss', 'YYYY-DD-MMTHH:mm:ss', - 'YYYY/MM/DDTHH:mm:ss', 'DD/MM/YYYYTHH:mm:ss', 'DD-MM-YYYY HH:mm:ss.SSSZ', 'YYYY-MM-DD HH:mm:ss.SSSZ', - 'YYYY-DD-MM HH:mm:ss.SSSZ', 'YYYY/MM/DD HH:mm:ss.SSSZ', 'DD/MM/YYYY HH:mm:ss.SSSZ', - 'MM-DD-YYYY HH:mm:ss.SSSZ', 'DD-MM-YYYY HH:mm:ssZ', 'YYYY-MM-DD HH:mm:ssZ', 'YYYY-DD-MM HH:mm:ssZ', - 'YYYY/MM/DD HH:mm:ssZ', 'DD/MM/YYYY HH:mm:ssZ', 'MM-DD-YYYY HH:mm:ssZ', 'DD-MM-YYYYTHH:mm:ss.SSSSSSZ', - 'YYYY-MM-DDTHH:mm:ss.SSSSSSZ', 'YYYY-DD-MMTHH:mm:ss.SSSSSSZ', 'YYYY/MM/DDTHH:mm:ss.SSSSSSZ', - 'DD/MM/YYYYTHH:mm:ss.SSSSSSZ', 'MM-DD-YYYYTHH:mm:ss.SSSSSSZ', 'DD/MM/YYYYTHH:mm:ss.SSSSSS', - 'YYYY-DD-MMTHH:mm:ss.SSSSSS', 'YYYY/MM/DDTHH:mm:ss.SSSSSS', 'YYYY-MM-DDTHH:mm:ss.SSSSSS', - 'MM-DD-YYYYTHH:mm:ss.SSSSSS', 'DD-MM-YYYYTHH:mm:ss.SSSSSS', 'DD-MM-YYYY HH:mm:ss.SSSSSS', - 'YYYY-MM-DD HH:mm:ss.SSSSSS', 'YYYY-DD-MM HH:mm:ss.SSSSSS', 'YYYY/MM/DD HH:mm:ss.SSSSSS', - 'DD/MM/YYYY HH:mm:ss.SSSSSS', 'MM-DD-YYYY HH:mm:ss.SSSSSS', 'DD-MM-YYYY HH:mm:ss.SSSSSSZ', - 'YYYY-MM-DDTHH:mm:ss.SSSSSSSSSZ', 'YYYY-DD-MMTHH:mm:ss.SSSSSSSSSZ', 'YYYY/MM/DDTHH:mm:ss.SSSSSSSSSZ', - 'DD/MM/YYYYTHH:mm:ss.SSSSSSSSSZ', 'MM-DD-YYYYTHH:mm:ss.SSSSSSSSSZ', 'DD/MM/YYYYTHH:mm:ss.SSSSSSSSS', - 'YYYY-DD-MMTHH:mm:ss.SSSSSSSSS', 'YYYY/MM/DDTHH:mm:ss.SSSSSSSSS', 'YYYY-MM-DDTHH:mm:ss.SSSSSSSSS', - 'MM-DD-YYYYTHH:mm:ss.SSSSSSSSS', 'DD-MM-YYYYTHH:mm:ss.SSSSSSSSS', 'DD-MM-YYYY HH:mm:ss.SSSSSSSSS', - 'YYYY-MM-DD HH:mm:ss.SSSSSSSSS', 'YYYY-DD-MM HH:mm:ss.SSSSSSSSS', 'YYYY/MM/DD HH:mm:ss.SSSSSSSSS', - 'DD/MM/YYYY HH:mm:ss.SSSSSSSSS', 'MM-DD-YYYY HH:mm:ss.SSSSSSSSS', 'DD-MM-YYYY HH:mm:ss.SSSSSSSSSZ', - 'DD-MM-YYYYTHH:mm:ss.SSSSSSSSSZ', + "MM/DD/YYYY","DD/MM/YYYY", "YYYY-MM-DD", "YYYY-DD-MM", "YYYY/MM/DD", + "DD-MM-YYYY", "MM-DD-YYYY", "MM-DD-YYYY HH:mm:ss", "YYYY/MM/DD HH:mm:ss", + "YYYY-MM-DD HH:mm:ss", "YYYY-DD-MM HH:mm:ss", "DD/MM/YYYY HH:mm:ss", + "DD-MM-YYYY HH:mm:ss", "MM-DD-YYYY HH:mm:ss.SSS", "YYYY-MM-DD HH:mm:ss.SSS", + "YYYY-DD-MM HH:mm:ss.SSS", "YYYY/MM/DD HH:mm:ss.SSS", "DD/MM/YYYY HH:mm:ss.SSS", + "DD-MM-YYYY HH:mm:ss.SSS", "DD-MM-YYYYTHH:mm:ss.SSSZ", "YYYY-MM-DDTHH:mm:ss.SSSZ", + "YYYY-DD-MMTHH:mm:ss.SSSZ", "YYYY/MM/DDTHH:mm:ss.SSSZ", "DD/MM/YYYYTHH:mm:ss.SSSZ", + "YYYY-DD-MMTHH:mm:ss.SSS", "YYYY/MM/DDTHH:mm:ss.SSS", "DD/MM/YYYYTHH:mm:ss.SSS", + "MM-DD-YYYYTHH:mm:ss.SSSZ", "DD-MM-YYYYTHH:mm:ssZ", "YYYY-MM-DDTHH:mm:ssZ", + "YYYY-DD-MMTHH:mm:ssZ", "YYYY/MM/DDTHH:mm:ssZ", "DD/MM/YYYYTHH:mm:ssZ", "MM-DD-YYYYTHH:mm:ssZ", + "MM-DD-YYYYTHH:mm:ss", "DD-MM-YYYYTHH:mm:ss", "YYYY-MM-DDTHH:mm:ss", "YYYY-DD-MMTHH:mm:ss", + "YYYY/MM/DDTHH:mm:ss", "DD/MM/YYYYTHH:mm:ss", "DD-MM-YYYY HH:mm:ss.SSSZ", "YYYY-MM-DD HH:mm:ss.SSSZ", + "YYYY-DD-MM HH:mm:ss.SSSZ", "YYYY/MM/DD HH:mm:ss.SSSZ", "DD/MM/YYYY HH:mm:ss.SSSZ", + "MM-DD-YYYY HH:mm:ss.SSSZ", "DD-MM-YYYY HH:mm:ssZ", "YYYY-MM-DD HH:mm:ssZ", "YYYY-DD-MM HH:mm:ssZ", + "YYYY/MM/DD HH:mm:ssZ", "DD/MM/YYYY HH:mm:ssZ", "MM-DD-YYYY HH:mm:ssZ", "DD-MM-YYYYTHH:mm:ss.SSSSSSZ", + "YYYY-MM-DDTHH:mm:ss.SSSSSSZ", "YYYY-DD-MMTHH:mm:ss.SSSSSSZ", "YYYY/MM/DDTHH:mm:ss.SSSSSSZ", + "DD/MM/YYYYTHH:mm:ss.SSSSSSZ", "MM-DD-YYYYTHH:mm:ss.SSSSSSZ", "DD/MM/YYYYTHH:mm:ss.SSSSSS", + "YYYY-DD-MMTHH:mm:ss.SSSSSS", "YYYY/MM/DDTHH:mm:ss.SSSSSS", "YYYY-MM-DDTHH:mm:ss.SSSSSS", + "MM-DD-YYYYTHH:mm:ss.SSSSSS", "DD-MM-YYYYTHH:mm:ss.SSSSSS", "DD-MM-YYYY HH:mm:ss.SSSSSS", + "YYYY-MM-DD HH:mm:ss.SSSSSS", "YYYY-DD-MM HH:mm:ss.SSSSSS", "YYYY/MM/DD HH:mm:ss.SSSSSS", + "DD/MM/YYYY 
HH:mm:ss.SSSSSS", "MM-DD-YYYY HH:mm:ss.SSSSSS", "DD-MM-YYYY HH:mm:ss.SSSSSSZ", + "YYYY-MM-DDTHH:mm:ss.SSSSSSSSSZ", "YYYY-DD-MMTHH:mm:ss.SSSSSSSSSZ", "YYYY/MM/DDTHH:mm:ss.SSSSSSSSSZ", + "DD/MM/YYYYTHH:mm:ss.SSSSSSSSSZ", "MM-DD-YYYYTHH:mm:ss.SSSSSSSSSZ", "DD/MM/YYYYTHH:mm:ss.SSSSSSSSS", + "YYYY-DD-MMTHH:mm:ss.SSSSSSSSS", "YYYY/MM/DDTHH:mm:ss.SSSSSSSSS", "YYYY-MM-DDTHH:mm:ss.SSSSSSSSS", + "MM-DD-YYYYTHH:mm:ss.SSSSSSSSS", "DD-MM-YYYYTHH:mm:ss.SSSSSSSSS", "DD-MM-YYYY HH:mm:ss.SSSSSSSSS", + "YYYY-MM-DD HH:mm:ss.SSSSSSSSS", "YYYY-DD-MM HH:mm:ss.SSSSSSSSS", "YYYY/MM/DD HH:mm:ss.SSSSSSSSS", + "DD/MM/YYYY HH:mm:ss.SSSSSSSSS", "MM-DD-YYYY HH:mm:ss.SSSSSSSSS", "DD-MM-YYYY HH:mm:ss.SSSSSSSSSZ", + "DD-MM-YYYYTHH:mm:ss.SSSSSSSSSZ", ]; export class SchemaInference { @@ -49,17 +49,17 @@ export class SchemaInference { if (extracted) { return this.inferSchema(extracted); } else { - throw new SchemaGenerationException('Unable to extract the batch data.', httpStatus.BAD_REQUEST); + throw new SchemaGenerationException("Unable to extract the batch data.", httpStatus.BAD_REQUEST); } } else { - throw new SchemaGenerationException('Extraction key not found.', httpStatus.BAD_REQUEST); + throw new SchemaGenerationException("Extraction key not found.", httpStatus.BAD_REQUEST); } }) } private validateEpoch(schema: any, sample: any, path: any) { Object.entries(sample).map(([key, value]) => { - if (value && typeof value == 'object') { + if (value && typeof value == "object") { this.validateEpoch(schema, value, `${path}.${key}.properties`) } const { isValidTimestamp, type } = this.isValidTimestamp(value); @@ -76,9 +76,9 @@ export class SchemaInference { isValidTimestamp(value: any) { const dataType = typeof value; + const epochRegex = /^\d+$/ig; switch (dataType) { - case 'string': - const epochRegex = /^\d+$/ig; + case "string": if(epochRegex.test(value)){ const parsedValue = parseInt(value, 10); // Timestamp should be greater than Jan 01 2000 00:00:00 UTC/GMT in seconds @@ -90,7 +90,7 @@ export class SchemaInference { isValidTimestamp: moment(value, DATE_FORMATS, true).isValid(), type: "date-time" } - case 'number': + case "number": // Timestamp should be greater than Jan 01 2000 00:00:00 UTC/GMT in seconds return { isValidTimestamp: value >= 946684800 && moment(value).isValid(), diff --git a/api-service/src/services/SchemaGenerateService/SchemaAnalyser.ts b/api-service/src/services/SchemaGenerateService/SchemaAnalyser.ts index 14ee0e3d..a3a24b1d 100644 --- a/api-service/src/services/SchemaGenerateService/SchemaAnalyser.ts +++ b/api-service/src/services/SchemaGenerateService/SchemaAnalyser.ts @@ -25,8 +25,8 @@ export class SchemaAnalyser { const result: FlattenSchema[] = _.flatten(this.schemas.map(element => { return this.flattenSchema(new Map(Object.entries(element))); })) - const conflicts = Object.entries(_.groupBy(result, 'path')).map(([key, value]) => { - return this.getSchemaConflictTypes(this.getOccurance(value, key)) + const conflicts = Object.entries(_.groupBy(result, "path")).map(([, value]) => { + return this.getSchemaConflictTypes(this.getOccurance(value)) }) return _.filter(conflicts, obj => (!_.isEmpty(obj.schema) || !_.isEmpty(obj.required) || !_.isEmpty(obj.formats))) } @@ -43,7 +43,7 @@ export class SchemaAnalyser { let schemaConflicts = this.findDataTypeConflicts(occuranceObj,) const requiredConflicts = (_.size(this.schemas) > this.minimumSchemas) ? 
this.findOptionalPropConflicts(occuranceObj) : {} const formatConflict = this.findFormatConflicts(occuranceObj) - if(_.size(_.keys(schemaConflicts)) > 0) { + if (_.size(_.keys(schemaConflicts)) > 0) { schemaConflicts = { ...schemaConflicts, path: updatedPath } } return { "schema": schemaConflicts, "required": requiredConflicts, "formats": formatConflict, "absolutePath": updatedPath } @@ -53,14 +53,14 @@ export class SchemaAnalyser { * Method to get the data type conflicts */ private findDataTypeConflicts(occurance: Occurance): Conflict { - if(_.includes(_.keys(occurance.dataType), "null") && _.size(occurance.dataType) === 1) { + if (_.includes(_.keys(occurance.dataType), "null") && _.size(occurance.dataType) === 1) { return { type: constants.SCHEMA_RESOLUTION_TYPE.NULL_FIELD, // Should be used only to return the name of field instead of path // property: Object.keys(occurance.property)[0], property: _.replace(Object.keys(occurance.path)[0], "$.", ""), conflicts: occurance.dataType, - resolution: { "value": occurance.dataType, "type": constants.SCHEMA_RESOLUTION_TYPE.NULL_FIELD }, + resolution: { "value": occurance.dataType, "type": constants.SCHEMA_RESOLUTION_TYPE.NULL_FIELD }, values: _.keys(occurance.dataType), severity: constants.SEVERITY["MUST-FIX"], path: _.replace(Object.keys(occurance.absolutePath)[0], "$.", ""), @@ -123,7 +123,7 @@ export class SchemaAnalyser { */ private findOptionalPropConflicts(occurance: Occurance): Conflict { const maxOccurance: number = 1 - const requiredCount = _.map(occurance.property, (value, key) => { + const requiredCount = _.map(occurance.property, (value) => { return value })[0] @@ -148,9 +148,9 @@ export class SchemaAnalyser { * * Method to get the occurance of the given key from the given object */ - private getOccurance(arrayOfObjects: object[], key: string): Occurance { - const result = _(arrayOfObjects).flatMap(obj => _.toPairs(obj)).groupBy(([key, value]) => key) - .mapValues(group => _.countBy(group, ([key, value]) => value)).value(); + private getOccurance(arrayOfObjects: object[]): Occurance { + const result = _(arrayOfObjects).flatMap(obj => _.toPairs(obj)).groupBy(([key]) => key) + .mapValues(group => _.countBy(group, ([, value]) => value)).value(); return { property: result.property, dataType: result.dataType, isRequired: result.isRequired, path: result.path, absolutePath: result.absolutePath, format: result.formate }; } @@ -158,26 +158,26 @@ export class SchemaAnalyser { * Method to iterate over the schema object in a recursive and flatten the required properties */ public flattenSchema(sample: Map): FlattenSchema[] { - let array = new Array(); + const array: any[] = []; const recursive = (data: any, path: string, requiredProps: string[], schemaPath: string) => { _.map(data, (value, key) => { - let isMultipleTypes = ''; - if(_.has(value, 'anyOf')) isMultipleTypes = 'anyOf'; - if(_.has(value, 'oneOf')) isMultipleTypes = 'oneOf'; - if (_.isPlainObject(value) && (_.has(value, 'properties'))) { - array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value['format'])) - recursive(value['properties'], `${path}.${key}`, value['required'], `${schemaPath}.properties.${key}`); - } else if(_.isPlainObject(value)) { - if (value.type === 'array') { - array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value['format'])) - if (_.has(value, 'items') && _.has(value["items"], 'properties')) { - 
recursive(value["items"]['properties'], `${path}.${key}[*]`, value["items"]['required'], `${schemaPath}.properties.${key}.items`); + let isMultipleTypes = ""; + if (_.has(value, "anyOf")) isMultipleTypes = "anyOf"; + if (_.has(value, "oneOf")) isMultipleTypes = "oneOf"; + if (_.isPlainObject(value) && (_.has(value, "properties"))) { + array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value["format"])) + recursive(value["properties"], `${path}.${key}`, value["required"], `${schemaPath}.properties.${key}`); + } else if (_.isPlainObject(value)) { + if (value.type === "array") { + array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value["format"])) + if (_.has(value, "items") && _.has(value["items"], "properties")) { + recursive(value["items"]["properties"], `${path}.${key}[*]`, value["items"]["required"], `${schemaPath}.properties.${key}.items`); } - } else if(isMultipleTypes != '') { - array.push(this._flattenSchema(key, value[isMultipleTypes][0].type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value['format'])) - array.push(this._flattenSchema(key, value[isMultipleTypes][1].type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value['format'])) + } else if (isMultipleTypes != "") { + array.push(this._flattenSchema(key, value[isMultipleTypes][0].type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value["format"])) + array.push(this._flattenSchema(key, value[isMultipleTypes][1].type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value["format"])) } else { - array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value['format'])) + array.push(this._flattenSchema(key, value.type, _.includes(requiredProps, key), `${path}.${key}`, `${schemaPath}.properties.${key}`, value["format"])) } } }) diff --git a/api-service/src/services/SchemaGenerateService/SchemaArrayValidator.ts b/api-service/src/services/SchemaGenerateService/SchemaArrayValidator.ts index cc52309e..4bd19c91 100644 --- a/api-service/src/services/SchemaGenerateService/SchemaArrayValidator.ts +++ b/api-service/src/services/SchemaGenerateService/SchemaArrayValidator.ts @@ -4,7 +4,7 @@ export class SchemaArrayValidator { public validate(schemas: any) { _.map(schemas, (schema: any, index: number) => { Object.entries(schema).map(([schemaKey, schemaValue]) => { - if (typeof schemaValue === 'object') { + if (typeof schemaValue === "object") { this.handleNestedObject(index, `${schemaKey}`, schemaValue, schemas); } }); @@ -13,15 +13,15 @@ export class SchemaArrayValidator { } private checkForInvalidArray(value: any) { - if (_.has(value, 'items') && _.has(value, 'properties')) - _.unset(value, 'properties'); + if (_.has(value, "items") && _.has(value, "properties")) + _.unset(value, "properties"); } private handleNestedObject(index: any, path: string, value: any, schemas: any) { Object.entries(value).map(([nestedKey, nestedValue]: any) => { - if (typeof nestedValue === 'object') { + if (typeof nestedValue === "object") { this.handleNestedObject(index, `${path}.${nestedKey}`, nestedValue, schemas) - } else if (nestedValue.type === 'array' && (nestedValue.items != false)) { + } else if (nestedValue.type === "array" && (nestedValue.items != false)) { 
this.checkForInvalidArray(nestedValue); let isValidArray = true; if(_.isEqual(_.get(schemas[0], `${path}.${nestedKey}.type`), _.get(schemas[index], `${path}.${nestedKey}.type`))) { @@ -33,14 +33,14 @@ export class SchemaArrayValidator { if (!isValidArray) { this.deleteItemsAndSetAdditionalProperties(schemas, `${path}.${nestedKey}`) } - } else if (nestedValue.type === 'array' && (nestedValue.items == false)) { + } else if (nestedValue.type === "array" && (nestedValue.items == false)) { this.deleteItemsAndSetAdditionalProperties(schemas, `${path}.${nestedKey}`) } }) } private deleteItemsAndSetAdditionalProperties(schemas: any, path: string) { - _.map((schemas), (schema: any, index: number) => { + _.map((schemas), (schema: any) => { if (!isUndefined(_.get(schema, path))) { _.unset(schema, `${path}`) _.set(schema, `${path}.type`, "array"); diff --git a/api-service/src/services/SchemaGenerateService/SchemaGeneratorUtils.ts b/api-service/src/services/SchemaGenerateService/SchemaGeneratorUtils.ts index 832ce7a5..99f15ea2 100644 --- a/api-service/src/services/SchemaGenerateService/SchemaGeneratorUtils.ts +++ b/api-service/src/services/SchemaGenerateService/SchemaGeneratorUtils.ts @@ -6,21 +6,21 @@ import { UniqueValues, FieldSchema, RollupSummary } from "../../types/SchemaMode export const generateRollupSummary = (uniqueValues: UniqueValues) => { const summary: RollupSummary = {}; Object.entries(uniqueValues).map(([field, value]) => { - let data: Record = {}; + const data: Record = {}; _.map(value, (item: string) => { if (!_.has(data, [field, item])) _.set(data, [field, item], 1); else data[field][item] += 1; }); const resultData: Record = {}; _.map(_.keys(data), (path: string) => { - Object.entries(data[path]).map(([key, value]: any) => { + Object.entries(data[path]).map(([, value]: any) => { const totalValue = _.sum(_.values(data[path])); const ratio = Math.round((value / totalValue) * 100); if (!_.has(resultData, path)) _.set(resultData, path, ratio); else if (ratio > _.get(resultData, path)) _.set(resultData, path, ratio); }); - let fieldName = parseSchemaPath(path); + const fieldName = parseSchemaPath(path); summary[fieldName] = { path: `$.${path}`, cardinality: 100 - _.get(resultData, path), diff --git a/api-service/src/services/SchemaGenerateService/SchemaHandler.ts b/api-service/src/services/SchemaGenerateService/SchemaHandler.ts index ad3bb713..45d76c9c 100644 --- a/api-service/src/services/SchemaGenerateService/SchemaHandler.ts +++ b/api-service/src/services/SchemaGenerateService/SchemaHandler.ts @@ -13,7 +13,7 @@ export const dataMappingPaths = { "number": "number.store_format.number.jsonSchema", "object": "object.store_format.object.jsonSchema", "array": "array.store_format.array.jsonSchema", -} +} export class SchemaHandler { private typeToMethod = { @@ -39,36 +39,36 @@ export class SchemaHandler { } private updateDataTypes(schema: any, conflict: ConflictTypes): any { - const { absolutePath, schema: { resolution: { value } } } = conflict; + const { absolutePath, schema: { resolution } } = conflict; return _.set(schema, `${absolutePath}`, { ...schema[absolutePath], ...{ - type: conflict.schema.resolution["value"], + type: resolution.value, oneof: conflict.schema.values.map(key => ({ type: key })), } }); } private setNulltype(schema: any, conflict: ConflictTypes): any { - const { absolutePath, schema: { resolution: { value } } } = conflict; + const { absolutePath } = conflict; const dataTypes: any = []; - _.forEach(DataMappings, (valueItem, keyItem) => { - _.forEach(_.get(valueItem, 
'store_format'), (subValue, subKey) => { + _.forEach(DataMappings, (valueItem) => { + _.forEach(_.get(valueItem, "store_format"), (subValue) => { if (!_.find(dataTypes, ["type", subValue["jsonSchema"]])) dataTypes.push({ type: subValue["jsonSchema"] }) }) }); const arrivalDataTypes: any = _.keys(DataMappings).map((key: any) => ({ type: key })); - _.set(schema, `${absolutePath}.type`, 'null'); + _.set(schema, `${absolutePath}.type`, "null"); _.set(schema, `${absolutePath}.arrivalOneOf`, arrivalDataTypes); return _.set(schema, `${absolutePath}.oneof`, dataTypes); } private updateRequiredProp(schema: any, value: ConflictTypes): any { - const absolutePath = value.absolutePath.replace(value.required.property, value.required.property.replace('.', '$')) - const subStringArray: string[] = _.split(absolutePath, '.'); - const subString: string = _.join(_.slice(subStringArray, 0, subStringArray.length - 2), '.'); + const absolutePath = value.absolutePath.replace(value.required.property, value.required.property.replace(".", "$")) + const subStringArray: string[] = _.split(absolutePath, "."); + const subString: string = _.join(_.slice(subStringArray, 0, subStringArray.length - 2), "."); const path: string = _.isEmpty(subString) ? "required" : `${subString}.required` const requiredList: string[] = _.get(schema, path) const newProperty: string = value.required.property @@ -77,7 +77,7 @@ export class SchemaHandler { } private getArrivalSuggestions(schema: any, fieldData: any, property: any, type: string) { - let arrivalSuggestions: any = []; + const arrivalSuggestions: any = []; const types = _.get(fieldData, type); types && types.map((item: any) => { const storeFormat = _.get(dataMappingPaths, item.type); @@ -86,7 +86,7 @@ export class SchemaHandler { if (arrivalSuggestions.length > 0) _.set(schema, `${property}.arrivalOneOf`, arrivalSuggestions); return; - }; + } private getArrivalFormat(schema: any, fieldData: any, property: any, type: string) { const types = _.get(fieldData, type); @@ -105,11 +105,11 @@ export class SchemaHandler { const arrivalConflictExists = _.filter(suggestions, (suggestion) => _.has(suggestion, "arrivalConflict")); switch (true) { // Add arrival conflicts if there is arrival conflict in suggestions - case _.has(fieldData, 'oneof') && arrivalConflictExists.length > 0: - return this.getArrivalSuggestions(schema, fieldData, property, 'oneof') + case _.has(fieldData, "oneof") && arrivalConflictExists.length > 0: + return this.getArrivalSuggestions(schema, fieldData, property, "oneof") // Add arrival type if there are no arrival type conflicts case arrivalConflictExists.length === 0: - return this.getArrivalFormat(schema, fieldData, property, 'oneof') + return this.getArrivalFormat(schema, fieldData, property, "oneof") default: break; } @@ -126,22 +126,22 @@ export class SchemaHandler { } private checkForInvalidArray(value: any) { - if (_.has(value, 'items') && _.has(value, 'properties')) - _.unset(value, 'properties'); + if (_.has(value, "items") && _.has(value, "properties")) + _.unset(value, "properties"); } private updateMappings(schema: Map) { const recursive = (data: any) => { - _.map(data, (value, key) => { + _.map(data, (value) => { if (_.isPlainObject(value)) { - if ((_.has(value, 'properties'))) { - recursive(value['properties']); + if ((_.has(value, "properties"))) { + recursive(value["properties"]); } - if (value.type === 'array') { - if (_.has(value, 'items') && _.has(value["items"], 'properties')) { - recursive(value["items"]['properties']); + if (value.type === "array") 
{ + if (_.has(value, "items") && _.has(value["items"], "properties")) { + recursive(value["items"]["properties"]); } - if (_.has(value, 'items') && _.has(value, 'properties')) + if (_.has(value, "items") && _.has(value, "properties")) this.checkForInvalidArray(value); this.updateStoreType(value, _.get(value, "type")); } else { diff --git a/api-service/src/services/SchemaGenerateService/SuggestionTemplate.ts b/api-service/src/services/SchemaGenerateService/SuggestionTemplate.ts index bb87fe32..fcaba4a5 100644 --- a/api-service/src/services/SchemaGenerateService/SuggestionTemplate.ts +++ b/api-service/src/services/SchemaGenerateService/SuggestionTemplate.ts @@ -6,7 +6,7 @@ import { SchemaSuggestionTemplate } from "./Template" export class SuggestionTemplate { public createSuggestionTemplate(sample: ConflictTypes[]): SuggestionsTemplate[] { - return _.map(sample, (value, key) => { + return _.map(sample, (value) => { const dataTypeSuggestions = this.getSchemaMessageTemplate(value.schema) const requiredSuggestions = this.getRequiredMessageTemplate(value.required) const formatSuggestions = this.getPropertyFormatTemplate(value.formats) @@ -24,7 +24,7 @@ export class SuggestionTemplate { message = SchemaSuggestionTemplate.getSchemaNullTypeMessage(object.conflicts, object.property); advice = SchemaSuggestionTemplate.TEMPLATES.SCHEMA_SUGGESTION.CREATE.NULL_TYPE_PROPERTY.ADVICE; } else { - let { conflictMessage, arrivalFormatMessage } = SchemaSuggestionTemplate.getSchemaDataTypeMessage(object.conflicts, object.property); + const { conflictMessage, arrivalFormatMessage } = SchemaSuggestionTemplate.getSchemaDataTypeMessage(object.conflicts, object.property); message = conflictMessage; arrival_format_message = arrivalFormatMessage; advice = SchemaSuggestionTemplate.TEMPLATES.SCHEMA_SUGGESTION.CREATE.DATATYPE_PROPERTY.ADVICE; diff --git a/api-service/src/services/SchemaGenerateService/Template.ts b/api-service/src/services/SchemaGenerateService/Template.ts index 49c86228..9beb3a46 100644 --- a/api-service/src/services/SchemaGenerateService/Template.ts +++ b/api-service/src/services/SchemaGenerateService/Template.ts @@ -71,7 +71,7 @@ export const SchemaSuggestionTemplate = { updatedConflicts[types[0]] = value; } }); - let response: Record = { + const response: Record = { conflictMessage: _.template( `${this.TEMPLATES.SCHEMA_SUGGESTION.CREATE.DATATYPE_PROPERTY.MESSAGE} at property: '${property}'. The property type <% _.map(conflicts, (value, key, list) => { %><%= key %>: <%= value %> time(s)<%= _.last(list) === value ? '' : ', ' %><% }); %><%= _.isEmpty(conflicts) ? '' : '' %>`)({ conflicts }), arrivalFormatMessage: null, diff --git a/api-service/src/services/TableGenerator.ts b/api-service/src/services/TableGenerator.ts index 838538b7..eb3065ae 100644 --- a/api-service/src/services/TableGenerator.ts +++ b/api-service/src/services/TableGenerator.ts @@ -10,33 +10,33 @@ class BaseTableGenerator { * @param dataSchema * @returns properties Record[] */ - flattenSchema = (dataSchema: Record, type: string) : Record[] => { + flattenSchema = (dataSchema: Record, type: string): Record[] => { - let properties: Record[] = [] + const properties: Record[] = [] const flatten = (schema: Record, prev: string | undefined, prevExpr: string | undefined) => { - _.mapKeys(schema, function(value, parentKey) { - const newKey = (prev) ? _.join([prev, parentKey], '.') : parentKey; - const newExpr = (prevExpr) ? 
_.join([prevExpr, ".['", parentKey, "']"], '') : _.join(["$.['", parentKey, "']"], ''); - switch(value['type']) { - case 'object': - flatten(_.get(value, 'properties'), newKey, newExpr); + _.mapKeys(schema, function (value, parentKey) { + const newKey = (prev) ? _.join([prev, parentKey], ".") : parentKey; + const newExpr = (prevExpr) ? _.join([prevExpr, ".['", parentKey, "']"], "") : _.join(["$.['", parentKey, "']"], ""); + switch (value["type"]) { + case "object": + flatten(_.get(value, "properties"), newKey, newExpr); break; - case 'array': - if(type === "druid" && _.get(value, 'items.type') == 'object' && _.get(value, 'items.properties')) { - _.mapKeys(_.get(value, 'items.properties'), function(value, childKey) { - const objChildKey = _.join([newKey, childKey], '.') - properties.push(_.merge(_.pick(value, ['type', 'arrival_format', 'is_deleted']), {expr: _.join([newExpr,"[*].['",childKey,"']"], ''), name: objChildKey, data_type: 'array'})) + case "array": + if (type === "druid" && _.get(value, "items.type") == "object" && _.get(value, "items.properties")) { + _.mapKeys(_.get(value, "items.properties"), function (value, childKey) { + const objChildKey = _.join([newKey, childKey], ".") + properties.push(_.merge(_.pick(value, ["type", "arrival_format", "is_deleted"]), { expr: _.join([newExpr, "[*].['", childKey, "']"], ""), name: objChildKey, data_type: "array" })) }) } else { - properties.push(_.merge(_.pick(value, ['arrival_format', 'data_type', 'is_deleted']), {expr: newExpr+'[*]', name: newKey, type: _.get(value, 'items.type')})) + properties.push(_.merge(_.pick(value, ["arrival_format", "data_type", "is_deleted"]), { expr: newExpr + "[*]", name: newKey, type: _.get(value, "items.type") })) } break; default: - properties.push(_.merge(_.pick(value, ['type', 'arrival_format', 'data_type', 'is_deleted']), {expr: newExpr, name: newKey})) + properties.push(_.merge(_.pick(value, ["type", "arrival_format", "data_type", "is_deleted"]), { expr: newExpr, name: newKey })) } }); } - flatten(_.get(dataSchema, 'properties'), undefined, undefined) + flatten(_.get(dataSchema, "properties"), undefined, undefined) return properties } @@ -51,15 +51,14 @@ class BaseTableGenerator { getAllFields = async (dataset: Record, type: string): Promise[]> => { const { data_schema, denorm_config, transformations_config } = dataset - const instance = this; - let dataFields = instance.flattenSchema(data_schema, type); + let dataFields = this.flattenSchema(data_schema, type); if (!_.isEmpty(denorm_config.denorm_fields)) { for (const denormField of denorm_config.denorm_fields) { const denormDataset: any = await datasetService.getDataset(denormField.dataset_id, ["data_schema"], true); - const properties = instance.flattenSchema(denormDataset.data_schema, type); + const properties = this.flattenSchema(denormDataset.data_schema, type); const transformProps = _.map(properties, (prop) => { - _.set(prop, 'name', _.join([denormField.denorm_out_field, prop.name], '.')); - _.set(prop, 'expr', _.replace(prop.expr, "$", "$." + denormField.denorm_out_field)); + _.set(prop, "name", _.join([denormField.denorm_out_field, prop.name], ".")); + _.set(prop, "expr", _.replace(prop.expr, "$", "$." 
+ denormField.denorm_out_field)); return prop; }); dataFields.push(...transformProps); @@ -85,7 +84,7 @@ class BaseTableGenerator { class TableGenerator extends BaseTableGenerator { getDruidIngestionSpec = (dataset: Record, allFields: Record[], datasourceRef: string) => { - + const { dataset_config, router_config } = dataset return { "type": "kafka", @@ -109,22 +108,21 @@ class TableGenerator extends BaseTableGenerator { } } } - + private getDruidDimensions = (allFields: Record[], timestampKey: string, partitionKey: string | undefined) => { const dataFields = _.cloneDeep(allFields); - if(partitionKey) { // Move the partition column to the top of the dimensions - const partitionCol = _.remove(dataFields, {name: partitionKey}) - if(partitionCol && _.size(partitionCol) > 0) { + if (partitionKey) { // Move the partition column to the top of the dimensions + const partitionCol = _.remove(dataFields, { name: partitionKey }) + if (partitionCol && _.size(partitionCol) > 0) { dataFields.unshift(partitionCol[0]) } } - _.remove(dataFields, {name: timestampKey}) - const instance = this; + _.remove(dataFields, { name: timestampKey }) return _.union( _.map(dataFields, (field) => { return { - "type": instance.getDruidDimensionType(field.data_type), + "type": this.getDruidDimensionType(field.data_type), "name": field.name } }), @@ -132,7 +130,7 @@ class TableGenerator extends BaseTableGenerator { ) } - private getDruidDimensionType = (data_type: string):string => { + private getDruidDimensionType = (data_type: string): string => { switch (data_type) { case "number": return "double"; case "integer": return "long"; @@ -182,13 +180,13 @@ class TableGenerator extends BaseTableGenerator { getHudiIngestionSpecForUpdate = (dataset: Record, existingHudiSpec: Record, allFields: Record[], datasourceRef: string) => { - let newHudiSpec = this.getHudiIngestionSpecForCreate(dataset, allFields, datasourceRef) + const newHudiSpec = this.getHudiIngestionSpecForCreate(dataset, allFields, datasourceRef) const newColumnSpec = newHudiSpec.schema.columnSpec; - let oldColumnSpec = existingHudiSpec.schema.columnSpec; - let currIndex = _.get(_.maxBy(oldColumnSpec, 'index'), 'index') as unknown as number - const newColumns = _.differenceBy(newColumnSpec, oldColumnSpec, 'name'); - if(_.size(newColumns) > 0) { + const oldColumnSpec = existingHudiSpec.schema.columnSpec; + let currIndex = _.get(_.maxBy(oldColumnSpec, "index"), "index") as unknown as number + const newColumns = _.differenceBy(newColumnSpec, oldColumnSpec, "name"); + if (_.size(newColumns) > 0) { _.each(newColumns, (col) => { oldColumnSpec.push({ "type": col.type, @@ -197,21 +195,20 @@ class TableGenerator extends BaseTableGenerator { }) }) } - _.set(newHudiSpec, 'schema.columnSpec', oldColumnSpec) + _.set(newHudiSpec, "schema.columnSpec", oldColumnSpec) return newHudiSpec; } - private getHudiColumnSpec = (allFields: Record[], primaryKey: string, partitionKey: string, timestampKey: string) : Record[] => { + private getHudiColumnSpec = (allFields: Record[], primaryKey: string, partitionKey: string, timestampKey: string): Record[] => { - const instance = this; const dataFields = _.cloneDeep(allFields); - _.remove(dataFields, {name: primaryKey}) - _.remove(dataFields, {name: partitionKey}) - _.remove(dataFields, {name: timestampKey}) + _.remove(dataFields, { name: primaryKey }) + _.remove(dataFields, { name: partitionKey }) + _.remove(dataFields, { name: timestampKey }) let index = 1; - const transformFields = _.map(dataFields, (field) => { + const transformFields = 
_.map(dataFields, (field) => { return { - "type": instance.getHudiColumnType(field), + "type": this.getHudiColumnType(field), "name": field.name, "index": index++ } @@ -226,12 +223,12 @@ class TableGenerator extends BaseTableGenerator { return transformFields; } - private getHudiColumnType = (field: Record) : string => { - if(field.data_type === 'array' && field.arrival_format !== 'array') { + private getHudiColumnType = (field: Record): string => { + if (field.data_type === "array" && field.arrival_format !== "array") { return "array"; } - if(field.data_type === 'array' && field.arrival_format === 'array') { - switch(field.type) { + if (field.data_type === "array" && field.arrival_format === "array") { + switch (field.type) { case "string": return "array" case "number": @@ -244,11 +241,11 @@ class TableGenerator extends BaseTableGenerator { return "array" } } - switch(field.arrival_format) { + switch (field.arrival_format) { case "text": return "string" case "number": - switch(field.data_type) { + switch (field.data_type) { case "integer": return "int" case "epoch": @@ -260,7 +257,7 @@ class TableGenerator extends BaseTableGenerator { case "long": return "long" default: - return "double" + return "double" } case "integer": return "int" @@ -271,13 +268,15 @@ class TableGenerator extends BaseTableGenerator { } } - private getHudiFields = (allFields: Record[]) : Record[] => { + private getHudiFields = (allFields: Record[]): Record[] => { + const regexString = "[\\[\\]'\\*]"; + const regex = new RegExp(regexString, "g"); return _.union( _.map(allFields, (field) => { return { type: "path", - expr: _.replace(field.expr, /[\[\]'\*]/g, ""), + expr: _.replace(field.expr, regex, ""), name: field.name } }), @@ -285,15 +284,15 @@ class TableGenerator extends BaseTableGenerator { ) } - private getPrimaryKey = (dataset: Record) : string => { + private getPrimaryKey = (dataset: Record): string => { return dataset.dataset_config.keys_config.data_key; } - private getHudiPartitionKey = (dataset: Record) : string => { + private getHudiPartitionKey = (dataset: Record): string => { return dataset.dataset_config.keys_config.partition_key || dataset.dataset_config.keys_config.timestamp_key; } - private getTimestampKey = (dataset: Record) : string => { + private getTimestampKey = (dataset: Record): string => { return dataset.dataset_config.keys_config.timestamp_key; } } diff --git a/api-service/src/services/WrapperService.ts b/api-service/src/services/WrapperService.ts index 061b2b14..c38afb6d 100644 --- a/api-service/src/services/WrapperService.ts +++ b/api-service/src/services/WrapperService.ts @@ -1,6 +1,5 @@ import axios from "axios"; import { NextFunction, Request, Response } from "express"; -import _ from "lodash"; import { config } from "../configs/Config"; import { ResponseHandler } from "../helpers/ResponseHandler"; import { ErrorResponseHandler } from "../helpers/ErrorResponseHandler"; diff --git a/api-service/src/services/fs.ts b/api-service/src/services/fs.ts index e08597d6..cfc0010b 100644 --- a/api-service/src/services/fs.ts +++ b/api-service/src/services/fs.ts @@ -1,13 +1,14 @@ -import fs from 'fs'; -import path from 'path'; +import fs from "fs"; +import path from "path"; export const scrapModules = (folderPath: string, basename: string) => { - const mapping = new Map>(); + const mapping = new Map>(); fs.readdirSync(folderPath) .filter((file) => file !== basename) .map((file) => { const { default: { name, ...others }, + /* eslint-disable @typescript-eslint/no-var-requires */ } = 
require(path.join(folderPath, file)) as { default: Type }; mapping.set(name, others); diff --git a/api-service/src/services/managers/grafana/alert/helpers/index.ts b/api-service/src/services/managers/grafana/alert/helpers/index.ts index e9c19e29..9c577a52 100644 --- a/api-service/src/services/managers/grafana/alert/helpers/index.ts +++ b/api-service/src/services/managers/grafana/alert/helpers/index.ts @@ -32,7 +32,7 @@ const deleteAlertFolder = async (folderName: string) => { const checkIfGroupNameExists = async (category: string) => { const response = await getRules(); - const rules = _.get(response, 'data'); + const rules = _.get(response, "data"); if(!_.has(rules, category)) return undefined; return _.find(_.flatMap(_.values(rules)), { name: category, @@ -59,7 +59,7 @@ const getSpecificRule = async (payload: Record) => { const alertrules = await alerts(); const groups = _.get(alertrules, "data.data.groups"); const ruleGroup = _.find(groups, (group: any) => group.name == payload.category); - return _.find(_.get(ruleGroup, 'rules'), (rule: any) => rule.name == payload.name); + return _.find(_.get(ruleGroup, "rules"), (rule: any) => rule.name == payload.name); }; const updateMetadata = (metadata: any, dataSource: string, expression: string) => { @@ -89,7 +89,7 @@ const createFolder = (title: string) => { const createFolderIfNotExists = async (folderName: string) => { const folders = await getFolders(); - const isExists = _.find(folders.data, folder => _.get(folder, 'title') === folderName); + const isExists = _.find(folders.data, folder => _.get(folder, "title") === folderName); if (isExists) return; return createFolder(folderName); } @@ -215,7 +215,7 @@ const queryOperators = [ const getQueryExpression = (payload: Record) => { const { metric, operator, threshold } = payload; - const operatorSymbol = _.get(_.find(queryOperators, operatorMetadata => _.get(operatorMetadata, 'value') === operator), 'symbol'); + const operatorSymbol = _.get(_.find(queryOperators, operatorMetadata => _.get(operatorMetadata, "value") === operator), "symbol"); return `(${metric}) ${operatorSymbol} ${threshold}`; } @@ -229,7 +229,7 @@ const getMatchingLabels = async (channels: string[]) => { const { name, type } = channelMetadata; return `notificationChannel_${name}_${type}`; }) - .catch(err => null); + .catch(() => null); } const matchingLabels = await Promise.all(channels.map(fetchChannel)); @@ -245,22 +245,22 @@ const getMatchingLabels = async (channels: string[]) => { const transformRule = async ({ value, condition, metadata, isGroup }: any) => { const { name, id, interval, category, frequency, labels = {}, annotations = {}, severity, description, notification = {} } = value; const annotationObj = { ...annotations, description: description }; - const channels = _.get(notification, 'channels') || []; + const channels = _.get(notification, "channels") || []; const matchingLabelsForNotification = await getMatchingLabels(channels); const payload = { grafana_alert: { title: name, condition: condition, - no_data_state: _.get(metadata, 'no_data_state', 'NoData'), - exec_err_state: _.get(metadata, 'exec_err_state', 'Error'), + no_data_state: _.get(metadata, "no_data_state", "NoData"), + exec_err_state: _.get(metadata, "exec_err_state", "Error"), data: metadata, is_paused: false, }, for: interval, annotations: annotationObj, labels: { - 'alertId': id, + "alertId": id, ...labels, ...(severity && { severity }), ...matchingLabelsForNotification diff --git a/api-service/src/services/managers/grafana/alert/index.ts 
b/api-service/src/services/managers/grafana/alert/index.ts index b2dcd1e2..471a104d 100644 --- a/api-service/src/services/managers/grafana/alert/index.ts +++ b/api-service/src/services/managers/grafana/alert/index.ts @@ -1,5 +1,5 @@ import _ from "lodash"; -import { addGrafanaRule, checkIfGroupNameExists, checkIfRuleExists, createFolderIfNotExists, deleteAlertFolder, deleteAlertRule, getPrometheusDataSource, getQueryExpression, getQueryModel, getSpecificRule, transformRule, updateMetadata, getFilteredAlerts, getRules, groupRulesByCategory } from "./helpers"; +import { addGrafanaRule, checkIfGroupNameExists, checkIfRuleExists, createFolderIfNotExists, deleteAlertFolder, deleteAlertRule, getPrometheusDataSource, getQueryExpression, getQueryModel, getSpecificRule, transformRule, updateMetadata, getRules } from "./helpers"; import { Silence } from "../../../../models/Silence"; import constants from "../../constants"; @@ -27,7 +27,7 @@ const publishAlert = async (payload: Record) => { const getAlerts = async (payload: Record) => { const context = payload?.context || {}; - const alertId = _.get(payload, 'id'); + const alertId = _.get(payload, "id"); const { err, alertData } = await getSpecificRule(payload) .then(alertData => { if (!alertData) throw new Error() @@ -37,21 +37,21 @@ const getAlerts = async (payload: Record) => { const silenceModel = await Silence.findOne({ where: { alert_id: alertId } }); const silenceData = silenceModel?.toJSON(); - let silenceState: Record = { state: '', silenceId: '' }; + const silenceState: Record = { state: "", silenceId: "" }; if (silenceData) { const { end_time } = silenceData; const currentTime = new Date().getTime(); const endTime = new Date(end_time).getTime(); if (currentTime < endTime) { - silenceState.state = 'muted'; - silenceState['endTime'] = endTime; + silenceState.state = "muted"; + silenceState["endTime"] = endTime; } else { - silenceState.state = 'unmuted'; + silenceState.state = "unmuted"; } silenceState.silenceId = silenceData.id; } else { - silenceState.state = 'unmuted'; + silenceState.state = "unmuted"; } return { ...payload, context: { ...context, err }, ...(alertData && { alertData }), silenceState }; @@ -62,8 +62,8 @@ const deleteAlert = async (payload: Record) => { const alertCategory = await checkIfGroupNameExists(category); if (!alertCategory) throw new Error(constants.CATEGORY_NOT_EXIST); - if (_.get(alertCategory, 'rules.length') > 1) { - const filteredRule = _.filter(alertCategory.rules, (rule) => _.get(rule, 'grafana_alert.title') !== name) || []; + if (_.get(alertCategory, "rules.length") > 1) { + const filteredRule = _.filter(alertCategory.rules, (rule) => _.get(rule, "grafana_alert.title") !== name) || []; const filteredGroup = { ...alertCategory, rules: filteredRule }; return addGrafanaRule(filteredGroup); } @@ -83,19 +83,19 @@ const generateAlertPayload = (payload: Record) => { } const filterSystemRulesPredicate = (rule: Record) => { - const labels = _.get(rule, 'labels') || {}; + const labels = _.get(rule, "labels") || {}; const isSystemAlert = _.find(labels, (value, key) => (key === "alertSource" && value === "system-rule-ingestor-job")); if (!isSystemAlert) return true; return false; } -const deleteSystemRules = async (filters: Record) => { +const deleteSystemRules = async () => { const response = await getRules(); - const existingRules = _.cloneDeep(_.get(response, 'data')) as Record[]>; + const existingRules = _.cloneDeep(_.get(response, "data")) as Record[]>; for (const [category, evaluationGroups] of 
Object.entries(existingRules)) { const evaluationGroup = _.find(evaluationGroups, ["name", category]); if (!evaluationGroup) continue; - const rules = _.get(evaluationGroup, 'rules') || [] + const rules = _.get(evaluationGroup, "rules") || [] const filteredRules = _.filter(rules, filterSystemRulesPredicate); try { if (_.isEmpty(filteredRules)) { diff --git a/api-service/src/services/managers/grafana/index.ts b/api-service/src/services/managers/grafana/index.ts index 30c2b82d..1824c6c5 100644 --- a/api-service/src/services/managers/grafana/index.ts +++ b/api-service/src/services/managers/grafana/index.ts @@ -1,6 +1,6 @@ -import * as alertFunctions from './alert' -import * as notificationFunctions from './notification'; -import * as silenceFunctions from './silences'; +import * as alertFunctions from "./alert" +import * as notificationFunctions from "./notification"; +import * as silenceFunctions from "./silences"; const service = { name: "grafana", ...alertFunctions, ...notificationFunctions, ...silenceFunctions }; export default service \ No newline at end of file diff --git a/api-service/src/services/managers/grafana/notification/channels/email.ts b/api-service/src/services/managers/grafana/notification/channels/email.ts index d4628517..06658016 100644 --- a/api-service/src/services/managers/grafana/notification/channels/email.ts +++ b/api-service/src/services/managers/grafana/notification/channels/email.ts @@ -1,6 +1,6 @@ -import _ from 'lodash'; +import _ from "lodash"; import { IChannelConfig } from "../../../../../types/AlertModels"; -import { grafanaHttpClient } from '../../../../../connections/grafanaConnection'; +import { grafanaHttpClient } from "../../../../../connections/grafanaConnection"; const getReceiverObject = ({ name, recipientAddresses, message, multipleAddresses, type }: any) => { return { @@ -30,7 +30,7 @@ const service: IChannelConfig = { generateConfigPayload(payload: Record): Record { const { type, config, name } = payload; const { recipientAddresses, message, subject = "Obsrv Alert", labels = [[`notificationChannel_${name}_${type.toLowerCase()}`, "=", "true"]] } = config; - const multipleAddresses = _.size(_.split(recipientAddresses, ';')) > 1; + const multipleAddresses = _.size(_.split(recipientAddresses, ";")) > 1; return { notificationPolicy: { receiver: name, @@ -42,7 +42,7 @@ const service: IChannelConfig = { testChannel(payload: Record): Promise { const { name, type, config, message = "Test Channel" } = payload; const { recipientAddresses, subject = "Obsrv Alert" } = config; - const multipleAddresses = _.size(_.split(recipientAddresses, ';')) > 1; + const multipleAddresses = _.size(_.split(recipientAddresses, ";")) > 1; const alert = { annotations: { description: message }, labels: {} }; const body = { alert, receivers: [getReceiverObject({ name, type, multipleAddresses, recipientAddresses, subject })] }; return grafanaHttpClient.post("api/alertmanager/grafana/config/api/v1/receivers/test", body); diff --git a/api-service/src/services/managers/grafana/notification/channels/index.ts b/api-service/src/services/managers/grafana/notification/channels/index.ts index e7f5f87f..abc6b45a 100644 --- a/api-service/src/services/managers/grafana/notification/channels/index.ts +++ b/api-service/src/services/managers/grafana/notification/channels/index.ts @@ -1,11 +1,11 @@ -import path from 'path'; -import { scrapModules } from '../../../../../services/fs'; -import { IChannelConfig } from '../../../../../types/AlertModels'; +import path from "path"; +import { scrapModules 
} from "../../../../../services/fs"; +import { IChannelConfig } from "../../../../../types/AlertModels"; const channels = scrapModules(__dirname, path.basename(__filename)); export const getChannelService = (channelName: string) => { const channel = channels.get(channelName.toLowerCase()); - if (!channel) throw new Error('invalid channel'); + if (!channel) throw new Error("invalid channel"); return channel; } \ No newline at end of file diff --git a/api-service/src/services/managers/grafana/notification/channels/slack.ts b/api-service/src/services/managers/grafana/notification/channels/slack.ts index 2da034fc..983d9879 100644 --- a/api-service/src/services/managers/grafana/notification/channels/slack.ts +++ b/api-service/src/services/managers/grafana/notification/channels/slack.ts @@ -1,7 +1,6 @@ -import axios from 'axios'; -import _ from 'lodash'; -import CONSTANTS from '../../../constants' -import { IChannelConfig } from '../../../../../types/AlertModels'; +import axios from "axios"; +import CONSTANTS from "../../../constants" +import { IChannelConfig } from "../../../../../types/AlertModels"; const generateConfigPayload = (payload: Record): Record => { const { type, config, name } = payload; diff --git a/api-service/src/services/managers/grafana/notification/channels/teams.ts b/api-service/src/services/managers/grafana/notification/channels/teams.ts index 5cc0e689..714f4f19 100644 --- a/api-service/src/services/managers/grafana/notification/channels/teams.ts +++ b/api-service/src/services/managers/grafana/notification/channels/teams.ts @@ -1,7 +1,6 @@ -import axios from 'axios'; -import _ from 'lodash'; -import CONSTANTS from '../../../constants'; -import { IChannelConfig } from '../../../../../types/AlertModels'; +import axios from "axios"; +import CONSTANTS from "../../../constants"; +import { IChannelConfig } from "../../../../../types/AlertModels"; const generateConfigPayload = (payload: Record): Record => { const {type, config, name} = payload; diff --git a/api-service/src/services/managers/grafana/notification/helpers/index.ts b/api-service/src/services/managers/grafana/notification/helpers/index.ts index a90c2ad7..112365b7 100644 --- a/api-service/src/services/managers/grafana/notification/helpers/index.ts +++ b/api-service/src/services/managers/grafana/notification/helpers/index.ts @@ -1,7 +1,7 @@ import _ from "lodash"; import { grafanaHttpClient } from "../../../../../connections/grafanaConnection"; -import { getChannelService } from '../channels'; -import defaultTemplates from '../templates'; +import { getChannelService } from "../channels"; +import defaultTemplates from "../templates"; const generateChannelConfig = (payload: Record) => { const { type } = payload; @@ -18,9 +18,9 @@ const updateAlertManagerConfig = async (payload: Record) => { return grafanaHttpClient.post("/api/alertmanager/grafana/config/api/v1/alerts", payload); }; -const getReceivers = (alertmanager_config: Record) => _.get(alertmanager_config, 'alertmanager_config.receivers') as Array; -const getRoutes = (alertmanager_config: Record) => _.get(alertmanager_config, 'alertmanager_config.route.routes') as Array; -const getTemplates = (alertmanager_config: Record) => _.get(alertmanager_config, 'template_files') as Record; +const getReceivers = (alertmanager_config: Record) => _.get(alertmanager_config, "alertmanager_config.receivers") as Array; +const getRoutes = (alertmanager_config: Record) => _.get(alertmanager_config, "alertmanager_config.route.routes") as Array; +const getTemplates = (alertmanager_config: 
Record) => _.get(alertmanager_config, "template_files") as Record; const createContactPointsAndNotificationPolicy = async (metadata: Record) => { const { receiver, notificationPolicy } = metadata; @@ -28,9 +28,9 @@ const createContactPointsAndNotificationPolicy = async (metadata: Record, alert const clonedAlertManagerConfig = _.cloneDeep(alertManagerConfig); const existingReceivers = getReceivers(clonedAlertManagerConfig) || []; const existingRoutes = getRoutes(clonedAlertManagerConfig) || []; - _.remove(existingRoutes, route => _.get(route, 'receiver') === name); - _.remove(existingReceivers, receiver => _.get(receiver, 'name') === name); + _.remove(existingRoutes, route => _.get(route, "receiver") === name); + _.remove(existingReceivers, receiver => _.get(receiver, "name") === name); return clonedAlertManagerConfig; }; diff --git a/api-service/src/services/managers/grafana/notification/index.ts b/api-service/src/services/managers/grafana/notification/index.ts index bf7f387b..816e473c 100644 --- a/api-service/src/services/managers/grafana/notification/index.ts +++ b/api-service/src/services/managers/grafana/notification/index.ts @@ -1,5 +1,4 @@ -import _ from "lodash"; -import { getChannelService } from './channels'; +import { getChannelService } from "./channels"; import { createContactPointsAndNotificationPolicy, generateChannelConfig, getAlertManagerConfig, removeReceiverAndNotificationPolicy, updateAlertManagerConfig } from "./helpers"; const updateNotificationChannel = async (payload: Record) => { diff --git a/api-service/src/services/managers/grafana/silences/helpers/index.ts b/api-service/src/services/managers/grafana/silences/helpers/index.ts index 599ab05f..50af380c 100644 --- a/api-service/src/services/managers/grafana/silences/helpers/index.ts +++ b/api-service/src/services/managers/grafana/silences/helpers/index.ts @@ -19,7 +19,7 @@ const disableSilence = (silenceId: string) => { const getCurrentSilenceStatus = async (silenceId: string) => { const response = await getSilence(silenceId); - const currentSilenceStatus = _.get(response, 'data.status.state'); + const currentSilenceStatus = _.get(response, "data.status.state"); return currentSilenceStatus; } diff --git a/api-service/src/services/managers/index.ts b/api-service/src/services/managers/index.ts index e2e57568..84b666d9 100644 --- a/api-service/src/services/managers/index.ts +++ b/api-service/src/services/managers/index.ts @@ -61,9 +61,9 @@ export const deleteAlertRule = async (payload: Record, hardDelete: export const deleteSystemRules = async (payload: Record) => { - const { rules = [], manager } = payload; + const { manager } = payload; const service = getService(manager); - return service.deleteSystemRules(rules); + return service.deleteSystemRules(); } export const getAlertsMetadata = (payload: Record) => { @@ -125,7 +125,7 @@ export const deleteAlertByDataset = async (payload: Record) => { const { name } = payload; const alertRulePayload = await Alert.findAll({ where: { category: "datasets", "metadata.queryBuilderContext.subComponent": name }, raw: true }) if (!alertRulePayload) throw new Error(constants.ALERTS_NOT_FOUND) - for (let payload of alertRulePayload) { + for (const payload of alertRulePayload) { await deleteAlertRule(payload, true) await retireAlertSilence(_.get(payload, "id") || "") } @@ -140,7 +140,7 @@ export const deleteMetricAliasByDataset = async (payload: Record) = const { name } = payload; const metricAliasPayload = await Metrics.findAll({ where: { component: "datasets", subComponent: name } }) 
if (!metricAliasPayload) throw new Error(constants.METRIC_ALIAS_NOT_FOUND) - for (let payload of metricAliasPayload) { + for (const payload of metricAliasPayload) { await payload.destroy() } return constants.METRIC_ALIAS_DELETED_SUCCESSFULLY; @@ -173,7 +173,7 @@ export const getAlertMetricsByDataset = async (payload: Record) => export const createAlertsByDataset = async (payload: any) => { try { - for (let alerts of payload) { + for (const alerts of payload) { const alertPayload = _.omit(alerts as any, ["id", "status", "createdAt", "updatedAt", "created_by", "updated_by"]) await Alert.create(alertPayload) } @@ -184,7 +184,7 @@ export const createAlertsByDataset = async (payload: any) => { export const createMetricAliasByDataset = async (payload: any) => { try { - for (let metrics of payload) { + for (const metrics of payload) { const metricsPayload = _.omit(metrics as any, ["id", "createdAt", "updatedAt"]) await Metrics.create(metricsPayload) } @@ -198,7 +198,7 @@ export const publishAlertByDataset = async (payload: Record) => { const { name } = payload; const alertRulePayload = await Alert.findAll({ where: { category: "datasets", "metadata.queryBuilderContext.subComponent": name }, raw: true }) if (!alertRulePayload) throw new Error("Alert rule does not exist") - for (let payload of alertRulePayload) { + for (const payload of alertRulePayload) { await publishAlert(payload) } return constants.ALERTS_PUBLISHED_SUCCESSFULLY; diff --git a/api-service/src/services/managers/prometheus/alert/index.ts b/api-service/src/services/managers/prometheus/alert/index.ts index f2bd3a2f..28f5412f 100644 --- a/api-service/src/services/managers/prometheus/alert/index.ts +++ b/api-service/src/services/managers/prometheus/alert/index.ts @@ -1,17 +1,17 @@ -import CONSTANTS from '../../constants' +import CONSTANTS from "../../constants" -export const publishAlert = async (payload: Record) => { +export const publishAlert = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } -export const getAlerts = async (payload: Record) => { +export const getAlerts = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } -export const deleteAlert = async (payload: Record) => { +export const deleteAlert = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } -export const generateAlertPayload = (payload: Record) => { +export const generateAlertPayload = () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } -export const deleteSystemRules= async (payload: Record) => { +export const deleteSystemRules= async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } diff --git a/api-service/src/services/managers/prometheus/index.ts b/api-service/src/services/managers/prometheus/index.ts index c8c2e7df..f8f0da66 100644 --- a/api-service/src/services/managers/prometheus/index.ts +++ b/api-service/src/services/managers/prometheus/index.ts @@ -1,5 +1,5 @@ -import * as alertFunctions from './alert' -import * as notificationFunctions from './notification'; -import * as silenceFunctions from './silences'; +import * as alertFunctions from "./alert" +import * as notificationFunctions from "./notification"; +import * as silenceFunctions from "./silences"; export default { ...alertFunctions, ...notificationFunctions, ...silenceFunctions } \ No newline at end of file diff --git a/api-service/src/services/managers/prometheus/notification/index.ts b/api-service/src/services/managers/prometheus/notification/index.ts index 91b663cb..9514d324 100644 --- 
a/api-service/src/services/managers/prometheus/notification/index.ts +++ b/api-service/src/services/managers/prometheus/notification/index.ts @@ -1,11 +1,11 @@ -import CONSTANTS from '../../constants' +import CONSTANTS from "../../constants" -export const createNotificationChannel = (payload: Record) => { +export const createNotificationChannel = () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } -export const testNotificationChannel = async (payload: Record) => { +export const testNotificationChannel = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } -export const updateNotificationChannel = (payload: Record) => { +export const updateNotificationChannel = () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } \ No newline at end of file diff --git a/api-service/src/services/managers/prometheus/silences/index.ts b/api-service/src/services/managers/prometheus/silences/index.ts index 1c073881..60ed0aef 100644 --- a/api-service/src/services/managers/prometheus/silences/index.ts +++ b/api-service/src/services/managers/prometheus/silences/index.ts @@ -1,18 +1,18 @@ -import CONSTANTS from '../../constants'; +import CONSTANTS from "../../constants"; -const createSilence = async (payload: Record) => { +const createSilence = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } -const getSilenceMetadata = async (payload: Record) => { +const getSilenceMetadata = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED) } -const updateSilence = async (silence: Record, payload: Record) => { +const updateSilence = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } -const deleteSilence = async (payload: Record) => { +const deleteSilence = async () => { throw new Error(CONSTANTS.METHOD_NOT_IMPLEMENTED); } diff --git a/api-service/src/services/telemetry.ts b/api-service/src/services/telemetry.ts index ede157f7..d4eca61b 100644 --- a/api-service/src/services/telemetry.ts +++ b/api-service/src/services/telemetry.ts @@ -2,17 +2,13 @@ import { Request, Response, NextFunction } from "express" import { v4 } from "uuid"; import _ from "lodash"; import { config as appConfig } from "../configs/Config"; -import { Kafka } from "kafkajs"; +import {send} from "../connections/kafkaConnection" -const env = _.get(appConfig, "env") +const {env, version} = _.pick(appConfig, ["env","version"]) const telemetryTopic = _.get(appConfig, "telemetry_dataset"); -const brokerServers = _.get(appConfig, "telemetry_service_config.kafka.config.brokers"); export enum OperationType { CREATE = 1, UPDATE, PUBLISH, RETIRE, LIST, GET } -const kafka = new Kafka({ clientId: telemetryTopic, brokers: brokerServers }); -const telemetryEventsProducer = kafka.producer(); -telemetryEventsProducer.connect().catch(err => console.error("Unable to connect to kafka", err.message)); const getDefaults = () => { return { @@ -29,7 +25,7 @@ const getDefaults = () => { sid: v4(), pdata: { id: `${env}.api.service`, - ver: "1.0.0" + ver: `${version}` } }, object: {}, @@ -54,7 +50,7 @@ const getDefaultEdata = ({ action }: any) => ({ }) const sendTelemetryEvents = async (event: Record) => { - telemetryEventsProducer.send({ topic: telemetryTopic, messages: [{ value: JSON.stringify(event) }] }).catch(console.log) + send({ messages: [{ value: JSON.stringify(event) }] }, telemetryTopic).catch(console.log); } const transformProps = (body: Record) => { diff --git a/api-service/src/telemetry/telemetryActions.ts b/api-service/src/telemetry/telemetryActions.ts index bbee6faa..befe6100 100644 --- 
a/api-service/src/telemetry/telemetryActions.ts +++ b/api-service/src/telemetry/telemetryActions.ts @@ -17,5 +17,8 @@ export default { "sqlQuery": "dataset:query:sql", "ingestEvents": "dataset:events:ingest", "submitIngestionSpec": "datasource:ingestion:submit", - "datasetExhaust": "dataset:exhaust:get" + "datasetExhaust": "dataset:exhaust:get", + "copyDataset": "dataset:copy", + "readConnectors": "connectors:read", + "listConnectors": "connectors:list", } \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DataIngestTest/DataIngestionTest.spec.ts b/api-service/src/tests/DatasetManagement/DataIngestTest/DataIngestionTest.spec.ts index a6037fff..487cd77e 100644 --- a/api-service/src/tests/DatasetManagement/DataIngestTest/DataIngestionTest.spec.ts +++ b/api-service/src/tests/DatasetManagement/DataIngestTest/DataIngestionTest.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import { TestInputsForDataIngestion } from "./Fixtures"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { Dataset } from "../../../models/Dataset"; import sinon from "sinon"; import { Kafka } from "kafkajs"; @@ -14,17 +14,16 @@ chai.should(); chai.use(chaiHttp); const kafka = new Kafka(connectionConfig.kafka.config); -const producer = kafka.producer(); const apiEndpoint = "/v2/data/in/:datasetId" const resultResponse = [ { - topicName: 'local.test.topic', + topicName: "local.test.topic", partition: 0, errorCode: 0, - baseOffset: '257', - logAppendTime: '-1', - logStartOffset: '0' + baseOffset: "257", + logAppendTime: "-1", + logStartOffset: "0" } ] const kafkaModule = require("../../../connections/kafkaConnection"); @@ -39,7 +38,7 @@ describe("DATA INGEST API", () => { return Promise.resolve({ dataValues: { dataset_config: { - entry_topic: 'local.test.topic', + entry_topic: "local.test.topic", }, extraction_config: { is_batch_event: false, @@ -74,7 +73,7 @@ describe("DATA INGEST API", () => { return Promise.resolve({ dataValues: { dataset_config: { - entry_topic: 'local.test.topic', + entry_topic: "local.test.topic", } } }) @@ -103,7 +102,7 @@ describe("DATA INGEST API", () => { return Promise.resolve({ dataValues: { dataset_config: { - entry_topic: 'local.test.topic', + entry_topic: "local.test.topic", } } }) diff --git a/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts b/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts index 3a7be3dc..96d53e38 100644 --- a/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts +++ b/api-service/src/tests/DatasetManagement/DataOutTest/DataQueryTest.spec.ts @@ -1,11 +1,11 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import nock from "nock"; import { TestQueries } from "./Fixtures"; import { config } from "../../../configs/Config"; -import chaiSpies from 'chai-spies' -import { describe, it } from 'mocha'; +import chaiSpies from "chai-spies" +import { describe, it } from "mocha"; import { Datasource } from "../../../models/Datasource"; chai.use(chaiSpies) chai.should(); @@ -35,7 +35,7 @@ describe("QUERY API TESTS", () => { }) nock(druidHost + ":" + druidPort) .get(listDruidDatasources) - .reply(200, ['telemetry-events.1_rollup']) + .reply(200, ["telemetry-events.1_rollup"]) chai .request(app) .post("/v2/data/query/telemetry-events") diff 
--git a/api-service/src/tests/DatasetManagement/DataOutTest/Fixtures.ts b/api-service/src/tests/DatasetManagement/DataOutTest/Fixtures.ts index 8f9bd9f7..c11de5f1 100644 --- a/api-service/src/tests/DatasetManagement/DataOutTest/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DataOutTest/Fixtures.ts @@ -1,23 +1,23 @@ export const TestQueries = { VALID_QUERY: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"test","aggregationLevel":"week"},"query":{"queryType":"timeseries","intervals":{"type":"intervals","intervals":["2024-01-31/2024-02-01"]},"granularity":"week","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"msgid"}},"name":"msgid"},{"type":"filtered","aggregator":{"type":"count","name":"a1"},"filter":{"type":"not","field":{"type":"null","column":"ver"}},"name":"a1"}]}}', - HIGH_LIMIT_NATIVE_QUERY: '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events","aggregationLevel":"week"},"query":{"queryType":"timeseries","intervals":{"type":"intervals","intervals":["2024-11-31/2024-12-01"]},"granularity":"day","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"msgid"}},"name":"msgid"},{"type":"filtered","aggregator":{"type":"count","name":"a1"},"filter":{"type":"not","field":{"type":"null","column":"ver"}},"name":"a1"}],"limit":10000,"threshold":10000}}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"test\",\"aggregationLevel\":\"week\"},\"query\":{\"queryType\":\"timeseries\",\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"2024-01-31/2024-02-01\"]},\"granularity\":\"week\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"msgid\"}},\"name\":\"msgid\"},{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a1\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"ver\"}},\"name\":\"a1\"}]}}", + HIGH_LIMIT_NATIVE_QUERY: "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry-events\",\"aggregationLevel\":\"week\"},\"query\":{\"queryType\":\"timeseries\",\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"2024-11-31/2024-12-01\"]},\"granularity\":\"day\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"msgid\"}},\"name\":\"msgid\"},{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a1\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"ver\"}},\"name\":\"a1\"}],\"limit\":10000,\"threshold\":10000}}", WITHOUT_THRESOLD_QUERY: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events"},"query":{"queryType":"timeBoundary","dimension":"content_status","metric":"count","granularity":"all","intervals":["2020-12-21/2020-12-22"],"aggregations":[]}}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": 
\"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry-events\"},\"query\":{\"queryType\":\"timeBoundary\",\"dimension\":\"content_status\",\"metric\":\"count\",\"granularity\":\"all\",\"intervals\":[\"2020-12-21/2020-12-22\"],\"aggregations\":[]}}", VALID_SQL_QUERY: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"test","aggregationLevel":"week"},"query":"SELECT * FROM \\"test\\" WHERE __time >= TIMESTAMP \'2020-12-31\' AND __time < TIMESTAMP \'2021-01-21\' LIMIT 10"}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"test\",\"aggregationLevel\":\"week\"},\"query\":\"SELECT * FROM \\\"test\\\" WHERE __time >= TIMESTAMP '2020-12-31' AND __time < TIMESTAMP '2021-01-21' LIMIT 10\"}", HIGH_LIMIT_SQL_QUERY: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events"},"querySql":{"query":"SELECT msgid FROM \\"telemetry-events\\" WHERE __time >= TIMESTAMP \'2021-01-01\' AND __time < TIMESTAMP \'2021-01-22\' LIMIT 100000"}}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry-events\"},\"querySql\":{\"query\":\"SELECT msgid FROM \\\"telemetry-events\\\" WHERE __time >= TIMESTAMP '2021-01-01' AND __time < TIMESTAMP '2021-01-22' LIMIT 100000\"}}", HIGH_DATE_RANGE_SQL_QUERY: `{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events","aggregationLevel":"week"},"query":"SELECT actor_type, content_status FROM \\"telemetry-events\\" WHERE __time >= TIMESTAMP '2021-01-01' AND __time < TIMESTAMP '2022-02-12' LIMIT 10"}`, LIMIT_IS_NAN: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"dataSource":"telemetry-events","aggregationLevel":"week"},"query":"SELECT content_status FROM \\"telemetry-events\\" WHERE __time >= TIMESTAMP \'2021-01-01\' AND __time < TIMESTAMP \'2021-01-12\' LIMIT 100"}', - DATASOURCE_NOT_FOUND: '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry"},"query":"SELECT content_status FROM \\"telemetry\\" LIMIT 5"}', - INVALID_DATE_RANGE_NATIVE: '{"id":"api.data.out","ver":"1.0","ts":"1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events","table":"week"},"query":{"queryType":"timeseries","intervals":{"type":"intervals","intervals":["2023-01-31/2023-04-01"]},"granularity":"day","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"msgid"}},"name":"msgid"}]}}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"dataSource\":\"telemetry-events\",\"aggregationLevel\":\"week\"},\"query\":\"SELECT content_status FROM \\\"telemetry-events\\\" WHERE __time >= TIMESTAMP '2021-01-01' AND __time < TIMESTAMP '2021-01-12' LIMIT 100\"}", + DATASOURCE_NOT_FOUND: "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": 
\"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry\"},\"query\":\"SELECT content_status FROM \\\"telemetry\\\" LIMIT 5\"}", + INVALID_DATE_RANGE_NATIVE: "{\"id\":\"api.data.out\",\"ver\":\"1.0\",\"ts\":\"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry-events\",\"table\":\"week\"},\"query\":{\"queryType\":\"timeseries\",\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"2023-01-31/2023-04-01\"]},\"granularity\":\"day\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"msgid\"}},\"name\":\"msgid\"}]}}", INVALID_SQL_QUERY: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry","aggregationLevel":"week"},"query":"SELECT * FROM \\"telemetry\\" WHERE __time >= TIMESTAMP \'2020-12-31\' AND __time < TIMESTAMP \'2021-01-21\' LIMIT 10"}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry\",\"aggregationLevel\":\"week\"},\"query\":\"SELECT * FROM \\\"telemetry\\\" WHERE __time >= TIMESTAMP '2020-12-31' AND __time < TIMESTAMP '2021-01-21' LIMIT 10\"}", VALID_SQL_QUERY_WITHOUT_LIMIT: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry","aggregationLevel":"week"},"query":"SELECT * FROM \\"telemetry-events\\" WHERE __time >= TIMESTAMP \'2020-12-31\' AND __time < TIMESTAMP \'2021-01-21\'"}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry\",\"aggregationLevel\":\"week\"},\"query\":\"SELECT * FROM \\\"telemetry-events\\\" WHERE __time >= TIMESTAMP '2020-12-31' AND __time < TIMESTAMP '2021-01-21'\"}", VALID_INTERVAL: - '{"id": "api.data.out","ver": "1.0","ts": "1711966306164","params":{"msgid":"e180ecac-8f41-4f21-9a21-0b3a1a368917"},"context":{"datasetId":"telemetry-events","aggregationLevel":"week"},"query":{"queryType":"timeseries","intervals":"2024-01-31/2024-02-01","granularity":"week","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"msgid"}},"name":"msgid"},{"type":"filtered","aggregator":{"type":"count","name":"a1"},"filter":{"type":"not","field":{"type":"null","column":"ver"}},"name":"a1"}]}}', + "{\"id\": \"api.data.out\",\"ver\": \"1.0\",\"ts\": \"1711966306164\",\"params\":{\"msgid\":\"e180ecac-8f41-4f21-9a21-0b3a1a368917\"},\"context\":{\"datasetId\":\"telemetry-events\",\"aggregationLevel\":\"week\"},\"query\":{\"queryType\":\"timeseries\",\"intervals\":\"2024-01-31/2024-02-01\",\"granularity\":\"week\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"msgid\"}},\"name\":\"msgid\"},{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a1\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"ver\"}},\"name\":\"a1\"}]}}", } \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetCreate/DatasetCreate.spec.ts 
b/api-service/src/tests/DatasetManagement/DatasetCreate/DatasetCreate.spec.ts
index d70c7d1f..51953ba0 100644
--- a/api-service/src/tests/DatasetManagement/DatasetCreate/DatasetCreate.spec.ts
+++ b/api-service/src/tests/DatasetManagement/DatasetCreate/DatasetCreate.spec.ts
@@ -1,16 +1,13 @@
-import app from "../../../../app";
+import app from "../../../app";
 import chai, { expect } from "chai";
 import chaiHttp from "chai-http";
 import spies from "chai-spies";
 import httpStatus from "http-status";
 import { TestInputsForDatasetCreate, DATASET_CREATE_SUCCESS_FIXTURES, DATASET_FAILURE_DUPLICATE_DENORM_FIXTURES } from "./Fixtures";
-import { describe, it } from 'mocha';
+import { describe, it } from "mocha";
 import { DatasetDraft } from "../../../models/DatasetDraft";
 import { sequelize } from "../../../connections/databaseConnection";
-import _ from "lodash";
 import { apiId } from "../../../controllers/DatasetCreate/DatasetCreate"
-import { DatasetTransformationsDraft } from "../../../models/TransformationDraft";
-import { DatasetTransformations } from "../../../models/Transformation";
 import { Dataset } from "../../../models/Dataset";

 chai.use(spies);
@@ -30,34 +27,13 @@ describe("DATASET CREATE API", () => {
         chai.spy.on(DatasetDraft, "findOne", () => {
             return Promise.resolve(null)
         })
-        chai.spy.on(sequelize, "query", () => {
-            return Promise.resolve([{ nextVal: 9 }])
+        chai.spy.on(Dataset, "findOne", () => {
+            return Promise.resolve(null)
         })
         chai.spy.on(DatasetDraft, "create", () => {
             return Promise.resolve({ dataValues: { id: "telemetry" } })
         })
-        chai.spy.on(Dataset, "findOne", () => {
-            return Promise.resolve({ "data_schema": {"$schema": "https://json-schema.org/draft/2020-12/schema","type": "object",
-                "properties": {
-                    "eid": {"type": "string"},
-                    "ets": {"type": "string"}
-                },
-                "additionalProperties": true
-            },})
-        })
-        chai.spy.on(DatasetTransformationsDraft, "findAll", () => {
-            return Promise.resolve()
-        })
-        chai.spy.on(DatasetTransformations, "findAll", () => {
-            return Promise.resolve()
-        })
-        const t = chai.spy.on(sequelize, "transaction", () => {
-            return Promise.resolve(sequelize.transaction)
-        })
-        chai.spy.on(t, "commit", () => {
-            return Promise.resolve({})
-        })
-
+
         chai
             .request(app)
             .post("/v2/datasets/create")
@@ -80,7 +56,10 @@ describe("DATASET CREATE API", () => {
             chai.spy.on(DatasetDraft, "findOne", () => {
                 return Promise.resolve(null)
             })
-
+            chai.spy.on(Dataset, "findOne", () => {
+                return Promise.resolve(null)
+            })
+
             chai
                 .request(app)
                 .post("/v2/datasets/create")
@@ -91,7 +70,7 @@ describe("DATASET CREATE API", () => {
                     res.body.id.should.be.eq(apiId);
                     res.body.params.status.should.be.eq(fixture.status)
                     res.body.params.msgid.should.be.eq(fixture.msgid)
-                    res.body.error.message.should.be.eq("Duplicate denorm key found")
+                    res.body.error.message.should.be.eq("Duplicate denorm output fields found.")
                     res.body.error.code.should.be.eq("DATASET_DUPLICATE_DENORM_KEY")
                     done();
                 });
@@ -130,30 +109,11 @@ describe("DATASET CREATE API", () => {
                res.body.id.should.be.eq(apiId);
                res.body.params.status.should.be.eq("FAILED")
                res.body.params.msgid.should.be.eq(msgid)
-                res.body.error.message.should.be.eq("Dataset already exists")
+                res.body.error.message.should.be.eq("Dataset Already exists with id:sb-ddd")
                res.body.error.code.should.be.eq("DATASET_EXISTS")
                done();
            });
    });
-    it("Dataset creation failure: Connection to the database failed", (done) => {
-        chai.spy.on(DatasetDraft, "findOne", () => {
-            return Promise.reject({})
-        })
-        chai
-            .request(app)
-
.post("/v2/datasets/create") - .send(TestInputsForDatasetCreate.DATASET_WITH_DUPLICATE_DENORM_KEY) - .end((err, res) => { - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Failed to create dataset") - res.body.error.code.should.be.eq("DATASET_CREATION_FAILURE") - done(); - }); - }); }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetCreate/Fixtures.ts b/api-service/src/tests/DatasetManagement/DatasetCreate/Fixtures.ts index b2dbb7f3..01b87ac1 100644 --- a/api-service/src/tests/DatasetManagement/DatasetCreate/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DatasetCreate/Fixtures.ts @@ -1,5 +1,4 @@ import httpStatus from "http-status" -import _ from "lodash" export const TestInputsForDatasetCreate = { VALID_DATASET: { @@ -11,7 +10,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "validation_config": { "validate": true, @@ -44,29 +43,38 @@ export const TestInputsForDatasetCreate = { "denorm_fields": [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "trip-details" } ] }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets", - "file_upload_path": ["/config/file.json"] + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] }, "tags": [] } }, - VALID_DATASET_WITH_DEFAULT_TS: { + VALID_DATASET_WITH_TRANSFORMATIONS: { "id": "api.datasets.create", - "ver": "v1", + "ver": "v2", "ts": "2024-04-10T16:10:50+05:30", "params": { "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -75,6 +83,9 @@ export const TestInputsForDatasetCreate = { "eid": { "type": "string" }, + "ets": { + "type": "string" + }, "ver": { "type": "string" }, @@ -85,23 +96,33 @@ export const TestInputsForDatasetCreate = { "additionalProperties": true }, "dataset_config": { - "data_key": "", - "timestamp_key": "obsrv_meta.syncts" + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] }, + "transformations_config": [{ "field_key": "eid", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }], "tags": [] } }, - VALID_DATASET_WITH_TRANSFORMATIONS: { + VALID_DATASET_WITH_MULTIPLE_TRANSFORMATIONS: { "id": "api.datasets.create", - "ver": "v2", + "ver": "v1", "ts": "2024-04-10T16:10:50+05:30", "params": { "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -110,10 +131,10 @@ export const TestInputsForDatasetCreate = { "eid": { "type": "string" }, - "ets": { + "ver": { "type": "string" }, - "ver": { + "ets": { "type": "string" }, "required": [ @@ -123,32 +144,24 @@ export const 
TestInputsForDatasetCreate = { "additionalProperties": true }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets", - "file_upload_path": ["/config/file.json"] + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] }, - "transformations_config": [ - { - "field_key": "eid", - "transformation_function": { - "type": "mask", - "expr": "eid", - "condition": null - }, - "mode": "Strict", - "metadata": { - "_transformationType": "mask", - "_transformedFieldDataType": "string", - "_transformedFieldSchemaType": "string", - "section": "transformation" - } - } - ], + "transformations_config": [{ "field_key": "eid", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, { "field_key": "ver", "transformation_function": { "type": "mask", "expr": "ver", "datatype": "string", "category": "pii" }, "mode": "Strict" }], "tags": [] } }, - VALID_DATASET_WITH_MULTIPLE_TRANSFORMATIONS: { + VALID_DATASET_WITH_CONNECTORS: { "id": "api.datasets.create", "ver": "v1", "ts": "2024-04-10T16:10:50+05:30", @@ -157,7 +170,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -179,42 +192,19 @@ export const TestInputsForDatasetCreate = { "additionalProperties": true }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets", - "file_upload_path": ["/config/file.json"] - }, - "transformations_config": [ - { - "field_key": "eid", - "transformation_function": { - "type": "mask", - "expr": "eid", - "condition": null - }, - "mode": "Strict", - "metadata": { - "_transformationType": "mask", - "_transformedFieldDataType": "string", - "_transformedFieldSchemaType": "string", - "section": "transformation" - } + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false }, - { - "field_key": "eid", - "transformation_function": { - "type": "mask", - "expr": "eid", - "condition": null - }, - "mode": "Strict", - "metadata": { - "_transformationType": "mask", - "_transformedFieldDataType": "string", - "_transformedFieldSchemaType": "string", - "section": "transformation" - } - } - ], + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + }, + "connectors_config":[{"id":"6c3fc8c2-357d-489b-b0c9-afdde6e5c6c0","connector_id":"kafka","connector_config":{"type":"kafka","topic":"telemetry.ingest","kafkaBrokers":"kafka-headless.kafka.svc:9092"},"version":"v1"}, {"id":"6c3fc8c2-357d-489b-b0c9-afdde6e5cai","connector_id":"debezium","connector_config":{"type":"debezium","topic":"telemetry.ingest","kafkaBrokers":"kafka-headless.kafka.svc:9092"},"version":"v1"}], "tags": [] } }, @@ -228,7 +218,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -250,9 +240,18 @@ export const TestInputsForDatasetCreate = { "additionalProperties": true }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets" - } + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + 
"file_upload_path": [ + "telemetry.json" + ] + }, } }, @@ -265,7 +264,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "master-dataset", + "type": "master", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -287,9 +286,18 @@ export const TestInputsForDatasetCreate = { "additionalProperties": true }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets" - } + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": true + }, + "keys_config": { + "data_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + }, } }, VALID_MORE_THAN_MINIMAL_DATASET: { @@ -301,7 +309,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -325,14 +333,24 @@ export const TestInputsForDatasetCreate = { "denorm_fields": [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "master-telemetry" } ] }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets" - } + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + }, } }, VALID_MORE_THAN_MINIMAL_MASTER_DATASET: { @@ -344,7 +362,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "master-dataset", + "type": "master", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -368,14 +386,24 @@ export const TestInputsForDatasetCreate = { "denorm_fields": [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "telemetry" } ] }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets" - }, + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": true + }, + "keys_config": { + "data_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + } } }, VALID_MASTER_DATASET: { @@ -387,7 +415,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "master-dataset", + "type": "master", "name": "sb-telemetry2", "validation_config": { "validate": true, @@ -420,13 +448,23 @@ export const TestInputsForDatasetCreate = { "denorm_fields": [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "telemetry" } ] }, "dataset_config": { - "data_key": "", - "timestamp_key": "ets" + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": true + }, + "keys_config": { + "data_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] }, "tags": [] } @@ -444,41 +482,6 @@ export const TestInputsForDatasetCreate = { } }, - DATASET_WITH_INVALID_TIMESTAMP: { - "id": "api.datasets.create", - "ver": "v1", - "ts": "2024-04-10T16:10:50+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" - }, - "request": { - "dataset_id": "sb-ddd", - "type": "dataset", - "name": "sb-telemetry2", - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "eid": { - "type": "string" - }, - "ver": { - "type": "string" - }, - "required": [ - 
"eid" - ] - }, - "additionalProperties": true - }, - "dataset_config": { - "data_key": "", - "timestamp_key": "lastAccessed" - }, - "tags": [] - } - }, - DATASET_WITH_DUPLICATE_DENORM_KEY: { "id": "api.datasets.create", "ver": "v2", @@ -488,7 +491,7 @@ export const TestInputsForDatasetCreate = { }, "request": { "dataset_id": "sb-ddd", - "type": "dataset", + "type": "event", "name": "sb-telemetry2", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", @@ -510,11 +513,13 @@ export const TestInputsForDatasetCreate = { "denorm_fields": [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "telemetry" }, { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "telemetry" } ] } @@ -580,19 +585,19 @@ export const DATASET_CREATE_SUCCESS_FIXTURES = [ "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, { - "title": "Dataset creation success: When multiple transformation payload provided with same field key", - "requestPayload": TestInputsForDatasetCreate.VALID_DATASET_WITH_MULTIPLE_TRANSFORMATIONS, + "title": "Dataset creation success: When connectors payload provided", + "requestPayload": TestInputsForDatasetCreate.VALID_DATASET_WITH_CONNECTORS, "httpStatus": httpStatus.OK, "status": "SUCCESS", "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, { - "title": "Dataset creation success: Geenerating ingestion spec successfully using the data schema", - "requestPayload": TestInputsForDatasetCreate.VALID_DATASET_WITH_DEFAULT_TS, + "title": "Dataset creation success: When multiple transformation payload provided with same field key", + "requestPayload": TestInputsForDatasetCreate.VALID_DATASET_WITH_MULTIPLE_TRANSFORMATIONS, "httpStatus": httpStatus.OK, "status": "SUCCESS", "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" - }, + } ] export const DATASET_FAILURE_DUPLICATE_DENORM_FIXTURES = [ @@ -602,12 +607,5 @@ export const DATASET_FAILURE_DUPLICATE_DENORM_FIXTURES = [ "httpStatus": httpStatus.BAD_REQUEST, "status": "FAILED", "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" - }, - { - "title": "Master Dataset creation failure: Dataset contains duplicate denorm out field", - "requestPayload": _.set(TestInputsForDatasetCreate.DATASET_WITH_DUPLICATE_DENORM_KEY, "request.type", "master-dataset"), - "httpStatus": httpStatus.BAD_REQUEST, - "status": "FAILED", - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" } ] \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetList/DatasetList.spec.ts b/api-service/src/tests/DatasetManagement/DatasetList/DatasetList.spec.ts index 67344e5a..380ee8ad 100644 --- a/api-service/src/tests/DatasetManagement/DatasetList/DatasetList.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetList/DatasetList.spec.ts @@ -1,16 +1,14 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; -import { apiId, errorCode } from "../../../controllers/DatasetList/DatasetList"; +import { apiId } from "../../../controllers/DatasetList/DatasetList"; import { TestInputsForDatasetList } from "./Fixtures"; import { Dataset } from "../../../models/Dataset"; import { DatasetDraft } from "../../../models/DatasetDraft"; -import { DatasetTransformations } from 
"../../../models/Transformation"; -import { DatasetTransformationsDraft } from "../../../models/TransformationDraft"; chai.use(spies); chai.should(); @@ -31,12 +29,6 @@ describe("DATASET LIST API", () => { chai.spy.on(DatasetDraft, "findAll", () => { return Promise.resolve([TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA]) }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA]) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA]) - }) chai .request(app) .post("/v2/datasets/list") @@ -50,7 +42,7 @@ describe("DATASET LIST API", () => { res.body.result.count.should.be.eq(2) res.body.params.msgid.should.be.eq(msgid) const result = JSON.stringify(res.body.result.data) - const expectedResult = JSON.stringify([{ ..._.omit(TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA, ["data_version"]), version: 1, transformations_config: [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA, ["dataset_id"])] }, { ...TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA, "transformations_config": [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA, ["dataset_id"])] }]) + const expectedResult = JSON.stringify(TestInputsForDatasetList.VALID_RESPONSE) result.should.be.eq(expectedResult) done(); }); @@ -60,11 +52,8 @@ describe("DATASET LIST API", () => { chai.spy.on(DatasetDraft, "findAll", () => { return Promise.resolve([TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA]) }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA]) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA]) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([]) }) chai .request(app) @@ -79,7 +68,7 @@ describe("DATASET LIST API", () => { res.body.result.count.should.be.eq(1) res.body.params.msgid.should.be.eq(msgid) const result = JSON.stringify(res.body.result.data) - const expectedResult = JSON.stringify([{ ...TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA, "transformations_config": [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA, ["dataset_id"])] }]) + const expectedResult = JSON.stringify([{ ...TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA }]) result.should.be.eq(expectedResult) done(); }); @@ -89,11 +78,8 @@ describe("DATASET LIST API", () => { chai.spy.on(Dataset, "findAll", () => { return Promise.resolve([TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA]) }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA]) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA]) + chai.spy.on(DatasetDraft, "findAll", () => { + return Promise.resolve([]) }) chai .request(app) @@ -108,39 +94,7 @@ describe("DATASET LIST API", () => { res.body.result.count.should.be.eq(1) res.body.params.msgid.should.be.eq(msgid) const result = JSON.stringify(res.body.result.data) - const expectedResult = JSON.stringify([{ ..._.omit(TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA, ["data_version"]), version: 1, transformations_config: [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA, ["dataset_id"])] }]) - result.should.be.eq(expectedResult) - done(); - }); - 
}); - - it("Dataset list success: When sortBy is provided in request payload", (done) => { - chai.spy.on(Dataset, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA]) - }) - chai.spy.on(DatasetDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA]) - }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA]) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA]) - }) - chai - .request(app) - .post("/v2/datasets/list") - .send(TestInputsForDatasetList.REQUEST_WITH_SORTBY) - .end((err, res) => { - res.should.have.status(httpStatus.OK); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("SUCCESS") - res.body.result.should.be.a("object") - res.body.result.count.should.be.eq(2) - res.body.params.msgid.should.be.eq(msgid) - const result = JSON.stringify(res.body.result.data) - const expectedResult = JSON.stringify([{ ...TestInputsForDatasetList.VALID_DRAFT_DATASET_SCHEMA, "transformations_config": [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_DRAFT_SCHEMA, ["dataset_id"])] }, { ..._.omit(TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA, ["data_version"]), version: 1, transformations_config: [_.omit(TestInputsForDatasetList.TRANSFORMATIONS_LIVE_SCHEMA, ["dataset_id"])] }]) + const expectedResult = JSON.stringify([{ ...TestInputsForDatasetList.VALID_LIVE_DATASET_SCHEMA}]) result.should.be.eq(expectedResult) done(); }); @@ -162,23 +116,4 @@ describe("DATASET LIST API", () => { }); }); - - it("Dataset list failure: Connection to the database failed", (done) => { - chai.spy.on(Dataset, "findAll", () => { - return Promise.reject() - }) - chai - .request(app) - .post("/v2/datasets/list") - .send(TestInputsForDatasetList.REQUEST_WITHOUT_FILTERS) - .end((err, res) => { - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.error.code.should.be.eq(errorCode) - res.body.error.message.should.be.eq("Failed to list dataset") - done(); - }); - }); }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetList/Fixtures.ts b/api-service/src/tests/DatasetManagement/DatasetList/Fixtures.ts index a905b1c0..31a5ba1c 100644 --- a/api-service/src/tests/DatasetManagement/DatasetList/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DatasetList/Fixtures.ts @@ -1,181 +1,53 @@ export const TestInputsForDatasetList = { VALID_DRAFT_DATASET_SCHEMA: { "dataset_id": "telemetry", - "id": "telemetry.1", "name": "telemetry", - "type": "dataset", - "validation_config": { - "validate": true, - "mode": "Strict" - }, - "extraction_config": { - "is_batch_event": true, - "extraction_key": "events", - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "id", - "dedup_period": 604800 - } - }, - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "msgid", - "dedup_period": 604800 - }, - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "eid": { - "type": "string" - }, - "ver": { - "type": "string" - }, - "required": [ - "eid" - ] + "type": "events", + "dataset_config": { + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + 
"cache_enabled": false }, - "additionalProperties": true - }, - "router_config": { - "topic": "" - }, - "denorm_config": { - "redis_db_host": "localhost", - "redis_db_port": 6379, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata" - } + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" ] }, - "dataset_config": { - "data_key": "eid", - "timestamp_key": "ets", - "entry_topic": "local.ingest", - "redis_db_host": "localhost", - "redis_db_port": 6379, - "index_data": true, - "redis_db": 0 - }, "tags": [ "tag1", "tag2" ], "status": "Draft", "version": 1, - "client_state": {}, - "created_by": "SYSTEM", - "updated_by": "SYSTEM", - "created_date": "2024-04-15 07:51:49.49", - "update_date": "", - "published_date": "" + "api_version": "v2" }, VALID_LIVE_DATASET_SCHEMA: { "dataset_id": "sb-telemetry", - "id": "sb-telemetry", "name": "sb-telemetry", - "type": "master-dataset", - "validation_config": { - "validate": true, - "mode": "Strict" - }, - "extraction_config": { - "is_batch_event": true, - "extraction_key": "events", - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "id", - "dedup_period": 604800 - } - }, - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "msgid", - "dedup_period": 604800 - }, - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "eid": { - "type": "string" - }, - "ver": { - "type": "string" - }, - "required": [ - "eid" - ] + "type": "master", + "dataset_config": { + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": true }, - "additionalProperties": true - }, - "router_config": { - "topic": "" - }, - "denorm_config": { - "redis_db_host": "localhost", - "redis_db_port": 6379, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata" - } + "keys_config": { + "data_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" ] }, - "dataset_config": { - "data_key": "eid", - "timestamp_key": "ets", - "entry_topic": "local.ingest", - "redis_db_host": "localhost", - "redis_db_port": 6379, - "index_data": true, - "redis_db": 0 - }, "tags": [ "tag1", "tag2" ], "status": "Live", "data_version": 1, - "created_by": "SYSTEM", - "updated_by": "SYSTEM", - "created_date": "2024-04-16 07:51:49.49", - "update_date": "", - "published_date": "" - }, - TRANSFORMATIONS_DRAFT_SCHEMA: { - "dataset_id": "telemetry.1", - "field_key": "eid", - "transformation_function": { - "type": "mask", - "expr": "eid", - "condition": null - }, - "mode": "Strict", - "metadata": { - "_transformationType": "mask", - "_transformedFieldDataType": "string", - "_transformedFieldSchemaType": "string", - "section": "transformation" - } - }, - TRANSFORMATIONS_LIVE_SCHEMA: { - "dataset_id": "sb-telemetry", - "field_key": "eid", - "transformation_function": { - "type": "mask", - "expr": "eid", - "condition": null - }, - "mode": "Strict", - "metadata": { - "_transformationType": "mask", - "_transformedFieldDataType": "string", - "_transformedFieldSchemaType": "string", - } + "api_version": "v2" }, REQUEST_WITHOUT_FILTERS: { @@ -206,18 +78,7 @@ export const TestInputsForDatasetList = { "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" }, "request": { - "filters": { status: "Live", type: "master-dataset" } - } - }, - REQUEST_WITH_SORTBY: { - "id": "api.datasets.list", - "ver": "v2", - "ts": "2024-04-10T16:10:50+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6d" - }, - 
"request": { - "sortBy": [{ "field": "created_date", "order": "asc" }] + "filters": { status: "Live", type: "master" } } }, INVALID_REQUEST: { @@ -230,5 +91,6 @@ export const TestInputsForDatasetList = { "request": { "filters": { status: ["Ready"] } } - } + }, + VALID_RESPONSE: [{"dataset_id":"sb-telemetry","name":"sb-telemetry","type":"master","dataset_config":{"indexing_config":{"olap_store_enabled":false,"lakehouse_enabled":true,"cache_enabled":true},"keys_config":{"data_key":"ets"},"file_upload_path":["telemetry.json"]},"tags":["tag1","tag2"],"status":"Live","data_version":1,"api_version":"v2"},{"dataset_id":"telemetry","name":"telemetry","type":"events","dataset_config":{"indexing_config":{"olap_store_enabled":false,"lakehouse_enabled":true,"cache_enabled":false},"keys_config":{"timestamp_key":"ets"},"file_upload_path":["telemetry.json"]},"tags":["tag1","tag2"],"status":"Draft","version":1,"api_version":"v2"}] } \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetRead/DatasetRead.spec.ts b/api-service/src/tests/DatasetManagement/DatasetRead/DatasetRead.spec.ts index dce1b54c..cfd333eb 100644 --- a/api-service/src/tests/DatasetManagement/DatasetRead/DatasetRead.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetRead/DatasetRead.spec.ts @@ -1,19 +1,20 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; -import { apiId } from "../../../controllers/DatasetRead/DatasetRead"; +import { apiId, defaultFields } from "../../../controllers/DatasetRead/DatasetRead"; import { TestInputsForDatasetRead } from "./Fixtures"; import { DatasetTransformations } from "../../../models/Transformation"; -import { DatasetTransformationsDraft } from "../../../models/TransformationDraft"; import { Dataset } from "../../../models/Dataset"; import { DatasetDraft } from "../../../models/DatasetDraft"; -import { Datasource } from "../../../models/Datasource"; import { DatasetSourceConfig } from "../../../models/DatasetSourceConfig"; +import { ConnectorInstances } from "../../../models/ConnectorInstances"; +import { DatasetTransformationsDraft } from "../../../models/TransformationDraft"; import { DatasetSourceConfigDraft } from "../../../models/DatasetSourceConfigDraft"; +import { sequelize } from "../../../connections/databaseConnection"; import { DatasourceDraft } from "../../../models/DatasourceDraft"; chai.use(spies); @@ -27,8 +28,8 @@ describe("DATASET READ API", () => { }); it("Dataset read success: When minimal fields requested", (done) => { - chai.spy.on(Dataset, "findAll", () => { - return Promise.resolve([{ 'name': 'sb-telemetry', 'data_version': 1 }]) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ "name": "sb-telemetry", "version": 1 }) }) chai .request(app) @@ -39,43 +40,37 @@ describe("DATASET READ API", () => { res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("SUCCESS") res.body.result.should.be.a("object") - res.body.result.name.should.be.eq('sb-telemetry') + res.body.result.name.should.be.eq("sb-telemetry") const result = JSON.stringify(res.body.result) - result.should.be.eq(JSON.stringify({ 'name': 'sb-telemetry', 'version': 1 })) + result.should.be.eq(JSON.stringify({ name: "sb-telemetry", version: 1 })) done(); }); }); it("Dataset read success: Fetch 
all dataset fields when fields param is empty", (done) => { - chai.spy.on(DatasetDraft, "findAll", () => { - return Promise.resolve([TestInputsForDatasetRead.DRAFT_SCHEMA]) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([]) + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetRead.DRAFT_SCHEMA) }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?status=Draft") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { res.should.have.status(httpStatus.OK); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("SUCCESS") res.body.result.should.be.a("object") - res.body.result.type.should.be.eq('dataset') - res.body.result.status.should.be.eq('Draft') + res.body.result.type.should.be.eq("event") + res.body.result.status.should.be.eq("Draft") const result = JSON.stringify(res.body.result) - result.should.be.eq(JSON.stringify({ ...TestInputsForDatasetRead.DRAFT_SCHEMA, "transformations_config": [] })) + result.should.be.eq(JSON.stringify({ ...TestInputsForDatasetRead.DRAFT_SCHEMA })) done(); }); }); - it("Dataset read success: Fetch live dataset when status param is empty", (done) => { - chai.spy.on(Dataset, "findAll", () => { - return Promise.resolve([TestInputsForDatasetRead.LIVE_SCHEMA]) - }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA) + it("Dataset read success: Fetch live dataset when mode param not provided", (done) => { + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve(TestInputsForDatasetRead.LIVE_SCHEMA) }) chai .request(app) @@ -86,14 +81,14 @@ describe("DATASET READ API", () => { res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("SUCCESS") res.body.result.should.be.a("object") - res.body.result.status.should.be.eq('Live') + res.body.result.status.should.be.eq("Live") const result = JSON.stringify(res.body.result) - result.should.be.eq(JSON.stringify({ ..._.omit({ ...TestInputsForDatasetRead.LIVE_SCHEMA, "transformations_config": TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA }, ["data_version"]), version: 1 })) + result.should.be.eq(JSON.stringify({ ...TestInputsForDatasetRead.LIVE_SCHEMA })) done(); }); }); - it("Dataset read success: Creating draft on mode=edit if no draft found", (done) => { + it("Dataset read success: Creating draft on mode=edit if no draft found in v2", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve() }) @@ -103,148 +98,213 @@ describe("DATASET READ API", () => { chai.spy.on(DatasetTransformations, "findAll", () => { return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA) }) - chai.spy.on(Datasource, "findAll", () => { - return Promise.resolve([TestInputsForDatasetRead.DATASOURCE_SCHEMA]) + chai.spy.on(ConnectorInstances, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.CONNECTORS_SCHEMA_V2) }) - chai.spy.on(DatasetSourceConfig, "findAll", () => { - return Promise.resolve([]) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.MASTER_DATASET_SCHEMA) }) chai.spy.on(DatasetDraft, "create", () => { return Promise.resolve({ dataValues: TestInputsForDatasetRead.DRAFT_SCHEMA }) }) - chai.spy.on(DatasetTransformationsDraft, "bulkCreate", () => { - return Promise.resolve({}) + chai + .request(app) + .get("/v2/datasets/read/sb-telemetry?mode=edit") + .end((err, res) => { + 
res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq(apiId); + res.body.params.status.should.be.eq("SUCCESS") + res.body.result.should.be.a("object") + res.body.result.name.should.be.eq("sb-telemetry") + const result = JSON.stringify(res.body.result) + result.should.be.eq(JSON.stringify(TestInputsForDatasetRead.DRAFT_SCHEMA)) + done(); + }); + }); + + it("Dataset read success: Creating draft on mode=edit if no draft found in v1", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve() }) - chai.spy.on(DatasourceDraft, "bulkCreate", () => { - return Promise.resolve({}) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ ...TestInputsForDatasetRead.LIVE_SCHEMA, "api_version": "v1" }) }) - chai.spy.on(DatasetSourceConfigDraft, "bulkCreate", () => { - return Promise.resolve({}) + chai.spy.on(DatasetTransformations, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA_V1) }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([]) + chai.spy.on(DatasetSourceConfig, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.CONNECTORS_SCHEMA_V1) + }) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.MASTER_DATASET_SCHEMA) + }) + chai.spy.on(DatasetDraft, "create", () => { + return Promise.resolve({ dataValues: TestInputsForDatasetRead.DRAFT_SCHEMA }) }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?status=Draft&mode=edit") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { res.should.have.status(httpStatus.OK); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("SUCCESS") res.body.result.should.be.a("object") - res.body.result.name.should.be.eq('sb-telemetry') + res.body.result.name.should.be.eq("sb-telemetry") const result = JSON.stringify(res.body.result) - result.should.be.eq(JSON.stringify({ ...TestInputsForDatasetRead.DRAFT_SCHEMA, "transformations_config": [] })) + result.should.be.eq(JSON.stringify(TestInputsForDatasetRead.DRAFT_SCHEMA)) done(); }); }); - it("Dataset read success: Updating dataset status to draft on mode=edit if dataset status is Live", (done) => { + it("Dataset read success: Migrating v1 draft dataset to v2 on mode=edit", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ dataset_id: "sb-telemetry", name: "sb-telemetry", status: "Live", data_schema: {} }) - }) - chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ dataset_id: "sb-telemetry", name: "sb-telemetry", status: "Live", data_version: 2, data_schema: {} }) + return Promise.resolve(TestInputsForDatasetRead.DRAFT_SCHEMA_V1) }) - chai.spy.on(DatasetTransformations, "findAll", () => { - return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA) + chai.spy.on(DatasetTransformationsDraft, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA_V1) }) - chai.spy.on(Datasource, "findAll", () => { - return Promise.resolve([TestInputsForDatasetRead.DATASOURCE_SCHEMA]) + chai.spy.on(DatasetSourceConfigDraft, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.CONNECTORS_SCHEMA_V1) }) - chai.spy.on(DatasetSourceConfig, "findAll", () => { - return Promise.resolve([]) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({ dataValues: TestInputsForDatasetRead.DRAFT_SCHEMA }) }) - 
chai.spy.on(DatasetTransformationsDraft, "update", () => { + chai.spy.on(DatasetTransformationsDraft, "destroy", () => { return Promise.resolve({}) }) - chai.spy.on(DatasourceDraft, "update", () => { + chai.spy.on(DatasetSourceConfigDraft, "destroy", () => { return Promise.resolve({}) }) - chai.spy.on(DatasetSourceConfigDraft, "update", () => { + chai.spy.on(DatasourceDraft, "destroy", () => { return Promise.resolve({}) }) - chai.spy.on(DatasetDraft, "update", () => { - return Promise.resolve({}) + const t = chai.spy.on(sequelize, "transaction", () => { + return Promise.resolve(sequelize.transaction) }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([]) + chai.spy.on(t, "commit", () => { + return Promise.resolve({}) }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?status=Draft&mode=edit") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { res.should.have.status(httpStatus.OK); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("SUCCESS") res.body.result.should.be.a("object") - res.body.result.name.should.be.eq('sb-telemetry') - const result = JSON.stringify(_.omit(res.body.result, "version_key")) - result.should.be.eq(JSON.stringify({"dataset_id":"sb-telemetry","name":"sb-telemetry","data_schema":{},"version":2,"status":"Draft","api_version":"v2","transformations_config":[]})) + res.body.result.name.should.be.eq("sb-telemetry") + const result = JSON.stringify(res.body.result) + result.should.be.eq(JSON.stringify(_.pick(TestInputsForDatasetRead.DRAFT_SCHEMA_V1, defaultFields))) done(); }); }); it("Dataset read failure: Updating dataset status to draft on mode=edit fails as live record not found", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ dataset_id: "sb-telemetry", name: "sb-telemetry", status: "Live", data_schema: {} }) + return Promise.resolve() }) chai.spy.on(Dataset, "findOne", () => { return Promise.resolve() }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?status=Draft&mode=edit") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { res.should.have.status(httpStatus.NOT_FOUND); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to fetch live dataset") + res.body.error.message.should.be.eq("Dataset with the given dataset_id:sb-telemetry not found") res.body.error.code.should.be.eq("DATASET_NOT_FOUND") done(); }); }); - it("Dataset read failure: When the dataset of requested dataset_id not found", (done) => { + it("Dataset read failure: When dependent denorm master dataset not found", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve() + }) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ ...TestInputsForDatasetRead.LIVE_SCHEMA, "api_version": "v1" }) + }) + chai.spy.on(DatasetTransformations, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA_V1) + }) + chai.spy.on(DatasetSourceConfig, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.CONNECTORS_SCHEMA_V1) + }) chai.spy.on(Dataset, "findAll", () => { return Promise.resolve([]) }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?fields=name") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { res.should.have.status(httpStatus.NOT_FOUND); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); 
res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Dataset with the given dataset_id not found") - res.body.error.code.should.be.eq("DATASET_NOT_FOUND") + res.body.error.message.should.be.eq("The dependent dataset not found") + res.body.error.code.should.be.eq("DEPENDENT_MASTER_DATA_NOT_FOUND") done(); }); }); - it("Dataset read failure: When specified field of live dataset cannot be found", (done) => { + it("Dataset read failure: When dependent denorm master dataset not live", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve() + }) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ ...TestInputsForDatasetRead.LIVE_SCHEMA, "api_version": "v1" }) + }) + chai.spy.on(DatasetTransformations, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.TRANSFORMATIONS_SCHEMA_V1) + }) + chai.spy.on(DatasetSourceConfig, "findAll", () => { + return Promise.resolve(TestInputsForDatasetRead.CONNECTORS_SCHEMA_V1) + }) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([{"dataset_id":"master_dataset", "dataset_config":{"cache_config":{"redis_db":20}}}]) + }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?fields=data") + .get("/v2/datasets/read/sb-telemetry?mode=edit") .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); + res.should.have.status(httpStatus.PRECONDITION_REQUIRED); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("FAILED") - expect(res.body.error.message).to.match(/^The specified field(.+) in the dataset cannot be found.$/) - res.body.error.code.should.be.eq("DATASET_INVALID_FIELDS") + res.body.error.message.should.be.eq("The dependent master dataset is not published") + res.body.error.code.should.be.eq("DEPENDENT_MASTER_DATA_NOT_LIVE") done(); }); }); - it("Dataset read failure: When specified field of draft dataset cannot be found", (done) => { + it("Dataset read failure: When the dataset of requested dataset_id not found", (done) => { + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve(null) + }) chai .request(app) - .get("/v2/datasets/read/sb-telemetry?fields=data&status=Draft") + .get("/v2/datasets/read/sb-telemetry?fields=name") + .end((err, res) => { + res.should.have.status(httpStatus.NOT_FOUND); + res.body.should.be.a("object") + res.body.id.should.be.eq(apiId); + res.body.params.status.should.be.eq("FAILED") + res.body.error.message.should.be.eq("Dataset with the given dataset_id:sb-telemetry not found") + res.body.error.code.should.be.eq("DATASET_NOT_FOUND") + done(); + }); + }); + + it("Dataset read failure: When specified field of live dataset cannot be found", (done) => { + chai + .request(app) + .get("/v2/datasets/read/sb-telemetry?fields=data") .end((err, res) => { res.should.have.status(httpStatus.BAD_REQUEST); res.body.should.be.a("object") @@ -256,21 +316,19 @@ describe("DATASET READ API", () => { }); }); - it("Dataset read failure: Connection to the database failed", (done) => { - chai.spy.on(Dataset, "findAll", () => { - return Promise.reject() - }) + it("Dataset read failure: When specified field of draft dataset cannot be found", (done) => { chai .request(app) - .get("/v2/datasets/read/sb-telemetry") + .get("/v2/datasets/read/sb-telemetry?fields=data&mode=edit") .end((err, res) => { - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); + res.should.have.status(httpStatus.BAD_REQUEST); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); 
res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to read dataset") - res.body.error.code.should.be.eq("DATASET_READ_FAILURE") + expect(res.body.error.message).to.match(/^The specified field(.+) in the dataset cannot be found.$/) + res.body.error.code.should.be.eq("DATASET_INVALID_FIELDS") done(); }); }); + }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetRead/Fixtures.ts b/api-service/src/tests/DatasetManagement/DatasetRead/Fixtures.ts index 93c118e8..d520a102 100644 --- a/api-service/src/tests/DatasetManagement/DatasetRead/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DatasetRead/Fixtures.ts @@ -1,102 +1,43 @@ export const TestInputsForDatasetRead = { DRAFT_SCHEMA: { "dataset_id": "sb-telemetry", - "id": "sb-telemetry", "name": "sb-telemetry", - "type": "dataset", - "validation_config": { - "validate": true, - "mode": "Strict" - }, - "extraction_config": { - "is_batch_event": true, - "extraction_key": "events", - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "id", - "dedup_period": 604800 - } - }, - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "mid", - "dedup_period": 604800 - }, - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "eid": { - "type": "string" - }, - "ver": { - "type": "string" - }, - "required": [ - "eid" - ] - }, - "additionalProperties": true - }, - "router_config": { - "topic": "" - }, - "denorm_config": { - "redis_db_host": "localhost", - "redis_db_port": 6379, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata" - } - ] - }, - "dataset_config": { - "data_key": "eid", - "timestamp_key": "ets", - "entry_topic": "local.ingest", - "redis_db_host": "localhost", - "redis_db_port": 6379, - "index_data": true, - "redis_db": 0 - }, + "type": "event", + "status": "Draft", "tags": [ "tag1", "tag2" ], - "status": "Draft", "version": 1, - "client_state": {}, - "created_by": "SYSTEM", - "updated_by": "SYSTEM", - "created_date": "", - "update_date": "", - "published_date": "" + "api_version": "v2", + "dataset_config": { + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + } }, - LIVE_SCHEMA: { - + DRAFT_SCHEMA_V1: { "dataset_id": "sb-telemetry", - "id": "sb-telemetry", "name": "sb-telemetry", - "type": "dataset", + "type": "event", + "status": "Draft", + "tags": [ + "tag1", + "tag2" + ], "validation_config": { "validate": true, + "validation_mode": "Strict", "mode": "Strict" }, - "extraction_config": { - "is_batch_event": true, - "extraction_key": "events", - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "id", - "dedup_period": 604800 - } - }, - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "mid", - "dedup_period": 604800 - }, "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", @@ -107,47 +48,62 @@ export const TestInputsForDatasetRead = { "ver": { "type": "string" }, + "ets": { + "type": "string" + }, "required": [ "eid" ] }, "additionalProperties": true }, - "router_config": { - "topic": "" - }, - "denorm_config": { - "redis_db_host": "localhost", - "redis_db_port": 6379, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata" - } - ] - }, + "version": 1, + "api_version": "v1", "dataset_config": { - 
"data_key": "eid", "timestamp_key": "ets", - "entry_topic": "local.ingest", + "data_key": "", "redis_db_host": "localhost", "redis_db_port": 6379, - "index_data": true, "redis_db": 0 - }, + } + }, + LIVE_SCHEMA: { + "dataset_id": "sb-telemetry", + "name": "sb-telemetry", + "type": "event", + "status": "Live", "tags": [ "tag1", "tag2" ], - "status": "Live", "data_version": 1, - "created_by": "SYSTEM", - "updated_by": "SYSTEM", - "created_date": "", - "update_date": "", - "published_date": "" + "api_version": "v2", + "denorm_config": { + "denorm_fields": [ + { + "denorm_key": "actor.id", + "denorm_out_field": "userdata", + "redis_db": 16 + } + ] + }, + "dataset_config": { + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] + } }, - TRANSFORMATIONS_SCHEMA: [ + MASTER_DATASET_SCHEMA:[{"dataset_id":"master_dataset", "dataset_config":{"cache_config":{"redis_db":16}}}], + TRANSFORMATIONS_SCHEMA: [{ "field_key": "eid", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }], + TRANSFORMATIONS_SCHEMA_V1: [ { "field_key": "eid", "transformation_function": { @@ -164,27 +120,24 @@ export const TestInputsForDatasetRead = { } } ], - DATASOURCE_SCHEMA:{ - "id": "sb-telemetry_sb-telemetry", - "datasource": "sb-telemetry", - "dataset_id": "sb-telemetry", - "ingestion_spec": {"type":"kafka","spec":{"dataSchema":{"dataSource":"dataset-conf_day","dimensionsSpec":{"dimensions":[{"type":"string","name":"a"},{"type":"string","name":"obsrv.meta.source.connector"},{"type":"string","name":"obsrv.meta.source.id"}]},"timestampSpec":{"column":"obsrv_meta.syncts","format":"auto"},"metricsSpec":[],"granularitySpec":{"type":"uniform","segmentGranularity":"DAY","queryGranularity":"none","rollup":false}},"tuningConfig":{"type":"kafka","maxBytesInMemory":134217728,"maxRowsPerSegment":5000000,"logParseExceptions":true},"ioConfig":{"type":"kafka","consumerProperties":{"bootstrap.servers":"localhost:9092"},"taskCount":1,"replicas":1,"taskDuration":"PT1H","useEarliestOffset":true,"completionTimeout":"PT1H","inputFormat":{"type":"json","flattenSpec":{"useFieldDiscovery":true,"fields":[{"type":"path","expr":"$.['a']","name":"a"},{"type":"path","expr":"$.obsrv_meta.['syncts']","name":"obsrv_meta.syncts"},{"type":"path","expr":"$.obsrv_meta.source.['connector']","name":"obsrv.meta.source.connector"},{"type":"path","expr":"$.obsrv_meta.source.['connectorInstance']","name":"obsrv.meta.source.id"},{"expr":"$.obsrv_meta.syncts","name":"obsrv_meta.syncts","type":"path"}]}},"appendToExisting":false}}}, - "datasource_ref": "sb-telemetry_DAY", - "retention_period": { - "enabled": "false" - }, - "archival_policy": { - "enabled": "false" - }, - "purge_policy": { - "enabled": "false" - }, - "backup_config": { - "enabled": "false" - }, - "status": "Live", - "created_by": "SYSTEM", - "updated_by": "SYSTEM", - "published_date": "2023-07-03 00:00:00" - } + CONNECTORS_SCHEMA_V1: [ + { + "id": "hsh882ehdshe", + "connector_type": "kafka", + "connector_config": { + "topic": "local.ingest", + "brokerURL": "localhost:9092" + } + } + ], + CONNECTORS_SCHEMA_V2: [ + { + "id": "hsh882ehdshe", + "connector_id": "kafka", + "connector_config": { + "topic": "local.ingest", + "brokerURL": "localhost:9092" + } + } + ] } \ No newline at end of file diff --git 
a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetDelete.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetDelete.spec.ts
index 4b612625..0016ae5d 100644
--- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetDelete.spec.ts
+++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetDelete.spec.ts
@@ -1,9 +1,9 @@
-import app from "../../../../app";
+import app from "../../../app";
 import chai from "chai";
 import chaiHttp from "chai-http";
 import spies from "chai-spies";
 import httpStatus from "http-status";
-import { describe, it } from 'mocha';
+import { describe, it } from "mocha";
 import _ from "lodash";
 import { TestInputsForDatasetStatusTransition } from "./Fixtures";
 import { DatasetDraft } from "../../../models/DatasetDraft";
@@ -78,7 +78,7 @@ describe("DATASET STATUS TRANSITION DELETE", () => {
                res.body.id.should.be.eq("api.datasets.status-transition");
                res.body.params.status.should.be.eq("FAILED")
                res.body.params.msgid.should.be.eq(msgid)
-                res.body.error.message.should.be.eq("Dataset not found to delete")
+                res.body.error.message.should.be.eq("Dataset not found for dataset: telemetry.1")
                res.body.error.code.should.be.eq("DATASET_NOT_FOUND")
                done();
            });
diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts
index e4b7b067..e26ddafe 100644
--- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts
+++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetLive.spec.ts
@@ -1,14 +1,17 @@
-import app from "../../../../app";
+import app from "../../../app";
 import chai from "chai";
 import chaiHttp from "chai-http";
 import spies from "chai-spies";
 import httpStatus from "http-status";
-import { describe, it } from 'mocha';
+import { describe, it } from "mocha";
 import _ from "lodash";
 import { TestInputsForDatasetStatusTransition } from "./Fixtures";
 import { DatasetDraft } from "../../../models/DatasetDraft";
 import { commandHttpService } from "../../../connections/commandServiceConnection";
 import { sequelize } from "../../../connections/databaseConnection";
+import { DatasourceDraft } from "../../../models/DatasourceDraft";
+import { Dataset } from "../../../models/Dataset";
+import { Datasource } from "../../../models/Datasource";

 chai.use(spies);
 chai.should();
@@ -24,17 +27,116 @@ describe("DATASET STATUS TRANSITION LIVE", () => {
     it("Dataset status transition success: When the action is to set dataset live", (done) => {
         chai.spy.on(DatasetDraft, "findOne", () => {
-            return Promise.resolve({ dataset_id: "telemetry", status: "ReadyToPublish" })
+            return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH)
+        })
+        chai.spy.on(Dataset, "findAll", () => {
+            return Promise.resolve([{ "id": "master-dataset", "status": "Live", "dataset_config": { "cache_config": { "redis_db": 21 } }, "api_version": "v2" }])
+        })
+        chai.spy.on(DatasetDraft, "update", () => {
+            return Promise.resolve({})
+        })
+        chai.spy.on(Dataset, "findOne", () => {
+            return Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } })
+        })
+        chai.spy.on(DatasourceDraft, "create", () => {
+            return Promise.resolve({})
+        })
+        const t = chai.spy.on(sequelize, "transaction", () => {
+            return Promise.resolve(sequelize.transaction)
+        })
+        chai.spy.on(t, "commit", () => {
+            return
Promise.resolve({}) + }) + chai.spy.on(commandHttpService, "post", () => { + return Promise.resolve({}) + }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) + .end((err, res) => { + res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("SUCCESS") + res.body.result.should.be.a("object") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.message.should.be.eq("Dataset status transition to Live successful") + res.body.result.dataset_id.should.be.eq("telemetry") + done(); + }); + }); + + it("Dataset status transition success: When the action is to set dataset live v1 by creating hudi spec", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH_HUDI) + }) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([{ "id": "master-dataset", "status": "Live", "dataset_config": { "cache_config": { "redis_db": 21 } }, "api_version": "v2" }]) + }) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({}) + }) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) + }) + chai.spy.on(DatasourceDraft, "create", () => { + return Promise.resolve({}) + }) + const t = chai.spy.on(sequelize, "transaction", () => { + return Promise.resolve(sequelize.transaction) + }) + chai.spy.on(t, "commit", () => { + return Promise.resolve({}) }) chai.spy.on(commandHttpService, "post", () => { return Promise.resolve({}) }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) + .end((err, res) => { + res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("SUCCESS") + res.body.result.should.be.a("object") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.message.should.be.eq("Dataset status transition to Live successful") + res.body.result.dataset_id.should.be.eq("telemetry") + done(); + }); + }); + + it("Dataset status transition success: When the action is to set dataset live v2 by updating hudi spec", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH_HUDI) + }) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([{ "id": "master-dataset", "status": "Live", "dataset_config": { "cache_config": { "redis_db": 21 } }, "api_version": "v2" }]) + }) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({}) + }) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ "api_version":"v2", "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) + }) + chai.spy.on(DatasourceDraft, "create", () => { + return Promise.resolve({}) + }) + chai.spy.on(Datasource, "findOne", () => { + return Promise.resolve({"ingestion_spec":{"dataset": "dataset-all-fields4", "schema": {"table": "dataset-all-fields4_events", "partitionColumn": "eid", "timestampColumn": "obsrv_meta.syncts", "primaryKey": "eid", "columnSpec": [{"type": "string", "name": "mid", "index": 1}, {"type": "epoch", "name": "ets", "index": 2}, {"type": "string", "name": 
"userdata.mid", "index": 3}, {"type": "epoch", "name": "userdata.ets", "index": 4}, {"type": "string", "name": "userdata.eid", "index": 5}, {"type": "string", "name": "email", "index": 6}, {"type": "string", "name": "obsrv.meta.source.connector", "index": 7}, {"type": "string", "name": "obsrv.meta.source.id", "index": 8}]}, "inputFormat": {"type": "json", "flattenSpec": {"fields": [{"type": "path", "expr": "$.mid", "name": "mid"}, {"type": "path", "expr": "$.ets", "name": "ets"}, {"type": "path", "expr": "$.eid", "name": "eid"}, {"type": "path", "expr": "$.userdata.mid", "name": "userdata.mid"}, {"type": "path", "expr": "$.userdata.ets", "name": "userdata.ets"}, {"type": "path", "expr": "$.userdata.eid", "name": "userdata.eid"}, {"type": "path", "expr": "$.email", "name": "email"}, {"type": "path", "expr": "$.obsrv_meta.syncts", "name": "obsrv_meta.syncts"}, {"type": "path", "expr": "$.obsrv_meta.source.connector", "name": "obsrv.meta.source.connector"}, {"type": "path", "expr": "$.obsrv_meta.source.connectorInstance", "name": "obsrv.meta.source.id"}]}}}}) + }) const t = chai.spy.on(sequelize, "transaction", () => { return Promise.resolve(sequelize.transaction) }) chai.spy.on(t, "commit", () => { return Promise.resolve({}) }) + chai.spy.on(commandHttpService, "post", () => { + return Promise.resolve({}) + }) chai .request(app) .post("/v2/datasets/status-transition") @@ -47,7 +149,109 @@ describe("DATASET STATUS TRANSITION LIVE", () => { res.body.result.should.be.a("object") res.body.params.msgid.should.be.eq(msgid) res.body.result.message.should.be.eq("Dataset status transition to Live successful") - res.body.result.dataset_id.should.be.eq("telemetry.1") + res.body.result.dataset_id.should.be.eq("telemetry") + done(); + }); + }); + + it("Dataset status transition failure: Unable to fetch redis db number for master dataset", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_MASTER_DATASET_INVALID) + }) + chai.spy.on(sequelize, "query", () => { + return Promise.resolve([]) + }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE_MASTER) + .end((err, res) => { + res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("FAILED") + res.body.params.msgid.should.be.eq(msgid) + res.body.error.message.should.be.eq("Unable to fetch the redis db index for the master data") + res.body.error.code.should.be.eq("REDIS_DB_INDEX_FETCH_FAILED") + done(); + }); + }); + + it("Dataset status transition success: When the action is to set master dataset live", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_MASTER_DATASET_SCHEMA_FOR_PUBLISH) + }) + chai.spy.on(sequelize, "query", () => { + return Promise.resolve([[{ nextval: 9 }]]) + }) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({}) + }) + const t = chai.spy.on(sequelize, "transaction", () => { + return Promise.resolve(sequelize.transaction) + }) + chai.spy.on(t, "commit", () => { + return Promise.resolve({}) + }) + chai.spy.on(commandHttpService, "post", () => { + return Promise.resolve({}) + }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE_MASTER) + .end((err, res) => { + 
res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("SUCCESS") + res.body.result.should.be.a("object") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.message.should.be.eq("Dataset status transition to Live successful") + res.body.result.dataset_id.should.be.eq("master-telemetry") + done(); + }); + }); + + it("Dataset status transition failure: When the dependent denorm master dataset is not live", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(_.clone(TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH)) + }) + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([{ "id": "master-dataset", "status": "Retired", "dataset_config": { "redis_db": 21 }, "api_version": "v1" }]) + }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) + .end((err, res) => { + res.should.have.status(httpStatus.PRECONDITION_REQUIRED); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("FAILED") + res.body.params.msgid.should.be.eq(msgid) + res.body.error.message.should.be.eq("The datasets with id:master-dataset are not in published status") + res.body.error.code.should.be.eq("DEPENDENT_MASTER_DATA_NOT_LIVE") + done(); + }); + }); + + it("Dataset status transition failure: When dataset to publish is self referencing the denorm master dataset", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve({...TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH, "id": "master-dataset"}) + }) + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) + .end((err, res) => { + res.should.have.status(httpStatus.CONFLICT); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + res.body.params.status.should.be.eq("FAILED") + res.body.params.msgid.should.be.eq(msgid) + res.body.error.message.should.be.eq("The denorm master dataset is self-referencing itself") + res.body.error.code.should.be.eq("SELF_REFERENCING_MASTER_DATA") done(); }); }); @@ -66,7 +270,7 @@ describe("DATASET STATUS TRANSITION LIVE", () => { res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset not found to perform status transition to live") + res.body.error.message.should.be.eq("Dataset not found for dataset: telemetry") res.body.error.code.should.be.eq("DATASET_NOT_FOUND") done(); }) @@ -74,17 +278,29 @@ describe("DATASET STATUS TRANSITION LIVE", () => { it("Dataset status transition failure: When the command api call to publish dataset fails", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", status: "ReadyToPublish" }) + return Promise.resolve(TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH) }) - chai.spy.on(commandHttpService, "post", () => { - return Promise.reject() + chai.spy.on(Dataset, "findAll", () => { + return Promise.resolve([{ "id": "master-dataset", "status": "Live", "dataset_config": { "cache_config": { "redis_db": 21 } }, "api_version": "v2" }]) + }) + chai.spy.on(DatasetDraft, "update", () => { + return 
Promise.resolve({}) + }) + chai.spy.on(Dataset, "findOne", () => { + return Promise.resolve({ "data_schema": { "email": { "data_type": "string", "arrival_format": "string" } } }) + }) + chai.spy.on(DatasourceDraft, "create", () => { + return Promise.resolve({}) }) const t = chai.spy.on(sequelize, "transaction", () => { return Promise.resolve(sequelize.transaction) }) - chai.spy.on(t, "rollback", () => { + chai.spy.on(t, "commit", () => { return Promise.resolve({}) }) + chai.spy.on(commandHttpService, "post", () => { + return Promise.reject() + }) chai .request(app) .post("/v2/datasets/status-transition") @@ -94,26 +310,25 @@ describe("DATASET STATUS TRANSITION LIVE", () => { res.body.should.be.a("object") res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to perform status transition on datasets") done(); }); }); it("Dataset status transition failure: When the dataset to publish is in draft state", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", status: "Draft" }) + return Promise.resolve({...TestInputsForDatasetStatusTransition.DRAFT_DATASET_SCHEMA_FOR_PUBLISH, status: "Draft"}) }) chai .request(app) .post("/v2/datasets/status-transition") .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); + res.should.have.status(httpStatus.CONFLICT); res.body.should.be.a("object") res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.error.code.should.be.eq("DATASET_LIVE_FAILURE") - res.body.error.message.should.be.eq("Failed to mark dataset Live as it is not in ready to publish state") + res.body.error.message.should.be.eq("Transition failed for dataset: telemetry status:Draft with status transition to Live") done(); }); }); diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetReadyToPublish.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetReadyToPublish.spec.ts index c6ac18d4..f7d5f1f3 100644 --- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetReadyToPublish.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetReadyToPublish.spec.ts @@ -1,14 +1,12 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; import { TestInputsForDatasetStatusTransition } from "./Fixtures"; import { DatasetDraft } from "../../../models/DatasetDraft"; -import { sequelize } from "../../../connections/databaseConnection"; - chai.use(spies); chai.should(); @@ -29,12 +27,32 @@ describe("DATASET STATUS TRANSITION READY TO PUBLISH", () => { chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({}) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) + + chai + .request(app) + .post("/v2/datasets/status-transition") + .send(TestInputsForDatasetStatusTransition.VALID_REQUEST_FOR_READY_FOR_PUBLISH) + .end((err, res) => { + res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq("api.datasets.status-transition"); + 
res.body.params.status.should.be.eq("SUCCESS") + res.body.result.should.be.a("object") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.message.should.be.eq("Dataset status transition to ReadyToPublish successful") + res.body.result.dataset_id.should.be.eq("telemetry") + done(); + }); + }); + + it("Dataset status transition success: When the action is make master dataset ready to publish", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve(TestInputsForDatasetStatusTransition.VALID_MASTER_SCHEMA_FOR_READY_TO_PUBLISH) }) - chai.spy.on(t, "commit", () => { + chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({}) }) + chai .request(app) .post("/v2/datasets/status-transition") @@ -47,7 +65,7 @@ describe("DATASET STATUS TRANSITION READY TO PUBLISH", () => { res.body.result.should.be.a("object") res.body.params.msgid.should.be.eq(msgid) res.body.result.message.should.be.eq("Dataset status transition to ReadyToPublish successful") - res.body.result.dataset_id.should.be.eq("telemetry.1") + res.body.result.dataset_id.should.be.eq("telemetry") done(); }); }); @@ -66,7 +84,7 @@ describe("DATASET STATUS TRANSITION READY TO PUBLISH", () => { res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset not found to perform status transition to ready to publish") + res.body.error.message.should.be.eq("Dataset not found for dataset: telemetry") res.body.error.code.should.be.eq("DATASET_NOT_FOUND") done(); }); @@ -74,19 +92,19 @@ describe("DATASET STATUS TRANSITION READY TO PUBLISH", () => { it("Dataset status transition failure: When dataset is already ready to publish", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({dataset_id:"telemetry", status:"ReadyToPublish"}) + return Promise.resolve({ ...TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_READY_TO_PUBLISH, "status": "ReadyToPublish" }) }) chai .request(app) .post("/v2/datasets/status-transition") .send(TestInputsForDatasetStatusTransition.VALID_REQUEST_FOR_READY_FOR_PUBLISH) .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); + res.should.have.status(httpStatus.CONFLICT); res.body.should.be.a("object") res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Failed to mark dataset Ready to publish as it not in draft state") + res.body.error.message.should.be.eq("Transition failed for dataset: dataset-all-fields7 status:ReadyToPublish with status transition to ReadyToPublish") res.body.error.code.should.be.eq("DATASET_READYTOPUBLISH_FAILURE") done(); }); @@ -97,12 +115,6 @@ describe("DATASET STATUS TRANSITION READY TO PUBLISH", () => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve(TestInputsForDatasetStatusTransition.INVALID_SCHEMA_FOR_READY_TO_PUBLISH) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) chai .request(app) .post("/v2/datasets/status-transition") diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts index 490b2aa6..1a0428af 100644 --- 
a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetRetire.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; import { TestInputsForDatasetStatusTransition } from "./Fixtures"; import { Dataset } from "../../../models/Dataset"; @@ -29,7 +29,7 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { it("Dataset status transition success: When the action is to Retire dataset", (done) => { chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", status: "Live", type: "dataset" }) + return Promise.resolve(TestInputsForDatasetStatusTransition.SCHEMA_TO_RETIRE) }) chai.spy.on(DatasetTransformations, "update", () => { return Promise.resolve({}) @@ -44,7 +44,7 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { return Promise.resolve({}) }) chai.spy.on(Datasource, "findAll", () => { - return Promise.resolve(["telemetry"]) + return Promise.resolve([{ datasource_ref: "telemetry" }]) }) chai.spy.on(druidHttpService, "post", () => { return Promise.resolve({}) @@ -91,12 +91,6 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { chai.spy.on(Dataset, "update", () => { return Promise.resolve({}) }) - chai.spy.on(Dataset, "findAll", () => { - return Promise.resolve() - }) - chai.spy.on(DatasetDraft, "findAll", () => { - return Promise.resolve() - }) chai.spy.on(commandHttpService, "post", () => { return Promise.resolve({}) }) @@ -185,7 +179,7 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset not found to retire") + res.body.error.message.should.be.eq("Dataset not found for dataset: telemetry") res.body.error.code.should.be.eq("DATASET_NOT_FOUND") done(); }) @@ -193,19 +187,19 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { it("Dataset status transition failure: When dataset is already retired", (done) => { chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", status: "Retired", type: "dataset" }) + return Promise.resolve({ ...TestInputsForDatasetStatusTransition.SCHEMA_TO_RETIRE, status: "Retired" }) }) chai .request(app) .post("/v2/datasets/status-transition") .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_RETIRE) .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); + res.should.have.status(httpStatus.CONFLICT); res.body.should.be.a("object") res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Failed to Retire dataset as it is not in live state") + res.body.error.message.should.be.eq("Transition failed for dataset: dataset-all-fields7 status:Retired with status transition to Retire") res.body.error.code.should.be.eq("DATASET_RETIRE_FAILURE") done(); }) @@ -213,19 +207,13 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { it("Dataset status transition failure: When dataset to retire is used by other datasets", (done) => { chai.spy.on(Dataset, "findOne", () => { - return 
Promise.resolve({ dataset_id: "telemetry", type: "master-dataset", status: "Live" }) + return Promise.resolve({ ...TestInputsForDatasetStatusTransition.SCHEMA_TO_RETIRE, type: "master" }) }) chai.spy.on(Dataset, "findAll", () => { - return Promise.resolve([{ dataset_id: "telemetry", denorm_config: { denorm_fields: [{ dataset_id: "telemetry" }] } }]) + return Promise.resolve([{ dataset_id: "telemetry", denorm_config: { denorm_fields: [{ dataset_id: "dataset-all-fields7" }] } }]) }) chai.spy.on(DatasetDraft, "findAll", () => { - return Promise.resolve([{ dataset_id: "telemetry", denorm_config: { denorm_fields: [{ dataset_id: "telemetry" }] } }]) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) + return Promise.resolve([{ dataset_id: "telemetry", denorm_config: { denorm_fields: [{ dataset_id: "dataset-all-fields7" }] } }]) }) chai .request(app) @@ -237,78 +225,10 @@ describe("DATASET STATUS TRANSITION RETIRE", () => { res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Failed to retire dataset as it is used by other datasets") + res.body.error.message.should.be.eq("Failed to retire dataset as it is in use. Please retire or delete dependent datasets before retiring this dataset") res.body.error.code.should.be.eq("DATASET_IN_USE") done(); }); }); - it("Dataset status transition failure: When setting retire status to live records fail", (done) => { - chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", status: "Live", type: "dataset" }) - }) - chai.spy.on(Dataset, "update", () => { - return Promise.reject({}) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .post("/v2/datasets/status-transition") - .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_RETIRE) - .end((err, res) => { - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); - res.body.should.be.a("object") - res.body.id.should.be.eq("api.datasets.status-transition"); - res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to perform status transition on datasets") - done(); - }); - }); - - it("Dataset status transition failure: Failed to restart pipeline", (done) => { - chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ dataset_id: "telemetry", type: "dataset", status: "Live", }) - }) - chai.spy.on(DatasetTransformations, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetSourceConfig, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(Datasource, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(Dataset, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(Datasource, "findAll", () => { - return Promise.resolve(["telemetry"]) - }) - chai.spy.on(commandHttpService, "post", () => { - return Promise.reject({}) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .post("/v2/datasets/status-transition") - .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_RETIRE) - .end((err, res) => 
{ - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); - res.body.should.be.a("object") - res.body.id.should.be.eq("api.datasets.status-transition"); - res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to perform status transition on datasets") - done(); - }); - }); }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetStatusTransition.spec.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetStatusTransition.spec.ts index da2d4450..7117aa94 100644 --- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetStatusTransition.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/DatasetStatusTransition.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; import { TestInputsForDatasetStatusTransition } from "./Fixtures"; import { DatasetDraft } from "../../../models/DatasetDraft"; @@ -22,7 +22,7 @@ describe("DATASET STATUS TRANSITION API", () => { }); it("Dataset status transition failure: Invalid request payload provided", (done) => { - + chai .request(app) .post("/v2/datasets/status-transition") @@ -39,20 +39,21 @@ describe("DATASET STATUS TRANSITION API", () => { }); }); - it("Dataset status transition failure: Connection to the database failed", (done) => { + it("Dataset status transition failure: When the action is performed on v1 apis", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.reject() + return Promise.resolve({ api_version: "v1" }) }) chai .request(app) .post("/v2/datasets/status-transition") - .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_DELETE) + .send(TestInputsForDatasetStatusTransition.VALID_SCHEMA_FOR_LIVE) .end((err, res) => { - res.should.have.status(httpStatus.INTERNAL_SERVER_ERROR); + res.should.have.status(httpStatus.BAD_REQUEST); res.body.should.be.a("object") res.body.id.should.be.eq("api.datasets.status-transition"); res.body.params.status.should.be.eq("FAILED") - res.body.error.message.should.be.eq("Failed to perform status transition on datasets") + res.body.error.code.should.be.eq("DATASET_API_VERSION_MISMATCH") + res.body.error.message.should.be.eq("Draft dataset api version is not v2. 
Perform a read api call with mode=edit to migrate the dataset") done(); }); }); diff --git a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/Fixtures.ts b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/Fixtures.ts index 7deb12f7..e65f1852 100644 --- a/api-service/src/tests/DatasetManagement/DatasetStatusTransition/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DatasetStatusTransition/Fixtures.ts @@ -1,187 +1,164 @@ export const TestInputsForDatasetStatusTransition = { - VALID_SCHEMA_FOR_DELETE: { - "id": "api.datasets.status-transition", - "ver": "v2", - "ts": "2024-04-19T12:58:47+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" - }, - "request": { - "dataset_id": "telemetry.1", - "status": "Delete" - } + VALID_SCHEMA_FOR_DELETE: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" }, - VALID_SCHEMA_FOR_LIVE: { - "id": "api.datasets.status-transition", - "ver": "v2", - "ts": "2024-04-19T12:58:47+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" - }, - "request": { - "dataset_id": "telemetry.1", - "status": "Live" - } + "request": { + "dataset_id": "telemetry.1", + "status": "Delete" + } + }, + VALID_SCHEMA_FOR_LIVE: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" }, - VALID_SCHEMA_FOR_RETIRE: { - "id": "api.datasets.status-transition", - "ver": "v2", - "ts": "2024-04-19T12:58:47+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" - }, - "request": { - "dataset_id": "telemetry", - "status": "Retire" - } + "request": { + "dataset_id": "telemetry", + "status": "Live" + } + }, + VALID_SCHEMA_FOR_LIVE_MASTER: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" }, - INVALID_SCHEMA: { - "id": "api.datasets.status-transition", - "ver": "v2", - "ts": "2024-04-19T12:58:47+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" - }, - "request": { - "dataset_id": "telemetry.1", - "status": "" - } + "request": { + "dataset_id": "master-telemetry", + "status": "Live" + } + }, + VALID_SCHEMA_FOR_RETIRE: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" }, - VALID_REQUEST_FOR_READY_FOR_PUBLISH:{ - "id": "api.datasets.status-transition", - "ver": "v2", - "ts": "2024-04-19T12:58:47+05:30", - "params": { - "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" - }, - "request": { - "dataset_id": "telemetry.1", - "status": "ReadyToPublish" - } + "request": { + "dataset_id": "telemetry", + "status": "Retire" + } + }, + INVALID_SCHEMA: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" }, - VALID_SCHEMA_FOR_READY_TO_PUBLISH: { - "dataset_id": "telemetry", - "type": "dataset", - "name": "sb-telemetry", - "id": "telemetry.1", - "status": "Draft", - "version_key": "1789887878", - "validation_config": { - "validate": true, - "mode": "Strict" - }, - "extraction_config": { - "is_batch_event": true, - "extraction_key": "events", - "batch_id": "id", - "dedup_config": { - "drop_duplicates": true, - "dedup_key": "id", - "dedup_period": 3783 - } - }, - "dedup_config": { - 
"drop_duplicates": true, - "dedup_key": "mid", - "dedup_period": 3783 - }, - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", - "properties": { - "ets": { - "type": "string" - }, - "ver": { - "type": "string" - }, - "required": [ - "eid" - ] - }, - "additionalProperties": true - }, - "router_config": { - "topic": "test" - }, - "denorm_config": { - "redis_db_host": "local", - "redis_db_port": 5432, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata", - "dataset_name": "name", - "dataset_id": "name" - }, - { - "denorm_key": "actor.id", - "denorm_out_field": "mid", - "dataset_name": "name", - "dataset_id": "name" - } - ] - }, - "dataset_config": { - "data_key": "mid", - "timestamp_key": "ets", - "entry_topic": "topic", - "redis_db_host": "local", - "redis_db_port": 5432, - "redis_db": 0, - "index_data": true - }, - "client_state": {}, - "tags": [ - "tag1", - "tag2" - ] - }, - INVALID_SCHEMA_FOR_READY_TO_PUBLISH: { - "dataset_id": "telemetry", - "type": "", - "name": "sb-telemetry", - "id": "telemetry.1", - "status": "Draft", - "version_key": "1789887878", - "validation_config": { - "validate": true, - "mode": "Strict" - }, - "router_config": { - "topic": "test" - }, - "denorm_config": { - "redis_db_host": "local", - "redis_db_port": 5432, - "denorm_fields": [ - { - "denorm_key": "actor.id", - "denorm_out_field": "userdata", - "dataset_name": "name", - "dataset_id": "name" - }, - { - "denorm_key": "actor.id", - "denorm_out_field": "mid", - "dataset_name": "name", - "dataset_id": "name" - } - ] - }, - "dataset_config": { - "data_key": "mid", - "timestamp_key": "ets", - "entry_topic": "topic", - "redis_db_host": "local", - "redis_db_port": 5432, - "redis_db": 0, - "index_data": true - }, - "client_state": {}, - "tags": [ - "tag1", - "tag2" - ] - } + "request": { + "dataset_id": "telemetry.1", + "status": "" + } + }, + VALID_REQUEST_FOR_READY_FOR_PUBLISH: { + "id": "api.datasets.status-transition", + "ver": "v2", + "ts": "2024-04-19T12:58:47+05:30", + "params": { + "msgid": "4a7f14c3-d61e-4d4f-be78-181834eeff6" + }, + "request": { + "dataset_id": "telemetry", + "status": "ReadyToPublish" + } + }, + VALID_SCHEMA_FOR_READY_TO_PUBLISH: { + "id": "dataset-all-fields7", + "dataset_id": "dataset-all-fields7", + "version": 1, + "type": "event", + "name": "sb-telemetry", + "validation_config": { "validate": false, "mode": "Strict" }, + "extraction_config": { "is_batch_event": true, "extraction_key": "events", "dedup_config": { "drop_duplicates": true, "dedup_key": "id", "dedup_period": 604800 } }, + "dedup_config": { "drop_duplicates": true, "dedup_key": "mid", "dedup_period": 604800 }, + "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "mid": { "type": "string", "arrival_format": "text", "data_type": "string" }, "ets": { "type": "integer", "arrival_format": "number", "data_type": "epoch" }, "eid": { "type": "string", "arrival_format": "text", "data_type": "string" } }, "additionalProperties": true }, + "denorm_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "denorm_fields": [{ "denorm_key": "eid", "denorm_out_field": "userdata", "dataset_id": "master-dataset", "redis_db": 85 }] }, + "router_config": { "topic": "dataset-all-fields7" }, + "dataset_config": { "indexing_config": { "olap_store_enabled": true, "lakehouse_enabled": true, "cache_enabled": false }, "keys_config": { "data_key": "eid", "partition_key": "eid", "timestamp_key": 
"obsrv_meta.syncts" }, "cache_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "redis_db": 0 }, "file_upload_path": [] }, + "tags": ["tag1"], + "status": "Draft", + "created_by": "SYSTEM", + "updated_by": "SYSTEM", + "created_date": "2024-07-24 19:12:13.021", + "updated_date": "2024-07-25 06:12:38.412", + "version_key": "1721887933020", + "api_version": "v2", + "transformations_config": [{ "field_key": "email", "transformation_function": { "type": "mask", "expr": "mid", "datatype": "string", "category": "pii" }, "mode": "Strict" }], + "connectors_config": [{ "id": "91898e828u82882u8", "connector_id": "kafka", "connector_config": "AR/hz8iBxRyc9s0ohXa3+id+7GoWtjVjNWvurFFgV1Ocw2kgc+XVbnfXX26zkP3+rQ49gio0JzwsFzOK61TtXLx968IKol5eGfaEHF68O5faoxxjKBsyvhPaRQ91DKKi", "version": "v1" }], + "sample_data": {}, + "entry_topic": "local.ingest" + }, + VALID_MASTER_SCHEMA_FOR_READY_TO_PUBLISH: { + "id": "dataset-all-fields7", + "dataset_id": "dataset-all-fields7", + "version": 1, + "type": "master", + "name": "sb-telemetry", + "validation_config": { "validate": false, "mode": "Strict" }, + "extraction_config": { "is_batch_event": true, "extraction_key": "events", "dedup_config": { "drop_duplicates": true, "dedup_key": "id", "dedup_period": 604800 } }, + "dedup_config": { "drop_duplicates": true, "dedup_key": "mid", "dedup_period": 604800 }, + "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "mid": { "type": "string", "arrival_format": "text", "data_type": "string" }, "ets": { "type": "integer", "arrival_format": "number", "data_type": "epoch" }, "eid": { "type": "string", "arrival_format": "text", "data_type": "string" } }, "additionalProperties": true }, + "denorm_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "denorm_fields": [{ "denorm_key": "eid", "denorm_out_field": "userdata", "dataset_id": "master-dataset", "redis_db": 85 }] }, + "router_config": { "topic": "dataset-all-fields7" }, + "dataset_config": { "indexing_config": { "olap_store_enabled": true, "lakehouse_enabled": true, "cache_enabled": false }, "keys_config": { "data_key": "eid", "partition_key": "eid", "timestamp_key": "obsrv_meta.syncts" }, "cache_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "redis_db": 0 }, "file_upload_path": [] }, + "tags": ["tag1"], + "status": "Draft", + "created_by": "SYSTEM", + "updated_by": "SYSTEM", + "created_date": "2024-07-24 19:12:13.021", + "updated_date": "2024-07-25 06:12:38.412", + "version_key": "1721887933020", + "transformations_config": [], + "connectors_config": [], + "api_version": "v2", + "sample_data": {}, + "entry_topic": "local.ingest" + }, + INVALID_SCHEMA_FOR_READY_TO_PUBLISH: { + "id": "dataset-all-fields7", + "dataset_id": "dataset-all-fields7", + "version": 1, + "type": "event", + "name": "sb-telemetry", + "validation_config": { "validate": false, "mode": "Strict" }, + "extraction_config": { "is_batch_event": true, "extraction_key": "events", "dedup_config": { "drop_duplicates": true, "dedup_key": "id", "dedup_period": 604800 } }, + "dedup_config": { "drop_duplicates": true, "dedup_key": "mid", "dedup_period": 604800 }, + "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "mid": { "type": "string", "arrival_format": "text", "data_type": "string" }, "ets": { "type": "integer", "arrival_format": "number", "data_type": "epoch" }, "eid": { "type": "string", "arrival_format": "text", "data_type": "string" } }, 
"additionalProperties": true }, + "denorm_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "denorm_fields": [{ "denorm_key": "eid", "denorm_out_field": "userdata", "dataset_id": "master-dataset", "redis_db": 85 }] }, + "router_config": { "topic": "dataset-all-fields7" }, + "tags": ["tag1"], + "status":"Draft", + "version_key": "1721887933020", + "api_version": "v2" + }, + SCHEMA_TO_RETIRE: { + "id": "dataset-all-fields7", + "dataset_id": "dataset-all-fields7", + "version": 1, + "type": "event", + "name": "sb-telemetry", + "validation_config": { "validate": false, "mode": "Strict" }, + "extraction_config": { "is_batch_event": true, "extraction_key": "events", "dedup_config": { "drop_duplicates": true, "dedup_key": "id", "dedup_period": 604800 } }, + "dedup_config": { "drop_duplicates": true, "dedup_key": "mid", "dedup_period": 604800 }, + "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "mid": { "type": "string", "arrival_format": "text", "data_type": "string" }, "ets": { "type": "integer", "arrival_format": "number", "data_type": "epoch" }, "eid": { "type": "string", "arrival_format": "text", "data_type": "string" } }, "additionalProperties": true }, + "denorm_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "denorm_fields": [{ "denorm_key": "eid", "denorm_out_field": "userdata", "dataset_id": "master-dataset", "redis_db": 85 }] }, + "router_config": { "topic": "dataset-all-fields7" }, + "tags": ["tag1"], + "status":"Live", + "version_key": "1721887933020", + "api_version": "v2" + }, + DRAFT_DATASET_SCHEMA_FOR_PUBLISH: { "dataset_id": "telemetry", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "ets": { "type": "string" }, "ver": { "type": "string" } }, "additionalProperties": true }, "status": "ReadyToPublish", "id": "telemetry", "type": "events", "api_version": "v2", "denorm_config": { "denorm_fields": [{ "denorm_out_field": "pid", "denorm_key": "eid", "dataset_id": "master-dataset" }] }, "dataset_config": { "indexing_config": { "olap_store_enabled": true, "lakehouse_enabled": false, "cache_enabled": false }, "keys_config": { "timestamp_key": "ets", "partition_key": "", "data_key": "eid" }, "file_upload_path": ["telemetry.json"] }, "router_config": { "topic": "telemetry" } }, + DRAFT_DATASET_SCHEMA_FOR_PUBLISH_HUDI: { "dataset_id": "telemetry", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "ets": { "type": "string" }, "ver": { "type": "string" } }, "additionalProperties": true }, "status": "ReadyToPublish", "id": "telemetry", "type": "events", "api_version": "v2", "denorm_config": { "denorm_fields": [{ "denorm_out_field": "pid", "denorm_key": "eid", "dataset_id": "master-dataset" }] }, "dataset_config": { "indexing_config": { "olap_store_enabled": false, "lakehouse_enabled": true, "cache_enabled": false }, "keys_config": { "timestamp_key": "ets", "partition_key": "ets", "data_key": "eid" }, "file_upload_path": ["telemetry.json"] }, "router_config": { "topic": "telemetry" } }, + DRAFT_MASTER_DATASET_SCHEMA_FOR_PUBLISH: { "dataset_id": "master-telemetry", "data_schema": { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", "properties": { "ets": { "type": "string" }, "ver": { "type": "string" } }, "additionalProperties": true }, "status": "ReadyToPublish", "id": "master-telemetry", "type": "master", "api_version": "v2", "denorm_config": { 
"denorm_fields": [] }, "dataset_config": { "indexing_config": { "olap_store_enabled": false, "lakehouse_enabled": false, "cache_enabled": true }, "keys_config": { "timestamp_key": "ets", "partition_key": "", "data_key": "eid" }, "cache_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "redis_db": 0 }, "file_upload_path": ["telemetry.json"] }, "router_config": { "topic": "telemetry" } }, + DRAFT_MASTER_DATASET_INVALID: { "dataset_id": "master-telemetry", "status": "ReadyToPublish", "id": "master-telemetry", "type": "master", "api_version": "v2", "denorm_config": { "denorm_fields": [] }, "dataset_config": { "indexing_config": { "olap_store_enabled": false, "lakehouse_enabled": false, "cache_enabled": true }, "keys_config": { "timestamp_key": "ets", "partition_key": "", "data_key": "eid" }, "cache_config": { "redis_db_host": "localhost", "redis_db_port": 5679, "redis_db": 0 }, "file_upload_path": ["telemetry.json"] }, "router_config": { "topic": "telemetry" } } } \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetConnectors.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetConnectors.spec.ts new file mode 100644 index 00000000..1122aac4 --- /dev/null +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetConnectors.spec.ts @@ -0,0 +1,74 @@ +import app from "../../../app"; +import chai from "chai"; +import chaiHttp from "chai-http"; +import spies from "chai-spies"; +import httpStatus from "http-status"; +import { describe, it } from "mocha"; +import { DatasetDraft } from "../../../models/DatasetDraft"; +import _ from "lodash"; +import { TestInputsForDatasetUpdate, msgid, validVersionKey } from "./Fixtures"; +import { apiId } from "../../../controllers/DatasetUpdate/DatasetUpdate" + +chai.use(spies); +chai.should(); +chai.use(chaiHttp); + +describe("DATASET CONNECTORS UPDATE", () => { + + afterEach(() => { + chai.spy.restore(); + }); + + it("Success: Dataset connectors successfully added", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve({ + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2", connectors_config:[] + }) + }) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) + }) + chai + .request(app) + .patch("/v2/datasets/update") + .send(TestInputsForDatasetUpdate.DATASET_UPDATE_CONNECTORS_ADD) + .end((err, res) => { + res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq(apiId); + res.body.params.status.should.be.eq("SUCCESS") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.id.should.be.eq("telemetry") + res.body.result.message.should.be.eq("Dataset is updated successfully") + res.body.result.version_key.should.be.a("string") + done(); + }); + }); + + it("Success: Dataset connectors successfully removed", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve({ + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2", connectors_config:[{"id":"6c3fc8c2-357d-489b-b0c9-afdde6e5c6c0","connector_id":"kafka","connector_config":{"type":"kafka","topic":"telemetry.ingest","kafkaBrokers":"kafka-headless.kafka.svc:9092"},"version":"v1"}] + }) + }) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) + }) 
+ chai + .request(app) + .patch("/v2/datasets/update") + .send(TestInputsForDatasetUpdate.DATASET_UPDATE_CONNECTORS_REMOVE) + .end((err, res) => { + res.should.have.status(httpStatus.OK); + res.body.should.be.a("object") + res.body.id.should.be.eq(apiId); + res.body.params.status.should.be.eq("SUCCESS") + res.body.params.msgid.should.be.eq(msgid) + res.body.result.id.should.be.eq("telemetry") + res.body.result.message.should.be.eq("Dataset is updated successfully") + res.body.result.version_key.should.be.a("string") + done(); + }); + }); + +}) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDedup.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDedup.spec.ts index 9b1ee96b..eaa80648 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDedup.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDedup.spec.ts @@ -1,14 +1,13 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, requestStructure, validVersionKey } from "./Fixtures"; import { apiId, invalidInputErrCode } from "../../../controllers/DatasetUpdate/DatasetUpdate" -import { sequelize } from "../../../connections/databaseConnection"; chai.use(spies); chai.should(); @@ -23,18 +22,12 @@ describe("DATASET DEDUPE CONFIG UPDATE", () => { it("Success: Dataset dedupe configs updated with dedup key if duplicates need to be dropped", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -55,22 +48,16 @@ describe("DATASET DEDUPE CONFIG UPDATE", () => { it("Success: Dataset dedupe configs updated with default values if duplicates need to be dropped", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") - .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, dedup_config: { drop_duplicates: false } } }) + .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, dedup_config: { drop_duplicates: false, dedup_key: "mid" } } }) 
.end((err, res) => { console.log(res.body.result) res.should.have.status(httpStatus.OK); diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDenorm.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDenorm.spec.ts index 831cd534..5f330032 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDenorm.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetDenorm.spec.ts @@ -1,14 +1,13 @@ -import app from "../../../../app"; -import chai, { expect } from "chai"; +import app from "../../../app"; +import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, validVersionKey } from "./Fixtures"; import { apiId } from "../../../controllers/DatasetUpdate/DatasetUpdate" -import { sequelize } from "../../../connections/databaseConnection"; chai.use(spies); chai.should(); @@ -23,18 +22,12 @@ describe("DATASET DENORM UPDATE", () => { it("Success: Dataset denorms successfully added", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", type:"dataset", version_key: validVersionKey, denorm_config: { denorm_field: [] } + id: "telemetry", status: "Draft", type:"event", version_key: validVersionKey, api_version:"v2", denorm_config: { denorm_field: [] } }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -55,18 +48,12 @@ describe("DATASET DENORM UPDATE", () => { it("Success: Dataset denorms successfully removed", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", type:"dataset", version_key: validVersionKey, denorm_config: { denorm_fields: [{ denorm_out_field: "userdata" }] } + id: "telemetry", status: "Draft", type:"event", version_key: validVersionKey, api_version:"v2", denorm_config: { denorm_fields: [{ denorm_out_field: "userdata" }] } }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -87,10 +74,11 @@ describe("DATASET DENORM UPDATE", () => { it("Success: When payload contains same denorms to be removed", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", version_key: validVersionKey, type:"dataset", status: "Draft", denorm_config: { + id: "telemetry", version_key: validVersionKey, type:"dataset", api_version:"v2", status: "Draft", denorm_config: { denorm_fields: [{ "denorm_key": "actor.id", - "denorm_out_field": "mid" + "denorm_out_field": "mid", + "dataset_id": "master" }] } }) @@ -98,12 +86,6 @@ describe("DATASET DENORM UPDATE", () => { chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ 
dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -121,84 +103,36 @@ describe("DATASET DENORM UPDATE", () => { }); }); - - it("Failure: Dataset contains duplicate denorm field", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ status: "Draft", version_key: validVersionKey }) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_DUPLICATE_DENORM_KEY) - .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - expect(res.body.error.message).to.match(/^Dataset contains duplicate denorm out keys(.+)$/) - res.body.error.code.should.be.eq("DATASET_DUPLICATE_DENORM_KEY") - done(); - }); - }); - - it("Failure: When denorm fields provided to add already exists", (done) => { + it("Success: Ignore the denorm when payload contains denorm that already exists", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { - denorm_fields: [{ + id: "telemetry", version_key: validVersionKey, type:"dataset", api_version:"v2", status: "Draft", denorm_config: { + denorm_fields: [ { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "mid", + "dataset_id": "master" }] } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) + chai.spy.on(DatasetDraft, "update", () => { + return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) chai .request(app) .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) + .send(TestInputsForDatasetUpdate.DATASET_UPDATE_WITH_EXISTING_DENORM) .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); + res.should.have.status(httpStatus.OK); res.body.should.be.a("object") res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") + res.body.params.status.should.be.eq("SUCCESS") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Denorm fields already exist") - res.body.error.code.should.be.eq("DATASET_DENORM_EXISTS") + res.body.result.id.should.be.eq("telemetry") + res.body.result.message.should.be.eq("Dataset is updated successfully") + res.body.result.version_key.should.be.a("string") done(); }); }); - it("Failure: When denorm fields provided to delete does not exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { - denorm_fields: [{ - "denorm_key": "actor.id", - "denorm_out_field": "id" - }] - } - }) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - .end((err, res) => { - res.should.have.status(httpStatus.NOT_FOUND); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - 
res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Denorm fields do not exist to remove") - res.body.error.code.should.be.eq("DATASET_DENORM_DO_NOT_EXIST") - done(); - }); - }); }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetExtraction.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetExtraction.spec.ts index 211bcd9d..d17f1c3b 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetExtraction.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetExtraction.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, requestStructure, validVersionKey } from "./Fixtures"; @@ -23,18 +23,13 @@ describe("DATASET EXTRACTION CONFIG UPDATE", () => { it("Success: Dataset extraction configs updated if it is a batch event", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type: "dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, api_version: "v2", type: "event" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -55,22 +50,29 @@ describe("DATASET EXTRACTION CONFIG UPDATE", () => { it("Success: Dataset extraction configs updated with default values if it is not batch event", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, api_version: "v2", type: "event" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") - .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, extraction_config: { "is_batch_event": false } } }) + .send({ + ...requestStructure, request: { + dataset_id: "telemetry", version_key: validVersionKey, + "extraction_config": { + "is_batch_event": false, + "extraction_key": "events", + "dedup_config": { + "drop_duplicates": true, + "dedup_key": "id" + } + } + } + }) .end((err, res) => { res.should.have.status(httpStatus.OK); res.body.should.be.a("object") diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTags.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTags.spec.ts index b445455a..49ba7b8a 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTags.spec.ts +++ 
b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTags.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, validVersionKey } from "./Fixtures"; @@ -23,18 +23,13 @@ describe("DATASET TAGS UPDATE", () => { it("Success: Dataset tags successfully added", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", type: "dataset", version_key: validVersionKey, denorm_config: { denorm_fields: [] } + id: "telemetry", status: "Draft", type: "event", version_key: validVersionKey, denorm_config: { denorm_fields: [] }, api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -55,18 +50,13 @@ describe("DATASET TAGS UPDATE", () => { it("Success: Dataset tags successfully removed", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", type: "dataset", version_key: validVersionKey, tags: ["tag1", "tag2"] + id: "telemetry", status: "Draft", type: "event", version_key: validVersionKey, tags: ["tag1", "tag2"], api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -84,80 +74,4 @@ describe("DATASET TAGS UPDATE", () => { }); }); - it("Success: When payload contains same tags to be added or removed", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", type: "dataset", version_key: validVersionKey, tags: ["tag1", "tag2"] - }) - }) - chai.spy.on(DatasetDraft, "update", () => { - return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_WITH_SAME_TAGS_ADD) - .end((err, res) => { - res.should.have.status(httpStatus.OK); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("SUCCESS") - res.body.params.msgid.should.be.eq(msgid) - res.body.result.id.should.be.eq("telemetry") - res.body.result.message.should.be.eq("Dataset is updated successfully") - res.body.result.version_key.should.be.a("string") - done(); - }); - }); - - it("Failure: When tags provided to add already exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: 
"telemetry", status: "Draft", version_key: validVersionKey, tags: ["tag3", "tag1"] - }) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset tags already exist") - res.body.error.code.should.be.eq("DATASET_TAGS_EXISTS") - done(); - }); - }); - - it("Failure: When tags provided to delete does not exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, tags: ["tag5"] - }) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - .end((err, res) => { - res.should.have.status(httpStatus.NOT_FOUND); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset tags do not exist to remove") - res.body.error.code.should.be.eq("DATASET_TAGS_DO_NOT_EXIST") - done(); - }); - }); - }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTransformation.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTransformation.spec.ts index cb1fd6be..58252e83 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTransformation.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetTransformation.spec.ts @@ -1,13 +1,12 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, validVersionKey } from "./Fixtures"; -import { DatasetTransformationsDraft } from "../../../models/TransformationDraft"; import { apiId } from "../../../controllers/DatasetUpdate/DatasetUpdate" import { sequelize } from "../../../connections/databaseConnection"; @@ -24,24 +23,13 @@ describe("DATASET TRANSFORMATIONS UPDATE", () => { it("Success: Dataset transformations successfully added", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2", "transformations_config":[] }) }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([]) - }) - chai.spy.on(DatasetTransformationsDraft, "bulkCreate", () => { - return Promise.resolve({}) - }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -62,24 +50,13 @@ describe("DATASET TRANSFORMATIONS UPDATE", () => { it("Success: Dataset 
transformations successfully removed", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2", "transformations_config":[{ "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }] }) }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key1" }, { field_key: "key3" }]) - }) - chai.spy.on(DatasetTransformationsDraft, "destroy", () => { - return Promise.resolve({}) - }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -97,69 +74,14 @@ describe("DATASET TRANSFORMATIONS UPDATE", () => { }); }); - it("Success: Dataset transformations successfully updated", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" - }) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key1" }, { field_key: "key3" }]) - }) - chai.spy.on(DatasetTransformationsDraft, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetDraft, "update", () => { - return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_TRANSFORMATIONS_UPDATE) - .end((err, res) => { - res.should.have.status(httpStatus.OK); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("SUCCESS") - res.body.params.msgid.should.be.eq(msgid) - res.body.result.id.should.be.eq("telemetry") - res.body.result.message.should.be.eq("Dataset is updated successfully") - res.body.result.version_key.should.be.a("string") - done(); - }); - }); - it("Success: When payload contains same transformation field_key to be added, updated or removed", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" }) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key2" }, { field_key: "key3" }]) - }) - chai.spy.on(DatasetTransformationsDraft, "bulkCreate", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetTransformationsDraft, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetTransformationsDraft, "destroy", () => { - return Promise.resolve({}) + return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, 
"transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -177,111 +99,4 @@ describe("DATASET TRANSFORMATIONS UPDATE", () => { }); }); - it("Failure: When transformation fields provided to add already exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", type:"dataset", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { - denorm_fields: [{ - "denorm_key": "actor.id", - "denorm_out_field": "mid" - }] - } - }) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key1" }, { field_key: "key3" }]) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - .end((err, res) => { - res.should.have.status(httpStatus.BAD_REQUEST); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset transformations already exists") - res.body.error.code.should.be.eq("DATASET_TRANSFORMATIONS_EXIST") - done(); - }); - }); - - it("Failure: When transformation fields provided to update do not exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", type:"dataset" , version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { - denorm_fields: [{ - "denorm_key": "actor.id", - "denorm_out_field": "mid" - }] - } - }) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key7" }, { field_key: "key2" }]) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - .end((err, res) => { - res.should.have.status(httpStatus.NOT_FOUND); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset transformations do not exist to update") - res.body.error.code.should.be.eq("DATASET_TRANSFORMATIONS_DO_NOT_EXIST") - done(); - }); - }); - - it("Failure: When transformation fields provided to remove do not exists", (done) => { - chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ - id: "telemetry", status: "Draft", type:"dataset", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { - denorm_fields: [{ - "denorm_key": "actor.id", - "denorm_out_field": "mid" - }] - } - }) - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key7" }, { field_key: "key3" }]) - }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "rollback", () => { - return Promise.resolve({}) - }) - chai - .request(app) - .patch("/v2/datasets/update") - .send(TestInputsForDatasetUpdate.DATASET_UPDATE_REQUEST) - 
.end((err, res) => { - res.should.have.status(httpStatus.NOT_FOUND); - res.body.should.be.a("object") - res.body.id.should.be.eq(apiId); - res.body.params.status.should.be.eq("FAILED") - res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset transformations do not exist to remove") - res.body.error.code.should.be.eq("DATASET_TRANSFORMATIONS_DO_NOT_EXIST") - done(); - }); - }); }) \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetUpdate.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetUpdate.spec.ts index 0ee5b588..c2050f23 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetUpdate.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetUpdate.spec.ts @@ -1,18 +1,17 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, requestStructure, validVersionKey } from "./Fixtures"; import { DatasetTransformationsDraft } from "../../../models/TransformationDraft"; -import { apiId, errorCode, invalidInputErrCode } from "../../../controllers/DatasetUpdate/DatasetUpdate" -import { Dataset } from "../../../models/Dataset"; -import { DatasetTransformations } from "../../../models/Transformation"; +import { apiId, invalidInputErrCode } from "../../../controllers/DatasetUpdate/DatasetUpdate" import { sequelize } from "../../../connections/databaseConnection"; + chai.use(spies); chai.should(); chai.use(chaiHttp); @@ -25,17 +24,12 @@ describe("DATASET UPDATE API", () => { it("Dataset updation success: When minimal request payload provided", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type: "dataset" }) + return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -56,50 +50,18 @@ describe("DATASET UPDATE API", () => { it("Dataset updation success: When full request payload provided", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", type: "dataset", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { + id: "telemetry", status: "Draft", type: "event", version_key: validVersionKey, tags: ["tag1", "tag2"], denorm_config: { denorm_fields: [{ "denorm_key": "actor.id", "denorm_out_field": "mid" }] - } + }, api_version: "v2" }) }) - chai.spy.on(Dataset, "findOne", () => { - return Promise.resolve({ - "data_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", - "properties": { - "eid": { "type": "string" }, - "ets": { "type": "string" } - }, - "additionalProperties": true - }, - }) - }) - chai.spy.on(DatasetTransformations, "findAll", () => { - 
return Promise.resolve() - }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key2" }, { field_key: "key3" }]) - }) - chai.spy.on(DatasetTransformationsDraft, "bulkCreate", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetTransformationsDraft, "update", () => { - return Promise.resolve({}) - }) - chai.spy.on(DatasetTransformationsDraft, "destroy", () => { - return Promise.resolve({}) - }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -118,7 +80,7 @@ describe("DATASET UPDATE API", () => { }); it("Dataset updation failure: When no fields with dataset_id is provided in the request payload", (done) => { - + chai .request(app) .patch("/v2/datasets/update") @@ -139,7 +101,7 @@ describe("DATASET UPDATE API", () => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve(null) }) - + chai .request(app) .patch("/v2/datasets/update") @@ -150,7 +112,7 @@ describe("DATASET UPDATE API", () => { res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Dataset does not exists to update") + res.body.error.message.should.be.eq("Dataset does not exists with id:telemetry") res.body.error.code.should.be.eq("DATASET_NOT_EXISTS") done(); }); @@ -158,9 +120,9 @@ describe("DATASET UPDATE API", () => { it("Dataset updation failure: When dataset to update is outdated", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ id: "telemetry", status: "Draft", version_key: "1813444815918", api_version: "v2" }) + return Promise.resolve({ id: "telemetry", type: "event", status: "Draft", version_key: "1813444815918", api_version: "v2" }) }) - + chai .request(app) .patch("/v2/datasets/update") @@ -177,11 +139,32 @@ describe("DATASET UPDATE API", () => { }); }); + it("Dataset updation failure: When dataset to update is of api_version v1", (done) => { + chai.spy.on(DatasetDraft, "findOne", () => { + return Promise.resolve({ id: "telemetry", type: "event", status: "Draft", version_key: "1813444815918", api_version: "v1" }) + }) + + chai + .request(app) + .patch("/v2/datasets/update") + .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, name: "telemetry" } }) + .end((err, res) => { + res.should.have.status(httpStatus.NOT_FOUND); + res.body.should.be.a("object") + res.body.id.should.be.eq(apiId); + res.body.params.status.should.be.eq("FAILED") + res.body.params.msgid.should.be.eq(msgid) + res.body.error.message.should.be.eq("Draft dataset api version is not v2. 
Perform a read api call with mode=edit to migrate the dataset") + res.body.error.code.should.be.eq("DATASET_API_VERSION_MISMATCH") + done(); + }); + }); + it("Dataset updation failure: Dataset to update is not in draft state", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ status: "Live" }) + return Promise.resolve({ id: "telemetry", type: "event", status: "Live", version_key: "1713444815918", api_version: "v2" }) }) - + chai .request(app) .patch("/v2/datasets/update") @@ -212,8 +195,7 @@ describe("DATASET UPDATE API", () => { res.body.id.should.be.eq(apiId); res.body.params.status.should.be.eq("FAILED") res.body.params.msgid.should.be.eq(msgid) - res.body.error.message.should.be.eq("Failed to update dataset") - res.body.error.code.should.be.eq(errorCode) + res.body.error.code.should.be.eq("INTERNAL_SERVER_ERROR") done(); }); }); @@ -226,17 +208,11 @@ describe("DATASET UPDATE API", () => { it("Success: Dataset name updated successfully", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { - return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type: "dataset" }) + return Promise.resolve({ id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -255,7 +231,7 @@ describe("DATASET UPDATE API", () => { }); it("Failure: Failed to update the dataset name", (done) => { - + chai .request(app) .patch("/v2/datasets/update") @@ -282,21 +258,12 @@ describe("DATASET UPDATE API", () => { it("Success: Dataset data schema updated successfully", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type: "dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) - chai.spy.on(DatasetTransformationsDraft, "findAll", () => { - return Promise.resolve([{ field_key: "key2" }, { field_key: "key3" }]) - }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") @@ -341,18 +308,12 @@ describe("DATASET UPDATE API", () => { it("Success: Dataset config updated successfully", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type: "dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type: "event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) chai .request(app) .patch("/v2/datasets/update") diff --git 
a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetValidation.spec.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetValidation.spec.ts index 04667f3e..d5feed17 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetValidation.spec.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/DatasetValidation.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { DatasetDraft } from "../../../models/DatasetDraft"; import _ from "lodash"; import { TestInputsForDatasetUpdate, msgid, requestStructure, validVersionKey } from "./Fixtures"; @@ -23,18 +23,13 @@ describe("DATASET VALIDATION CONFIG UPDATE", () => { it("Success: Dataset validation configs updated when validation is true", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") @@ -55,22 +50,17 @@ describe("DATASET VALIDATION CONFIG UPDATE", () => { it("Success: Dataset validation configs updated with default values when validation is false", (done) => { chai.spy.on(DatasetDraft, "findOne", () => { return Promise.resolve({ - id: "telemetry", status: "Draft", version_key: validVersionKey, type:"dataset" + id: "telemetry", status: "Draft", version_key: validVersionKey, type:"event", api_version: "v2" }) }) chai.spy.on(DatasetDraft, "update", () => { return Promise.resolve({ dataValues: { id: "telemetry", message: "Dataset is updated successfully" } }) }) - const t = chai.spy.on(sequelize, "transaction", () => { - return Promise.resolve(sequelize.transaction) - }) - chai.spy.on(t, "commit", () => { - return Promise.resolve({}) - }) + chai .request(app) .patch("/v2/datasets/update") - .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, validation_config: { "validate": false } } }) + .send({ ...requestStructure, request: { dataset_id: "telemetry", version_key: validVersionKey, validation_config: { "validate": false, "mode": "Strict" } } }) .end((err, res) => { res.should.have.status(httpStatus.OK); res.body.should.be.a("object") diff --git a/api-service/src/tests/DatasetManagement/DatasetUpdate/Fixtures.ts b/api-service/src/tests/DatasetManagement/DatasetUpdate/Fixtures.ts index 4dd1455e..ae9628a4 100644 --- a/api-service/src/tests/DatasetManagement/DatasetUpdate/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/DatasetUpdate/Fixtures.ts @@ -15,7 +15,8 @@ export const TestInputsForDatasetUpdate = { ...requestStructure, request: { "dataset_id": "telemetry", "version_key": validVersionKey, - "name": "telemetry" + "name": "telemetry", + "sample_data":{"events":{}} } }, @@ -25,11 +26,8 @@ export const TestInputsForDatasetUpdate = { "version_key": validVersionKey, "tags": [ { - "values": [ - "tag1", - 
"tag2" - ], - "action": "add" + "value": "tag1", + "action": "upsert" }] } }, @@ -40,10 +38,7 @@ export const TestInputsForDatasetUpdate = { "version_key": validVersionKey, "tags": [ { - "values": [ - "tag1", - "tag2" - ], + "value": "tag1", "action": "remove" }] } @@ -56,11 +51,12 @@ export const TestInputsForDatasetUpdate = { "denorm_config": { "denorm_fields": [ { - "values": { + "value": { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "master" }, - "action": "add" + "action": "upsert" } ] } @@ -74,7 +70,7 @@ export const TestInputsForDatasetUpdate = { "denorm_config": { "denorm_fields": [ { - "values": { + "value": { "denorm_key": "actor.id", "denorm_out_field": "userdata" }, @@ -89,16 +85,23 @@ export const TestInputsForDatasetUpdate = { ...requestStructure, request: { "dataset_id": "telemetry", "version_key": validVersionKey, - "transformation_config": [ - { - "values": { - "field_key": "key1", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "add" - }] + "transformations_config": [{ "value": { "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "upsert" }], + } + }, + + DATASET_UPDATE_CONNECTORS_ADD: { + ...requestStructure, request: { + "dataset_id": "telemetry", + "version_key": validVersionKey, + "connectors_config":[{"value":{"id":"6c3fc8c2-357d-489b-b0c9-afdde6e5c6c0","connector_id":"kafka","connector_config":{"type":"kafka","topic":"telemetry.ingest","kafkaBrokers":"kafka-headless.kafka.svc:9092"},"version":"v1"}, "action": "upsert"}], + } + }, + + DATASET_UPDATE_CONNECTORS_REMOVE: { + ...requestStructure, request: { + "dataset_id": "telemetry", + "version_key": validVersionKey, + "connectors_config":[{"value":{"id":"6c3fc8c2-357d-489b-b0c9-afdde6e5c6c0","connector_id":"kafka","connector_config":{"type":"kafka","topic":"telemetry.ingest","kafkaBrokers":"kafka-headless.kafka.svc:9092"},"version":"v1"}, "action": "upsert"}], } }, @@ -198,8 +201,18 @@ export const TestInputsForDatasetUpdate = { "dataset_id": "telemetry", "version_key": validVersionKey, "dataset_config": { - "data_key": "mid", - "timestamp_key": "ets" + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false + }, + "keys_config": { + "timestamp_key": "ets", + "data_key": "ets" + }, + "file_upload_path": [ + "telemetry.json" + ] } } }, @@ -208,16 +221,7 @@ export const TestInputsForDatasetUpdate = { ...requestStructure, request: { "dataset_id": "telemetry", "version_key": validVersionKey, - "transformation_config": [ - { - "values": { - "field_key": "key1", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "remove" - }] + "transformations_config": [{ "value": { "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "upsert" }], } }, @@ -282,69 +286,46 @@ export const TestInputsForDatasetUpdate = { "denorm_config": { "denorm_fields": [ { - "values": { + "value": { "denorm_key": "actor.id", - "denorm_out_field": "userdata" + "denorm_out_field": "userdata", + "dataset_id": "master" }, - "action": "add" + "action": "upsert" }, { - "values": { + "value": { "denorm_key": "actor.id", - "denorm_out_field": "mid" + "denorm_out_field": "mid", + "dataset_id": "master" }, "action": "remove" } ] }, - "transformation_config": [ - { - "values": { - 
"field_key": "key1", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "add" + "transformations_config": [{ "value": { "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "upsert" }, { "value": { "field_key": "key2", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "remove" }], + "dataset_config": { + "indexing_config": { + "olap_store_enabled": false, + "lakehouse_enabled": true, + "cache_enabled": false }, - { - "values": { - "field_key": "key2", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "remove" + "keys_config": { + "timestamp_key": "ets", + "data_key": "ets" }, - { - "values": { - "field_key": "key3", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "update" - } - ], - "dataset_config": { - "data_key": "mid", - "timestamp_key": "ets", - "file_upload_path": ["/config/file.json"] + "file_upload_path": [ + "telemetry.json" + ] }, "tags": [ { - "values": [ - "tag1", - "tag2" - ], + "value": "tag1", "action": "remove" }, { - "values": [ - "tag3", - "tag4" - ], - "action": "add" + "value": "tag3", + "action": "upsert" } ] } @@ -361,44 +342,20 @@ export const TestInputsForDatasetUpdate = { "denorm_key": "actor.id", "denorm_out_field": "userdata" }, - "action": "add" + "action": "upsert" }, { "values": { "denorm_key": "actor.id", "denorm_out_field": "userdata" }, - "action": "add" + "action": "upsert" } ] } } }, - DATASET_UPDATE_WITH_SAME_TAGS_ADD: { - ...requestStructure, request: { - "dataset_id": "telemetry", - "version_key": validVersionKey, - "name": "sb-telemetry", - "tags": [ - { - "values": [ - "tag1", - "tag1" - ], - "action": "remove" - }, - { - "values": [ - "tag4", - "tag4" - ], - "action": "add" - } - ] - } - }, - DATASET_UPDATE_WITH_SAME_DENORM_REMOVE: { ...requestStructure, request: { "dataset_id": "telemetry", @@ -407,16 +364,18 @@ export const TestInputsForDatasetUpdate = { "denorm_config": { "denorm_fields": [ { - "values": { + "value": { "denorm_key": "actor.id", - "denorm_out_field": "mid" + "denorm_out_field": "mid", + "dataset_id": "master" }, "action": "remove" }, { - "values": { + "value": { "denorm_key": "actor.id", - "denorm_out_field": "mid" + "denorm_out_field": "mid", + "dataset_id": "master" }, "action": "remove" } @@ -425,67 +384,32 @@ export const TestInputsForDatasetUpdate = { } }, + DATASET_UPDATE_WITH_EXISTING_DENORM: { + ...requestStructure, request: { + "dataset_id": "telemetry", + "version_key": validVersionKey, + "name": "sb-telemetry", + "denorm_config": { + "denorm_fields": [ + { + "value": { + "denorm_key": "actor.id", + "denorm_out_field": "mid", + "dataset_id": "master" + }, + "action": "upsert" + } + ] + } + } + }, + DATASET_UPDATE_WITH_SAME_TRANSFORMATION_ADD_REMOVE: { ...requestStructure, request: { "dataset_id": "telemetry", "version_key": validVersionKey, "name": "sb-telemetry", - "transformation_config": [ - { - "values": { - "field_key": "key1", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "add" - }, - { - "values": { - "field_key": "key1", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "add" - }, - { - "values": { - "field_key": "key2", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "remove" - }, - { - "values": { - 
"field_key": "key2", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "remove" - }, - { - "values": { - "field_key": "key3", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "update" - }, - { - "values": { - "field_key": "key3", - "transformation_function": {}, - "mode": "Strict", - "metadata": {} - }, - "action": "update" - } - ] + "transformations_config": [{ "value": { "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "upsert" }, { "value": { "field_key": "key1", "transformation_function": { "type": "mask", "expr": "eid", "datatype": "string", "category": "pii" }, "mode": "Strict" }, "action": "upsert" }] } } } diff --git a/api-service/src/tests/DatasetManagement/GenerateSignedURL/Fixtures.ts b/api-service/src/tests/DatasetManagement/GenerateSignedURL/Fixtures.ts index ee904424..06dccdaf 100644 --- a/api-service/src/tests/DatasetManagement/GenerateSignedURL/Fixtures.ts +++ b/api-service/src/tests/DatasetManagement/GenerateSignedURL/Fixtures.ts @@ -55,21 +55,21 @@ export const TestInputsForGenerateURL = { VALID_RESPONSE_FOR_MULTIFILES: [ { "filePath": `container/api-service/user-upload/telemetry.json`, - "fileName": 'telemetry.json', - "preSignedUrl": 'https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/telemetry.json?X-Amz-Algorithm=AWS4-HMAC' + "fileName": "telemetry.json", + "preSignedUrl": "https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/telemetry.json?X-Amz-Algorithm=AWS4-HMAC" }, { - "filePath": 'container/api-service/user-upload/school-data.json', - "fileName": 'school-data.json', - "preSignedUrl": 'https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/school-data.json?X-Amz-Algorithm=AWS4-HMAC' + "filePath": "container/api-service/user-upload/school-data.json", + "fileName": "school-data.json", + "preSignedUrl": "https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/school-data.json?X-Amz-Algorithm=AWS4-HMAC" } ], VALID_RESPONSE_FOR_SINGLE_FILE: [ { - "filePath": 'container/api-service/user-upload/telemetry.json', - "fileName": 'telemetry.json', - "preSignedUrl": 'https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/telemetry.json?X-Amz-Algorithm=AWS4-HMAC' + "filePath": "container/api-service/user-upload/telemetry.json", + "fileName": "telemetry.json", + "preSignedUrl": "https://obsrv-data.s3.ap-south-1.amazonaws.com/container/api-service/user-upload/telemetry.json?X-Amz-Algorithm=AWS4-HMAC" } ] } \ No newline at end of file diff --git a/api-service/src/tests/DatasetManagement/GenerateSignedURL/GenerateSignedURL.spec.ts b/api-service/src/tests/DatasetManagement/GenerateSignedURL/GenerateSignedURL.spec.ts index 8d30ddb6..597b3b7c 100644 --- a/api-service/src/tests/DatasetManagement/GenerateSignedURL/GenerateSignedURL.spec.ts +++ b/api-service/src/tests/DatasetManagement/GenerateSignedURL/GenerateSignedURL.spec.ts @@ -1,9 +1,9 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai, { expect } from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; import httpStatus from "http-status"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; import { apiId, code } from "../../../controllers/GenerateSignedURL/GenerateSignedURL"; import { TestInputsForGenerateURL } from 
"./Fixtures"; diff --git a/api-service/src/tests/QueryTemplates/CreateTemplate/CreateTemplate.spec.ts b/api-service/src/tests/QueryTemplates/CreateTemplate/CreateTemplate.spec.ts index b7f60e27..4bedf75f 100644 --- a/api-service/src/tests/QueryTemplates/CreateTemplate/CreateTemplate.spec.ts +++ b/api-service/src/tests/QueryTemplates/CreateTemplate/CreateTemplate.spec.ts @@ -1,11 +1,11 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { createTemplateFixtures } from "./Fixtures" import { QueryTemplate } from "../../../models/QueryTemplate"; -const apiId = 'api.query.template.create' +const apiId = "api.query.template.create" const msgid = "4a7f14c3-d61e-4d4f-be78-181834eeff6d"; chai.use(spies); chai.should(); diff --git a/api-service/src/tests/QueryTemplates/DeleteTemplate/DeleteTemplate.spec.ts b/api-service/src/tests/QueryTemplates/DeleteTemplate/DeleteTemplate.spec.ts index 4108415b..47377590 100644 --- a/api-service/src/tests/QueryTemplates/DeleteTemplate/DeleteTemplate.spec.ts +++ b/api-service/src/tests/QueryTemplates/DeleteTemplate/DeleteTemplate.spec.ts @@ -1,10 +1,10 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { QueryTemplate } from "../../../models/QueryTemplate"; -const apiId = 'api.query.template.delete' +const apiId = "api.query.template.delete" chai.use(spies); chai.should(); @@ -20,7 +20,7 @@ describe("DELETE QUERY TEMPLATE API", () => { chai.spy.on(QueryTemplate, "destroy", () => { return Promise.resolve({ dataValues: { - template_id: 'sql1' + template_id: "sql1" } }) }) diff --git a/api-service/src/tests/QueryTemplates/ListTemplates/ListTemplates.spec.ts b/api-service/src/tests/QueryTemplates/ListTemplates/ListTemplates.spec.ts index e9f843d1..16800dcd 100644 --- a/api-service/src/tests/QueryTemplates/ListTemplates/ListTemplates.spec.ts +++ b/api-service/src/tests/QueryTemplates/ListTemplates/ListTemplates.spec.ts @@ -1,11 +1,11 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { QueryTemplate } from "../../../models/QueryTemplate"; import { listTemplateFixtures } from "./Fixtures"; -const apiId = 'api.query.template.list' +const apiId = "api.query.template.list" const msgid = "4a7f14c3-d61e-4d4f-be78-181834eeff6d"; chai.use(spies); chai.should(); diff --git a/api-service/src/tests/QueryTemplates/ReadTemplate/ReadTemplate.spec.ts b/api-service/src/tests/QueryTemplates/ReadTemplate/ReadTemplate.spec.ts index 1f6e129e..6a7078f2 100644 --- a/api-service/src/tests/QueryTemplates/ReadTemplate/ReadTemplate.spec.ts +++ b/api-service/src/tests/QueryTemplates/ReadTemplate/ReadTemplate.spec.ts @@ -1,10 +1,10 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { QueryTemplate } from "../../../models/QueryTemplate"; -const apiId = 'api.query.template.read' +const apiId = "api.query.template.read" 
chai.use(spies); chai.should(); @@ -20,12 +20,12 @@ describe("READ QUERY TEMPLATE API", () => { chai.spy.on(QueryTemplate, "findOne", () => { return Promise.resolve({ dataValues: { - template_id: 'sql1', - template_name: 'sql1', - query: '"SELECT * FROM {{DATASET}} WHERE __time BETWEEN TIMESTAMP {{STARTDATE}} AND TIMESTAMP {{ENDDATE}} LIMIT 1"', - query_type: 'sql', - created_by: 'SYSTEM', - updated_by: 'SYSTEM', + template_id: "sql1", + template_name: "sql1", + query: "\"SELECT * FROM {{DATASET}} WHERE __time BETWEEN TIMESTAMP {{STARTDATE}} AND TIMESTAMP {{ENDDATE}} LIMIT 1\"", + query_type: "sql", + created_by: "SYSTEM", + updated_by: "SYSTEM", created_date: "2024-04-29T11:29:58.759Z", updated_date: "2024-04-29T11:29:58.759Z" } diff --git a/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts b/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts index ff384181..8da1818b 100644 --- a/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts +++ b/api-service/src/tests/QueryTemplates/TemplateQuerying/TemplateQuerying.spec.ts @@ -1,14 +1,14 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { QueryTemplate } from "../../../models/QueryTemplate"; import { Datasource } from "../../../models/Datasource"; import nock from "nock"; import { config } from "../../../configs/Config"; import { templateQueryApiFixtures } from "./Fixtures"; -const apiId = 'api.query.template.query'; +const apiId = "api.query.template.query"; const msgid = "4a7f14c3-d61e-4d4f-be78-181834eeff6d" chai.use(spies); @@ -34,12 +34,12 @@ describe("QUERY TEMPLATE API", () => { chai.spy.on(QueryTemplate, "findOne", () => { return Promise.resolve({ dataValues: { - template_id: 'sql1', - template_name: 'sql1', - query: '"SELECT * FROM {{DATASET}} WHERE \"__time\" BETWEEN TIMESTAMP {{STARTDATE}} AND TIMESTAMP {{ENDDATE}}"', - query_type: 'sql', - created_by: 'SYSTEM', - updated_by: 'SYSTEM', + template_id: "sql1", + template_name: "sql1", + query: "\"SELECT * FROM {{DATASET}} WHERE \"__time\" BETWEEN TIMESTAMP {{STARTDATE}} AND TIMESTAMP {{ENDDATE}}\"", + query_type: "sql", + created_by: "SYSTEM", + updated_by: "SYSTEM", created_date: "2024-04 - 30T05: 57:04.387Z", updated_date: "2024-04 - 30T05: 57:04.387Z" } @@ -77,12 +77,12 @@ describe("QUERY TEMPLATE API", () => { chai.spy.on(QueryTemplate, "findOne", () => { return Promise.resolve({ dataValues: { - template_id: 'jsontemplate1', - template_name: 'jsontemplate1', - query: '{"queryType":"timeseries","datasetId":"{{DATASET}}","intervals":"{{STARTDATE}}/{{ENDDATE}}","limit":"{{LIMIT}}","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"school_id"}},"name":"school_id"}]}', - query_type: 'json', - created_by: 'SYSTEM', - updated_by: 'SYSTEM', + template_id: "jsontemplate1", + template_name: "jsontemplate1", + query: "{\"queryType\":\"timeseries\",\"datasetId\":\"{{DATASET}}\",\"intervals\":\"{{STARTDATE}}/{{ENDDATE}}\",\"limit\":\"{{LIMIT}}\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"school_id\"}},\"name\":\"school_id\"}]}", + query_type: "json", + created_by: "SYSTEM", + updated_by: "SYSTEM", created_date: 
"2024-04-28T23:28:35.868Z", updated_date: "2024-04-28T23:28:35.868Z" } @@ -121,12 +121,12 @@ describe("QUERY TEMPLATE API", () => { chai.spy.on(QueryTemplate, "findOne", () => { return Promise.resolve({ dataValues: { - template_id: 'jsontemplate1', - template_name: 'jsontemplate1', - query: '{"queryType":"timeseries","datasetId"::::"{{DATASET}}","intervals":"{{STARTDATE}}/{{ENDDATE}}","limit":"{{LIMIT}}","aggregations":[{"type":"filtered","aggregator":{"type":"count","name":"a0"},"filter":{"type":"not","field":{"type":"null","column":"school_id"}},"name":"school_id"}]}', - query_type: 'json', - created_by: 'SYSTEM', - updated_by: 'SYSTEM', + template_id: "jsontemplate1", + template_name: "jsontemplate1", + query: "{\"queryType\":\"timeseries\",\"datasetId\"::::\"{{DATASET}}\",\"intervals\":\"{{STARTDATE}}/{{ENDDATE}}\",\"limit\":\"{{LIMIT}}\",\"aggregations\":[{\"type\":\"filtered\",\"aggregator\":{\"type\":\"count\",\"name\":\"a0\"},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"null\",\"column\":\"school_id\"}},\"name\":\"school_id\"}]}", + query_type: "json", + created_by: "SYSTEM", + updated_by: "SYSTEM", created_date: "2024-04-28T23:28:35.868Z", updated_date: "2024-04-28T23:28:35.868Z" } diff --git a/api-service/src/tests/QueryTemplates/UpdateTemplate/UpdateTemplate.spec.ts b/api-service/src/tests/QueryTemplates/UpdateTemplate/UpdateTemplate.spec.ts index 195018c3..af50d77a 100644 --- a/api-service/src/tests/QueryTemplates/UpdateTemplate/UpdateTemplate.spec.ts +++ b/api-service/src/tests/QueryTemplates/UpdateTemplate/UpdateTemplate.spec.ts @@ -1,11 +1,11 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import { updateTemplateFixtures } from "./Fixtures" import { QueryTemplate } from "../../../models/QueryTemplate"; -const apiId = 'api.query.template.update' +const apiId = "api.query.template.update" const msgid = "4a7f14c3-d61e-4d4f-be78-181834eeff6d"; chai.use(spies); chai.should(); diff --git a/api-service/src/tests/QueryWrapper/SqlWrapper/SqlWrapper.spec.ts b/api-service/src/tests/QueryWrapper/SqlWrapper/SqlWrapper.spec.ts index bd1e1a31..510b8af4 100644 --- a/api-service/src/tests/QueryWrapper/SqlWrapper/SqlWrapper.spec.ts +++ b/api-service/src/tests/QueryWrapper/SqlWrapper/SqlWrapper.spec.ts @@ -1,8 +1,8 @@ -import app from "../../../../app"; +import app from "../../../app"; import chai from "chai"; import chaiHttp from "chai-http"; import spies from "chai-spies"; -import { describe, it } from 'mocha'; +import { describe, it } from "mocha"; import _ from "lodash"; import { TestInputsForSqlWrapper } from "./Fixtures"; import httpStatus from "http-status"; diff --git a/api-service/src/types/ConfigModels.ts b/api-service/src/types/ConfigModels.ts index 320cbb32..a03ec5f4 100644 --- a/api-service/src/types/ConfigModels.ts +++ b/api-service/src/types/ConfigModels.ts @@ -1,5 +1,5 @@ import { IngestionConfig } from "./IngestionModels"; -import { IDataSourceRules, IRules } from "./QueryModels"; +import { IDataSourceRules } from "./QueryModels"; export interface ExtractionConfig { is_batch_event: boolean; diff --git a/command-service/Dockerfile b/command-service/Dockerfile index 2aa9a1b5..2ca4edef 100644 --- a/command-service/Dockerfile +++ b/command-service/Dockerfile @@ -1,7 +1,7 @@ FROM --platform=linux/amd64 python:3.12-alpine COPY --from=ubuntu /usr/local/bin /usr/local/bin -RUN apk update 
&& apk add curl jq && curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" && chmod +x kubectl && mv kubectl /usr/local/bin/ +RUN apk update && apk add curl jq vim && curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" && chmod +x kubectl && mv kubectl /usr/local/bin/ RUN cd /tmp && curl -OL https://get.helm.sh/helm-v3.13.2-linux-amd64.tar.gz \ && tar -zxvf helm-v3.13.2-linux-amd64.tar.gz \ diff --git a/command-service/helm-charts/flink/Chart.lock b/command-service/helm-charts/flink-connector/Chart.lock similarity index 100% rename from command-service/helm-charts/flink/Chart.lock rename to command-service/helm-charts/flink-connector/Chart.lock diff --git a/command-service/helm-charts/flink/Chart.yaml b/command-service/helm-charts/flink-connector/Chart.yaml similarity index 100% rename from command-service/helm-charts/flink/Chart.yaml rename to command-service/helm-charts/flink-connector/Chart.yaml diff --git a/command-service/helm-charts/flink/charts/.helmignore b/command-service/helm-charts/flink-connector/charts/.helmignore similarity index 100% rename from command-service/helm-charts/flink/charts/.helmignore rename to command-service/helm-charts/flink-connector/charts/.helmignore diff --git a/command-service/helm-charts/flink/charts/common/Chart.yaml b/command-service/helm-charts/flink-connector/charts/common/Chart.yaml similarity index 100% rename from command-service/helm-charts/flink/charts/common/Chart.yaml rename to command-service/helm-charts/flink-connector/charts/common/Chart.yaml diff --git a/command-service/helm-charts/flink/charts/common/templates/_affinities.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_affinities.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_affinities.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_affinities.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_capabilities.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_capabilities.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_capabilities.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_capabilities.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_configs.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_configs.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_configs.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_configs.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_errors.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_errors.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_errors.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_errors.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_images.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_images.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_images.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_images.tpl diff --git 
a/command-service/helm-charts/flink/charts/common/templates/_ingress.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_ingress.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_ingress.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_ingress.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_labels.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_labels.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_labels.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_labels.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_names.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_names.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_names.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_names.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_secrets.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_secrets.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_secrets.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_secrets.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_storage.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_storage.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_storage.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_storage.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_tplvalues.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_tplvalues.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_tplvalues.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_tplvalues.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_utils.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_utils.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_utils.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_utils.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_variables.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_variables.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_variables.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_variables.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/_warnings.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/_warnings.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/_warnings.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/_warnings.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_cassandra.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_cassandra.tpl similarity index 100% rename from 
command-service/helm-charts/flink/charts/common/templates/validations/_cassandra.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_cassandra.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_mariadb.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_mariadb.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_mariadb.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_mariadb.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_mongodb.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_mongodb.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_mongodb.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_mongodb.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_mysql.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_mysql.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_mysql.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_mysql.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_postgresql.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_postgresql.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_postgresql.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_postgresql.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_redis.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_redis.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_redis.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_redis.tpl diff --git a/command-service/helm-charts/flink/charts/common/templates/validations/_validations.tpl b/command-service/helm-charts/flink-connector/charts/common/templates/validations/_validations.tpl similarity index 100% rename from command-service/helm-charts/flink/charts/common/templates/validations/_validations.tpl rename to command-service/helm-charts/flink-connector/charts/common/templates/validations/_validations.tpl diff --git a/command-service/helm-charts/flink/charts/common/values.yaml b/command-service/helm-charts/flink-connector/charts/common/values.yaml similarity index 100% rename from command-service/helm-charts/flink/charts/common/values.yaml rename to command-service/helm-charts/flink-connector/charts/common/values.yaml diff --git a/command-service/helm-charts/flink/templates/NOTES.txt b/command-service/helm-charts/flink-connector/templates/NOTES.txt similarity index 100% rename from command-service/helm-charts/flink/templates/NOTES.txt rename to command-service/helm-charts/flink-connector/templates/NOTES.txt diff --git a/command-service/helm-charts/flink/templates/_base_serviceAccount.tpl b/command-service/helm-charts/flink-connector/templates/_base_serviceAccount.tpl similarity index 100% rename from command-service/helm-charts/flink/templates/_base_serviceAccount.tpl rename 
to command-service/helm-charts/flink-connector/templates/_base_serviceAccount.tpl diff --git a/command-service/helm-charts/flink/templates/_helpers.tpl b/command-service/helm-charts/flink-connector/templates/_helpers.tpl similarity index 100% rename from command-service/helm-charts/flink/templates/_helpers.tpl rename to command-service/helm-charts/flink-connector/templates/_helpers.tpl diff --git a/command-service/helm-charts/flink/templates/_image_flink.tpl b/command-service/helm-charts/flink-connector/templates/_image_flink.tpl similarity index 57% rename from command-service/helm-charts/flink/templates/_image_flink.tpl rename to command-service/helm-charts/flink-connector/templates/_image_flink.tpl index d0d7c2da..5c542f64 100644 --- a/command-service/helm-charts/flink/templates/_image_flink.tpl +++ b/command-service/helm-charts/flink-connector/templates/_image_flink.tpl @@ -3,12 +3,13 @@ {{- $context := .context }} {{- $scope := .scope }} {{- with $scope }} -{{- $registry := default $context.Values.global.image.registry .registry }} -{{- $image := printf "%s/%s" $registry .repository}} +{{- $registry := default $context.Values.registry .registry }} +{{- $repository := default $context.Values.repository .repository }} +{{- $image := printf "%s/%s" $registry $repository}} {{- if .digest }} {{- printf "%s@%s" $image .digest }} {{- else }} -{{- $tag := default "latest" .tag }} +{{- $tag := default $context.Values.tag .tag }} {{- printf "%s:%s" $image $tag }} {{- end }} {{- end }} diff --git a/command-service/helm-charts/flink/templates/_namespace.tpl b/command-service/helm-charts/flink-connector/templates/_namespace.tpl similarity index 100% rename from command-service/helm-charts/flink/templates/_namespace.tpl rename to command-service/helm-charts/flink-connector/templates/_namespace.tpl diff --git a/command-service/helm-charts/flink/templates/configmap.yaml b/command-service/helm-charts/flink-connector/templates/configmap.yaml similarity index 100% rename from command-service/helm-charts/flink/templates/configmap.yaml rename to command-service/helm-charts/flink-connector/templates/configmap.yaml diff --git a/command-service/helm-charts/flink/templates/deployment.yaml b/command-service/helm-charts/flink-connector/templates/deployment.yaml similarity index 76% rename from command-service/helm-charts/flink/templates/deployment.yaml rename to command-service/helm-charts/flink-connector/templates/deployment.yaml index 1cd613b3..3da5d6ce 100644 --- a/command-service/helm-charts/flink/templates/deployment.yaml +++ b/command-service/helm-charts/flink-connector/templates/deployment.yaml @@ -51,37 +51,17 @@ spec: image: {{ include "base.image.flink" (dict "context" $ "scope" $jobData) }} imagePullPolicy: {{ default .Values.imagePullPolicy "Always" }} workingDir: {{ .Values.taskmanager.flink_work_dir }} - # args: ["taskmanager"] - # env: - # - name: FLINK_PROPERTIES - # value: |+ - # jobmanager.rpc.address: {{ $jobName }}-jobmanager - # taskmanager.rpc.port=6122 - # taskmanager.numberOfTaskSlots: 2 - # metrics.reporters: prom - # metrics.reporter.prom.factory.class: org.apache.flink.metrics.prometheus.PrometheusReporterFactory - # metrics.reporter.prom.host: {{ $jobName }}-taskmanager - # metrics.reporter.prom.port: 9251 - command: ["/opt/flink/bin/taskmanager.sh"] - args: ["start-foreground", - {{- if eq .Values.checkpoint_store_type "azure" }} - "-Dfs.azure.account.key.{{ .Values.global.azure_storage_account_name }}.blob.core.windows.net={{ .Values.global.azure_storage_account_key }}", - {{- end 
}} - {{- if and (eq .Values.checkpoint_store_type "s3") (ne .Values.s3_auth_type "serviceAccount") }} - "-Ds3.access-key={{ .Values.s3_access_key }}", - "-Ds3.secret-key={{ .Values.s3_secret_key }}", - "-Ds3.endpoint={{ .Values.s3_endpoint }}", - "-Ds3.path.style.access={{ .Values.s3_path_style_access }}", - {{- end }} - {{- if eq .Values.checkpoint_store_type "gcs" }} - "-Dgoogle.cloud.auth.service.account.enable=true", - {{- end }} - "-Dweb.submit.enable=false", - "-Dmetrics.reporter.prom.class=org.apache.flink.metrics.prometheus.PrometheusReporter", - "-Dmetrics.reporter.prom.host={{ $jobName }}-taskmanager", - "-Dmetrics.reporter.prom.port=9251-9260", - "-Djobmanager.rpc.address={{ $jobName }}-jobmanager", - "-Dtaskmanager.rpc.port={{ .Values.taskmanager.rpc_port }}"] + args: ["taskmanager"] + env: + - name: FLINK_PROPERTIES + value: |+ + jobmanager.rpc.address: {{ $jobName }}-jobmanager + taskmanager.rpc.port=6122 + taskmanager.numberOfTaskSlots: 2 + metrics.reporters: prom + metrics.reporter.prom.factory.class: org.apache.flink.metrics.prometheus.PrometheusReporterFactory + metrics.reporter.prom.host: {{ $jobName }}-taskmanager + metrics.reporter.prom.port: 9251 ports: - containerPort: {{ .Values.taskmanager.rpc_port }} name: rpc @@ -89,9 +69,12 @@ spec: securityContext: {{- toYaml .Values.securityContext | nindent 12 }} volumeMounts: - - name: flink-config-volume - mountPath: /data/flink/conf/flink-connector.conf - subPath: flink-connector.conf + # - name: flink-config-volume + # mountPath: /data/flink/conf/connectors-scala-config.conf + # subPath: connectors-scala-config.conf + # - name: flink-config-volume + # mountPath: /data/flink/conf/connectors-python-config.conf + # subPath: connectors-python-config.yaml # - name: flink-config-volume # mountPath: /opt/flink/conf/flink-conf.yaml # subPath: flink-conf.yaml @@ -99,12 +82,14 @@ spec: mountPath: /opt/flink/conf/log4j-console.properties subPath: log4j-console.properties volumes: - - name: flink-config-volume - configMap: - name: flink-connector-conf - items: - - key: connectors-scala-config.conf - path: flink-connector.conf + # - name: flink-config-volume + # configMap: + # name: flink-connector-conf + # items: + # - key: connectors-scala-config.conf + # path: connectors-scala-config.conf + # - key: connectors-python-config.yaml + # path: connectors-python-config.yaml - name: flink-common-volume configMap: name: {{ $jobName }}-config @@ -140,7 +125,7 @@ spec: app.kubernetes.io/component: {{ printf "%s-%s" $jobName $component }} component: {{ printf "%s-%s" $jobName $component }} annotations: - checksum/config: {{ .Files.Glob "configs/*" | toYaml | sha256sum }} + checksum/config: {{ .Files.Glob "configs/*" | toYaml | sha256sum }} checksum/job-config: {{ $jobData | toYaml | sha256sum }} spec: serviceAccountName: {{ include "base.serviceaccountname" .
}} @@ -171,21 +156,26 @@ spec: metrics.reporter.prom.factory.class: org.apache.flink.metrics.prometheus.PrometheusReporterFactory metrics.reporter.prom.host: {{ $jobName }}-jobmanager metrics.reporter.prom.port: 9250 - volumeMounts: - name: flink-config-volume - mountPath: /data/flink/conf/flink-connector.conf - subPath: flink-connector.conf + mountPath: /data/flink/conf/connectors-scala-config.conf + subPath: connectors-scala-config.conf + - name: flink-config-volume + mountPath: /data/flink/conf/connectors-python-config.conf + subPath: connectors-python-config.conf - name: data mountPath: /flink/connectors - name: flink-common-volume mountPath: /opt/flink/conf/log4j-console.properties subPath: log4j-console.properties - + - name: {{ $jobName }}-job-submit image: {{ include "base.image.flink" (dict "context" $ "scope" $jobData) }} imagePullPolicy: {{ default .Values.imagePullPolicy "Always" }} workingDir: /opt/flink + env: + - name: CONNECTOR_ID + value: {{ index $jobData "connector_id" }} command: - /bin/bash - -c @@ -194,8 +184,8 @@ spec: sleep 30s; /opt/flink/bin/flink run -m \ {{ $jobName }}-jobmanager.{{ include "base.namespace" . }}.svc.cluster.local:8081 \ - /flink/connectors/{{ $jobName }}-1.0.0/{{ $jobName }}-1.0.0.jar \ - --config.file.path /data/flink/conf/flink-connector.conf \ + /flink/connectors/{{ index $jobData "source" }}/{{ index $jobData "main_program" }} \ + --config.file.path /data/flink/conf/connectors-scala-config.conf \ {{- if eq .Values.checkpoint_store_type "azure" }} "-Dfs.azure.account.key.{{ .Values.global.azure_storage_account_name }}.blob.core.windows.net={{ .Values.global.azure_storage_account_key }}" \ {{- end }} @@ -219,8 +209,11 @@ spec: {{- toYaml .Values.securityContext | nindent 12 }} volumeMounts: - name: flink-config-volume - mountPath: /data/flink/conf/flink-connector.conf - subPath: flink-connector.conf + mountPath: /data/flink/conf/connectors-scala-config.conf + subPath: connectors-scala-config.conf + - name: flink-config-volume + mountPath: /data/flink/conf/connectors-python-config.yaml + subPath: connectors-python-config.yaml - name: data mountPath: /flink/connectors - name: flink-common-volume @@ -234,7 +227,9 @@ spec: name: flink-connector-conf items: - key: connectors-scala-config.conf - path: flink-connector.conf + path: connectors-scala-config.conf + - key: connectors-python-config.yaml + path: connectors-python-config.yaml - name: flink-common-volume configMap: name: {{ $jobName }}-config diff --git a/command-service/helm-charts/flink/templates/hpa.yaml b/command-service/helm-charts/flink-connector/templates/hpa.yaml similarity index 100% rename from command-service/helm-charts/flink/templates/hpa.yaml rename to command-service/helm-charts/flink-connector/templates/hpa.yaml diff --git a/command-service/helm-charts/flink/templates/ingress.yaml b/command-service/helm-charts/flink-connector/templates/ingress.yaml similarity index 100% rename from command-service/helm-charts/flink/templates/ingress.yaml rename to command-service/helm-charts/flink-connector/templates/ingress.yaml diff --git a/command-service/helm-charts/flink/templates/service.yaml b/command-service/helm-charts/flink-connector/templates/service.yaml similarity index 100% rename from command-service/helm-charts/flink/templates/service.yaml rename to command-service/helm-charts/flink-connector/templates/service.yaml diff --git a/command-service/helm-charts/flink/templates/serviceaccount.yaml b/command-service/helm-charts/flink-connector/templates/serviceaccount.yaml similarity 
index 100% rename from command-service/helm-charts/flink/templates/serviceaccount.yaml rename to command-service/helm-charts/flink-connector/templates/serviceaccount.yaml diff --git a/command-service/helm-charts/flink/templates/servicemonitor.yaml b/command-service/helm-charts/flink-connector/templates/servicemonitor.yaml similarity index 100% rename from command-service/helm-charts/flink/templates/servicemonitor.yaml rename to command-service/helm-charts/flink-connector/templates/servicemonitor.yaml diff --git a/command-service/helm-charts/flink/values.yaml b/command-service/helm-charts/flink-connector/values.yaml similarity index 90% rename from command-service/helm-charts/flink/values.yaml rename to command-service/helm-charts/flink-connector/values.yaml index f0ea188c..ff505a84 100644 --- a/command-service/helm-charts/flink/values.yaml +++ b/command-service/helm-charts/flink-connector/values.yaml @@ -14,9 +14,9 @@ commonLabels: # repository: sunbirded.azurecr.io/data-pipeline # tag: "release-5.2.0_RC1_2c615f8_12" # docker pull sunbirded.azurecr.io/sunbird-datapipeline:release-4.9.0_RC4_1 -registry: surabhi1510 -repository: flink-python-3.11 -tag: 1.0.0 +registry: sanketikahub +repository: flink-connectors +tag: 1.17.2-scala_2.12-java11 imagePullSecrets: [] ## Databases @@ -264,27 +264,6 @@ baseconfig: | port = "{{ .Values.global.cassandra.port }}" } -# !!! Don't override the resources here. It's just a template -# Proper way to override resouce is to -# flink_jobs: -# master-data-processor: -# resources: -# taskmanager: -# resources: -# requests: -# cpu: 100m -# memory: 100Mi -# limits: -# cpu: 1 -# memory: 1024Mi -# jobmanager: -# resources: -# requests: -# cpu: 100m -# memory: 100Mi -# limits: -# cpu: 1 -# memory: 1024Mi flink_resources: taskmanager: resources: @@ -303,7 +282,7 @@ flink_resources: cpu: 1 memory: 1024Mi -serviceMonitor: +serviceMonitor: jobmanager: enabled: true interval: 30s @@ -322,14 +301,7 @@ serviceMonitor: port: prom flink_jobs: - kafka-connector: - enabled: true - registry: surabhi1510 - repository: flink-python-3.11 - tag: 1.0.0 - imagePullSecrets: [] - - job_classname: org.sunbird.obsrv.connector.KafkaConnector + commonAnnotations: reloader.stakater.com/auto: "true" \ No newline at end of file diff --git a/command-service/helm-charts/spark-connector-cron/Chart.yaml b/command-service/helm-charts/spark-connector-cron/Chart.yaml index a7a290f9..7c047bec 100644 --- a/command-service/helm-charts/spark-connector-cron/Chart.yaml +++ b/command-service/helm-charts/spark-connector-cron/Chart.yaml @@ -7,5 +7,5 @@ dependencies: description: A production-ready Helm chart base template maintainers: - name: NimbusHub.in -name: spark-submit-cron +name: spark-connector-cron version: 0.1.0 diff --git a/command-service/src/command/alert_manager_command.py b/command-service/src/command/alert_manager_command.py index 880fde1c..1c989a5c 100644 --- a/command-service/src/command/alert_manager_command.py +++ b/command-service/src/command/alert_manager_command.py @@ -81,13 +81,15 @@ def execute(self, command_payload: CommandPayload, action: Action): return ActionResponse(status="OK", status_code=200) def get_dataset(self, dataset_id: str) -> str: - query = f"SELECT * FROM datasets WHERE dataset_id='{dataset_id}'" - result = self.db_service.execute_select_one(sql=query) + query = f"SELECT * FROM datasets WHERE dataset_id= %s" + params = (dataset_id,) + result = self.db_service.execute_select_one(sql=query, params=params) return result def get_dataset_source_config(self, dataset_id: 
str) -> str: - query = f"SELECT * FROM dataset_source_config WHERE dataset_id='{dataset_id}'" - result = self.db_service.execute_select_all(sql=query) + query = f"SELECT * FROM dataset_source_config WHERE dataset_id= %s" + params = (dataset_id,) + result = self.db_service.execute_select_all(sql=query, params=params) return result def get_modified_metric( diff --git a/command-service/src/command/connector_command.py b/command-service/src/command/connector_command.py index 2dbed28d..7b2d2b13 100644 --- a/command-service/src/command/connector_command.py +++ b/command-service/src/command/connector_command.py @@ -37,24 +37,27 @@ def execute(self, command_payload: CommandPayload, action: Action): return result def _deploy_connectors(self, dataset_id, active_connectors, is_masterdata): - result = None - # self._stop_connector_jobs(dataset_id, active_connectors, is_masterdata) + result = None + self._stop_connector_jobs(is_masterdata, self.connector_job_config["spark"]["namespace"]) result = self._install_jobs(dataset_id, active_connectors, is_masterdata) - return result + return result - def _stop_connector_jobs(self, dataset_id, active_connectors, is_masterdata): - managed_releases = [] - connector_jar_config = self.config.find("connector_job") - masterdata_jar_config = self.config.find("masterdata_job") - for connector_type in connector_jar_config: - for release in connector_jar_config[connector_type]: - managed_releases.append(release["release_name"]) - if is_masterdata: - for release in masterdata_jar_config: - managed_releases.append(release["release_name"]) - - helm_ls_cmd = ["helm", "ls", "--namespace", self.connector_job_ns] + def _stop_connector_jobs(self, is_masterdata, namespace): + print(f"Uninstalling jobs for {namespace}..") + base_helm_chart = self.connector_job_config["spark"]["base_helm_chart"] + + # managed_releases = [] + # connector_jar_config = self.config.find("connector_job") + # masterdata_jar_config = self.config.find("masterdata_job") + # for connector_type in connector_jar_config: + # for release in connector_jar_config[connector_type]: + # managed_releases.append(release["release_name"]) + # if is_masterdata: + # for release in masterdata_jar_config: + # managed_releases.append(release["release_name"]) + + helm_ls_cmd = ["helm", "ls", "--namespace", namespace] helm_ls_result = subprocess.run( helm_ls_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) @@ -63,14 +66,14 @@ def _stop_connector_jobs(self, dataset_id, active_connectors, is_masterdata): jobs = helm_ls_result.stdout.decode() for job in jobs.splitlines()[1:]: release_name = job.split()[0] - if release_name in managed_releases: + if base_helm_chart in job: print("Uninstalling job {0}".format(release_name)) helm_uninstall_cmd = [ "helm", "uninstall", release_name, "--namespace", - self.connector_job_ns, + namespace, ] helm_uninstall_result = subprocess.run( helm_uninstall_cmd, @@ -86,32 +89,146 @@ def _stop_connector_jobs(self, dataset_id, active_connectors, is_masterdata): def _install_jobs(self, dataset_id, active_connectors, is_masterdata): result = None - for connector in active_connectors: print("Installing connector {0}".format(connector)) if connector.connector_runtime == "spark": - result = self._perform_spark_install(connector) + result = self._perform_spark_install(dataset_id, connector) + elif connector.connector_runtime == "flink": + result = self._perform_flink_install(dataset_id, connector) else: print( f"Connector {connector.connector_id} is not supported for deployment" ) break - + # if 
is_masterdata: # print("Installing masterdata job") # masterdata_jar_config = self.config.find("masterdata_job") # for release in masterdata_jar_config: # result = self._perform_install(release) return result + + def _perform_flink_install(self, dataset_id, connector_instance): + err = None + result = None + release_name = connector_instance.connector_id + runtime = connector_instance.connector_runtime + namespace = self.connector_job_config["flink"]["namespace"] + job_name = release_name.replace(".", "-") + helm_ls_cmd = ["helm", "ls", "--namespace", namespace] + + helm_ls_result = subprocess.run( + helm_ls_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + + if helm_ls_result.returncode == 0: + jobs = helm_ls_result.stdout.decode() + + deployment_exists = any(job_name in line for line in jobs.splitlines()[1:]) + if deployment_exists: + restart_cmd = f"kubectl delete pods --selector app.kubernetes.io/name=flink,component={job_name}-jobmanager --namespace {namespace} && kubectl delete pods --selector app.kubernetes.io/name=flink,component={job_name}-taskmanager --namespace {namespace}".format( + namespace=namespace, job_name=job_name + ) + print("Restart command: ", restart_cmd) + # Run the helm command + helm_install_result = subprocess.run( + restart_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True, + ) + if helm_install_result.returncode == 0: + print(f"Job {job_name} restart succeeded...") + else: + err = True + return ActionResponse( + status="ERROR", + status_code=500, + error_message="FLINK_HELM_LIST_EXCEPTION", + ) + print(f"Error restarting pod: {helm_ls_result.stderr.decode()}") + + if err is None: + result = ActionResponse(status="OK", status_code=200) + + return result + else: + if self._get_live_instances(runtime="flink", connector_instance=connector_instance): + connector_source = json.loads(connector_instance.connector_source) + flink_jobs = dict() + flink_jobs[job_name] = { + "enabled": "true", + "connector_id": connector_instance.connector_id, + "source": connector_source.get("source"), + "main_program": connector_source.get("main_program") + } + + set_json_value = json.dumps(flink_jobs) + helm_install_cmd = [ + "helm", + "upgrade", + "--install", + job_name, + f"""{self.config.find("helm_charts_base_dir")}/{self.connector_job_config["flink"]["base_helm_chart"]}""", + "--namespace", + namespace, + "--create-namespace", + "--set-json", + f"flink_jobs={set_json_value.replace(" ", "")}" + ] + + print("flink connector installation: ", " ".join(helm_install_cmd)) + + helm_install_result = subprocess.run( + helm_install_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + + print(helm_install_result) + + if helm_install_result.returncode == 0: + print(f"Job '{job_name}' deployment succeeded...") + else: + err = True + result = ActionResponse( + status="ERROR", + status_code=500, + error_message="FLINK_CONNECTOR_HELM_INSTALLATION_EXCEPTION", + ) + print( + f"Error installing job '{job_name}': {helm_install_result.stderr.decode()}" + ) + + if err is None: + result = ActionResponse(status="OK", status_code=200) - def _perform_spark_install(self, connector_instance): + return result + else: + self._stop_connector_jobs(is_masterdata=False, namespace="flink") + else: + print(f"Error checking Flink deployments: {helm_ls_result.stderr.decode()}") + return ActionResponse( + status="ERROR", + status_code=500, + error_message="FLINK_HELM_LIST_EXCEPTION", + ) + + def _perform_spark_install(self, dataset_id, connector_instance): err = None result = None 
release_name = connector_instance.id - # print("Instance -->>", connector_instance) connector_source = json.loads(connector_instance.connector_source) - print(connector_source) + schedule = connector_instance.operations_config["schedule"] + + schedule_configs = { + "Hourly": "0 * * * *", # Runs at the start of every hour + "Weekly": "0 0 * * 0", # Runs at midnight every Sunday + "Monthly": "0 0 1 * *", # Runs at midnight on the 1st day of every month + "Yearly": "0 0 1 1 *" # Runs at midnight on January 1st each year + } + + namespace = self.connector_job_config["spark"]["namespace"] + helm_install_cmd = [ "helm", "upgrade", @@ -119,7 +236,7 @@ def _perform_spark_install(self, connector_instance): release_name, f"""{self.config.find("helm_charts_base_dir")}/{self.connector_job_config["spark"]["base_helm_chart"]}""", "--namespace", - self.connector_job_config["spark"]["namespace"], + namespace, "--create-namespace", "--set", "technology={}".format(connector_instance.technology), @@ -132,16 +249,17 @@ def _perform_spark_install(self, connector_instance): "--set", "main_file={}".format(connector_source["main_program"]), "--set", - "cronSchedule={}".format(connector_instance.operations_config["schedule"]) + "cronSchedule={}".format(schedule_configs[schedule]) ] - print(" ".join(helm_install_cmd)) + print("spark connector installation:", " ".join(helm_install_cmd)) helm_install_result = subprocess.run( helm_install_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) if helm_install_result.returncode == 0: - print(f"Job {release_name} deployment succeeded...") + print(f"Job '{release_name}' update succeeded...") + result = ActionResponse(status="OK", status_code=200) else: err = True result = ActionResponse( @@ -149,25 +267,23 @@ def _perform_spark_install(self, connector_instance): status_code=500, error_message="SPARK_CRON_HELM_INSTALLATION_EXCEPTION", ) - print( - f"Error re-installing job {release_name}: {helm_install_result.stderr.decode()}" - ) - - if err is None: - result = ActionResponse(status="OK", status_code=200) + print(f"Error updating job '{release_name}': {helm_install_result.stderr.decode()}") + + if result is None: + result = ActionResponse(status="ERROR", status_code=500, error_message="UNKNOWN_ERROR") return result def _get_connector_details(self, dataset_id): active_connectors = [] - records = self.db_service.execute_select_all( - f""" - SELECT ci.id, ci.connector_id, ci.operations_config, cr.runtime as connector_runtime, cr.source as connector_source, cr.technology - FROM connector_instances ci - JOIN connector_registry cr on ci.connector_id = cr.id - WHERE ci.status='{DatasetStatusType.Live.name}' and ci.dataset_id = '{dataset_id}' - """ - ) + query = f""" + SELECT ci.id, ci.connector_id, ci.operations_config, cr.runtime as connector_runtime, cr.source as connector_source, cr.technology, cr.version + FROM connector_instances ci + JOIN connector_registry cr on ci.connector_id = cr.id + WHERE ci.status= %s and ci.dataset_id = %s + """ + params = (DatasetStatusType.Live.name, dataset_id,) + records = self.db_service.execute_select_all(sql=query, params=params) for record in records: active_connectors.append(from_dict( @@ -178,15 +294,77 @@ def _get_connector_details(self, dataset_id): def _get_masterdata_details(self, dataset_id): is_masterdata = False - rows = self.db_service.execute_select_all( - f""" - SELECT * - FROM datasets - WHERE status='{DatasetStatusType.Live.name}' AND dataset_id = '{dataset_id}' AND type = 'master' - """ - ) - + query = f""" + SELECT * + FROM 
datasets + WHERE status= %s AND dataset_id = %s AND type = 'master' + """ + params = (DatasetStatusType.Live.name, dataset_id,) + rows = self.db_service.execute_select_all(sql=query, params=params) if len(rows) > 0: is_masterdata = True return is_masterdata + + ## TODO: check for connector_id as well + def _get_live_instances(self, runtime, connector_instance): + has_live_instances = False + query = f""" + SELECT d.id AS dataset_id, ci.id AS connector_instance_id, ci.connector_id + FROM connector_instances ci + JOIN connector_registry cr ON ci.connector_id = cr.id + JOIN datasets d ON ci.dataset_id = d.id + WHERE cr.runtime = %s AND ci.status = %s AND ci.connector_id = %s; + """ + params = (runtime, DatasetStatusType.Live.name, connector_instance.connector_id) + rows = self.db_service.execute_select_all(sql=query, params=params) + if len(rows) > 0: + has_live_instances = True + + return has_live_instances + + # def _perform_install(self, release): + # err = None + # result = None + # release_name = release["release_name"] + # helm_install_cmd = [ + # "helm", + # "upgrade", + # "--install", + # release_name, + # self.connector_job_chart_dir, + # "--namespace", + # self.connector_job_ns, + # "--create-namespace", + # "--set", + # "file.path={}".format(release["jar"]), + # "--set", + # "class.name={}".format(release["class"]), + # "--set", + # "job.name={}".format(release_name), + # "--set", + # "args={}".format(",".join(release["args"])), + # "--set", + # "schedule={}".format(release["schedule"]), + # ] + # helm_install_result = subprocess.run( + # helm_install_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + # ) + # if helm_install_result.returncode == 0: + # print(f"Job {release_name} deployment succeeded...") + # else: + # err = True + # result = ActionResponse( + # status="ERROR", + # status_code=500, + # error_message="FLINK_HELM_INSTALLATION_EXCEPTION", + # ) + # print( + # f"Error re-installing job {release_name}: {helm_install_result.stderr.decode()}" + # ) + + # if err is None: + # result = ActionResponse(status="OK", status_code=200) + + # return result + diff --git a/command-service/src/command/connector_registry.py b/command-service/src/command/connector_registry.py index 36cb0b57..a91f651d 100644 --- a/command-service/src/command/connector_registry.py +++ b/command-service/src/command/connector_registry.py @@ -1,9 +1,11 @@ import json import os +import base64 import tarfile import uuid import zipfile from datetime import datetime +from pathlib import Path import requests from fastapi import status @@ -184,6 +186,7 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: } connector_id = obj["id"].replace(" ", "-") + self.update_connector_registry(connector_id, self.metadata['metadata']['version']) registry_meta = ConnectorRegsitryv2(connector_id, obj['name'], 'source', @@ -194,7 +197,7 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: self.metadata['metadata']['runtime'], self.metadata['metadata']['licence'], self.metadata['metadata']['owner'], - obj['icon'], + self.load_file_bytes(obj["icon"]), 'Live', rel_path, json.dumps(source), @@ -205,8 +208,8 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: 'SYSTEM', datetime.now().strftime("%Y-%m-%d %H:%M:%S") ) - query = self.build_insert_query(registry_meta) - success = self.execute_query(query) + query, params = self.build_insert_query(registry_meta) + success = self.execute_query(query, params) if not success: return RegistryResponse( 
status="failure", @@ -225,7 +228,7 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: connector_id = ( self.metadata.get("metadata", {}).get("id", "").replace(" ", "-") ) - + self.update_connector_registry(connector_id, self.metadata['metadata']['version']) source = { "source": connector_source, "main_class": self.metadata["metadata"]["main_class"], @@ -243,7 +246,7 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: self.metadata['metadata']['runtime'], self.metadata['metadata']['licence'], self.metadata['metadata']['owner'], - self.metadata['metadata']['icon'], + self.load_file_bytes(self.metadata['metadata']["icon"]), 'Live', rel_path, json.dumps(source), @@ -254,8 +257,8 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: 'SYSTEM', datetime.now().strftime("%Y-%m-%d %H:%M:%S") ) - query = self.build_insert_query(registry_meta) - success = self.execute_query(query) + query, params = self.build_insert_query(registry_meta) + success = self.execute_query(query, params) if not success: return RegistryResponse( status="failure", @@ -269,9 +272,9 @@ def process_metadata(self, rel_path, connector_source) -> RegistryResponse: statusCode=status.HTTP_200_OK, ) - def execute_query(self, query) -> bool: + def execute_query(self, query, params) -> bool: try: - result = self.db_service.execute_upsert(query) + result = self.db_service.execute_upsert(sql=query, params=params) return result > 0 # Assuming the result is the number of affected rows except Exception as e: print( @@ -307,28 +310,116 @@ def download_file(self, url, destination) -> bool: def build_insert_query(self, registry_meta: ConnectorRegsitryv2): ui_spec_json = json.dumps(registry_meta.ui_spec) - return f""" INSERT INTO connector_registry (id, connector_id, name, type, category, version, description, technology, runtime, licence, owner, iconurl, status, source_url, source, ui_spec, created_by, updated_by, created_date, updated_date, live_date) VALUES - ( '{registry_meta.id}-{registry_meta.version}', '{registry_meta.id}', '{registry_meta.name}', '{registry_meta.type}', '{registry_meta.category}', '{registry_meta.version}', '{registry_meta.description}', '{registry_meta.technology}', '{registry_meta.runtime}', '{registry_meta.licence}', '{registry_meta.owner}', '{registry_meta.iconurl}', '{registry_meta.status}', '{registry_meta.source_url}', '{registry_meta.source}', '{ui_spec_json}', 'SYSTEM', 'SYSTEM', '{datetime.now()}', '{datetime.now()}', '{datetime.now()}' ) - ON CONFLICT (connector_id, version) DO UPDATE - SET id = '{registry_meta.id}-{registry_meta.version}', - name = '{registry_meta.name}', - type = '{registry_meta.type}', - category = '{registry_meta.category}', - version = '{registry_meta.version}', - description = '{registry_meta.description}', - technology = '{registry_meta.technology}', - runtime = '{registry_meta.runtime}', - licence = '{registry_meta.licence}', - owner = '{registry_meta.owner}', - iconurl = '{registry_meta.iconurl}', - status = '{registry_meta.status}', - source_url = '{registry_meta.source_url}', - source = '{registry_meta.source}', - ui_spec = '{ui_spec_json}'::jsonb, - updated_date = '{datetime.now()}' - ;; + query =f""" + INSERT INTO connector_registry ( + id, connector_id, name, type, category, version, description, + technology, runtime, licence, owner, iconurl, status, source_url, + source, ui_spec, created_by, updated_by, created_date, updated_date, live_date + ) VALUES ( + %s, %s, %s, %s, %s, %s, %s, + %s, %s, %s, 
%s, %s, %s, + %s, %s, %s, %s, %s, %s, %s + ) ON CONFLICT ( + connector_id, version + ) DO UPDATE SET + id = %s, + name = %s, + type = %s, + category = %s, + version = %s, + description = %s, + technology = %s, + runtime = %s, + licence = %s, + owner = %s, + iconurl = %s, + status = %s, + source_url = %s, + source = %s, + ui_spec = %s::jsonb, + updated_date = %s + ;; """ + params = ( + registry_meta.id + "-" + registry_meta.version, + registry_meta.id, + registry_meta.name, + registry_meta.type, + registry_meta.category, + registry_meta.version, + registry_meta.description, + + registry_meta.technology, + registry_meta.runtime, + registry_meta.licence, + registry_meta.owner, + registry_meta.iconurl, + registry_meta.status, + registry_meta.source_url, + + registry_meta.source, + ui_spec_json, + 'SYSTEM', + 'SYSTEM', + datetime.now(), + datetime.now(), + datetime.now(), + + registry_meta.id + "-" + registry_meta.version, + registry_meta.name, + registry_meta.type, + registry_meta.category, + registry_meta.version, + registry_meta.description, + registry_meta.technology, + registry_meta.runtime, + registry_meta.licence, + registry_meta.owner, + registry_meta.iconurl, + registry_meta.status, + registry_meta.source_url, + registry_meta.source, + ui_spec_json, + datetime.now(), + ) + return query, params + + def load_file_bytes(self, rel_path: str) -> bytes | None: + file_path = Path(self.extraction_path) + for item in file_path.glob("*/{}".format(rel_path)): + try: + with open(item, 'rb') as file: + file_content = file.read() + encoded = base64.b64encode(file_content).decode("ascii") + except IsADirectoryError: + print( + f"Connector Registry | No value for icon URL given at metadata: {rel_path}" + ) + return None + except FileNotFoundError: + print( + f"Connector Registry | No file present at indicated relative path: {rel_path}" + ) + return None + except (ValueError, TypeError) as e: + print( + f"Connector Registry | File content not byte like: {e}" + ) + return None + return encoded + def update_connector_registry(self, _id, ver): + try: + result = self.db_service.execute_upsert( + f"UPDATE connector_registry SET status = 'Retired', updated_date = now() WHERE connector_id = %s AND status = 'Live' AND version != %s", (_id, ver) + ) + print( + f"Connector Registry | Updated {result} existing rows with connector_id: {_id} and version: {ver}" + ) + except Exception as e: + print( + f"Connector Registry | An error occurred during the execution of Query: {e}" + ) class ExtractionUtil: def extract_gz(tar_path, extract_path): diff --git a/command-service/src/command/dataset_command.py b/command-service/src/command/dataset_command.py index 165efd38..db7afa7d 100644 --- a/command-service/src/command/dataset_command.py +++ b/command-service/src/command/dataset_command.py @@ -30,9 +30,9 @@ def __init__( def _get_draft_dataset_record(self, dataset_id): query = f""" - SELECT "type", MAX(version) AS max_version FROM datasets_draft WHERE dataset_id = '{dataset_id}' GROUP BY 1 + SELECT "type", MAX(version) AS max_version FROM datasets_draft WHERE dataset_id = %s GROUP BY 1 """ - dataset_record = self.db_service.execute_select_one(query) + dataset_record = self.db_service.execute_select_one(sql=query, params=(dataset_id,)) if dataset_record is not None: return dataset_record return None @@ -40,19 +40,22 @@ def _get_draft_dataset_record(self, dataset_id): def _get_draft_dataset(self, dataset_id): query = f""" SELECT * FROM datasets_draft - WHERE dataset_id = '{dataset_id}' AND (status =
'{DatasetStatusType.Publish.name}' OR status = '{DatasetStatusType.ReadyToPublish.name}') AND version = (SELECT MAX(version) - FROM datasets_draft WHERE dataset_id = '{dataset_id}' AND (status = '{DatasetStatusType.Publish.name}' OR status = '{DatasetStatusType.ReadyToPublish.name}')) + WHERE dataset_id = %s AND (status = %s OR status = %s ) AND version = (SELECT MAX(version) + FROM datasets_draft WHERE dataset_id = %s AND (status = %s OR status = %s )) """ - dataset_record = self.db_service.execute_select_one(query) + params = (dataset_id, DatasetStatusType.Publish.name, DatasetStatusType.ReadyToPublish.name, + dataset_id, DatasetStatusType.Publish.name, DatasetStatusType.ReadyToPublish.name,) + dataset_record = self.db_service.execute_select_one(sql=query, params=params) if dataset_record is not None: return dataset_record return None def _check_for_live_record(self, dataset_id): query = f""" - SELECT * FROM datasets WHERE dataset_id = '{dataset_id}' AND status = '{DatasetStatusType.Live.name}' + SELECT * FROM datasets WHERE dataset_id = %s AND status = %s """ - result = self.db_service.execute_select_one(query) + params = (dataset_id, DatasetStatusType.Live.name, ) + result = self.db_service.execute_select_one(sql=query, params=params) live_dataset = dict() if result is not None: live_dataset = from_dict(data_class=DatasetsLive, data=result) diff --git a/command-service/src/command/db_command.py b/command-service/src/command/db_command.py index 6d2c7d46..4cb62e2c 100644 --- a/command-service/src/command/db_command.py +++ b/command-service/src/command/db_command.py @@ -63,56 +63,99 @@ def _insert_dataset_record(self, dataset_id, data_version, live_dataset, draft_d draft_dataset = from_dict(data_class = DatasetsDraft, data = draft_dataset_record) draft_dataset_id = draft_dataset.id current_timestamp = dt.now() + params = ( + dataset_id, + dataset_id, + draft_dataset.type, + draft_dataset.name, + json.dumps(draft_dataset.extraction_config).replace("'", "''"), + json.dumps(draft_dataset.validation_config).replace("'", "''"), + json.dumps(draft_dataset.dedup_config).replace("'", "''"), + json.dumps(draft_dataset.denorm_config).replace("'", "''"), + json.dumps(draft_dataset.data_schema).replace("'", "''"), + json.dumps(draft_dataset.router_config).replace("'", "''"), + json.dumps(draft_dataset.dataset_config).replace("'", "''"), + DatasetStatusType.Live.name, + json.dumps(draft_dataset.tags).replace("'", "''").replace("[", "{").replace("]", "}") if draft_dataset.tags is not None else json.dumps({}), + draft_dataset.api_version, + draft_dataset.version, + json.dumps(draft_dataset.sample_data).replace("'", "''"), + draft_dataset.entry_topic, + draft_dataset.created_by, + draft_dataset.updated_by, + current_timestamp, + current_timestamp, + current_timestamp, + + draft_dataset.name, + json.dumps(draft_dataset.extraction_config).replace("'", "''"), + json.dumps(draft_dataset.validation_config).replace("'", "''"), + json.dumps(draft_dataset.dedup_config).replace("'", "''"), + json.dumps(draft_dataset.denorm_config).replace("'", "''"), + json.dumps(draft_dataset.data_schema).replace("'", "''"), + json.dumps(draft_dataset.router_config).replace("'", "''"), + json.dumps(draft_dataset.dataset_config).replace("'", "''"), + json.dumps(draft_dataset.tags).replace("'", "''").replace("[", "{").replace("]", "}") if draft_dataset.tags is not None else json.dumps({}), + data_version if live_dataset is not None else 1, + draft_dataset.api_version, + draft_dataset.version, + 
json.dumps(draft_dataset.sample_data).replace("'", "''"), + draft_dataset.entry_topic, + draft_dataset.updated_by, + current_timestamp, + current_timestamp, + DatasetStatusType.Live.name, + ) insert_query = f""" INSERT INTO datasets(id, dataset_id, "type", name, extraction_config, validation_config, dedup_config, denorm_config, data_schema, router_config, dataset_config, status, tags, data_version, api_version, version, sample_data, entry_topic, created_by, updated_by, created_date, updated_date, published_date) VALUES ( - '{dataset_id}', - '{dataset_id}', - '{draft_dataset.type}', - '{draft_dataset.name}', - '{json.dumps(draft_dataset.extraction_config).replace("'", "''")}', - '{json.dumps(draft_dataset.validation_config).replace("'", "''")}', - '{json.dumps(draft_dataset.dedup_config).replace("'", "''")}', - '{json.dumps(draft_dataset.denorm_config).replace("'", "''")}', - '{json.dumps(draft_dataset.data_schema).replace("'", "''")}', - '{json.dumps(draft_dataset.router_config).replace("'", "''")}', - '{json.dumps(draft_dataset.dataset_config).replace("'", "''")}', - '{DatasetStatusType.Live.name}', - '{json.dumps(draft_dataset.tags).replace("'", "''").replace("[", "{").replace("]", "}")}', + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, 1, - '{draft_dataset.api_version}', - {draft_dataset.version}, - '{json.dumps(draft_dataset.sample_data).replace("'", "''")}', - '{draft_dataset.entry_topic}', - '{draft_dataset.created_by}', - '{draft_dataset.updated_by}', - '{current_timestamp}', - '{current_timestamp}', - '{current_timestamp}' + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s ) ON CONFLICT (id) DO UPDATE - SET name = '{draft_dataset.name}', - extraction_config = '{json.dumps(draft_dataset.extraction_config).replace("'", "''")}', - validation_config = '{json.dumps(draft_dataset.validation_config).replace("'", "''")}', - dedup_config = '{json.dumps(draft_dataset.dedup_config).replace("'", "''")}', - denorm_config = '{json.dumps(draft_dataset.denorm_config).replace("'", "''")}', - data_schema = '{json.dumps(draft_dataset.data_schema).replace("'", "''")}', - router_config = '{json.dumps(draft_dataset.router_config).replace("'", "''")}', - dataset_config = '{json.dumps(draft_dataset.dataset_config).replace("'", "''")}', - tags = '{json.dumps(draft_dataset.tags).replace("'", "''").replace("[", "{").replace("]", "}")}', - data_version = {data_version if live_dataset is not None else 1}, - api_version = '{draft_dataset.api_version}', - version = {draft_dataset.version}, - sample_data = '{json.dumps(draft_dataset.sample_data).replace("'", "''")}', - entry_topic = '{draft_dataset.entry_topic}', - updated_by = '{draft_dataset.updated_by}', - updated_date = '{current_timestamp}', - published_date = '{current_timestamp}', - status = '{DatasetStatusType.Live.name}'; + SET name = %s, + extraction_config = %s, + validation_config = %s, + dedup_config = %s, + denorm_config = %s, + data_schema = %s, + router_config = %s, + dataset_config = %s, + tags = %s, + data_version = %s, + api_version = %s, + version = %s, + sample_data = %s, + entry_topic = %s, + updated_by = %s, + updated_date = %s, + published_date = %s, + status = %s; """ - self.db_service.execute_upsert(insert_query) + self.db_service.execute_upsert(insert_query, params) print(f"Dataset {dataset_id} record inserted successfully...") return draft_dataset_id @@ -120,51 +163,84 @@ def _insert_datasource_record(self, dataset_id, draft_dataset_id): result = {} draft_datasource_record = 
self.db_service.execute_select_all( - f"SELECT * FROM datasources_draft WHERE dataset_id = '{draft_dataset_id}'" + sql=f"SELECT * FROM datasources_draft WHERE dataset_id = %s", + params=(draft_dataset_id,) ) if draft_datasource_record is None: return result for record in draft_datasource_record: draft_datasource = from_dict(data_class=DatasourcesDraft, data=record) current_timestamp = dt.now() + params = ( + draft_datasource.id, + draft_datasource.datasource, + dataset_id, + draft_datasource.datasource_ref, + json.dumps(draft_datasource.ingestion_spec).replace("'", "''"), + draft_datasource.type, + json.dumps(draft_datasource.retention_period).replace("'", "''"), + json.dumps(draft_datasource.archival_policy).replace("'", "''"), + json.dumps(draft_datasource.purge_policy).replace("'", "''"), + json.dumps(draft_datasource.backup_config).replace("'", "''"), + DatasetStatusType.Live.name, + draft_datasource.created_by, + draft_datasource.updated_by, + current_timestamp, + current_timestamp, + current_timestamp, + json.dumps(draft_datasource.metadata).replace("'", "''"), + + draft_datasource.datasource, + json.dumps(draft_datasource.ingestion_spec).replace("'", "''"), + draft_datasource.type, + json.dumps(draft_datasource.retention_period).replace("'", "''"), + json.dumps(draft_datasource.archival_policy).replace("'", "''"), + json.dumps(draft_datasource.purge_policy).replace("'", "''"), + json.dumps(draft_datasource.backup_config).replace("'", "''"), + draft_datasource.updated_by, + current_timestamp, + current_timestamp, + json.dumps(draft_datasource.metadata).replace("'", "''"), + DatasetStatusType.Live.name, + ) insert_query = f""" INSERT INTO datasources(id, datasource, dataset_id, datasource_ref, ingestion_spec, type, retention_period, archival_policy, purge_policy, backup_config, status, created_by, updated_by, created_date, updated_date, published_date, metadata) VALUES ( - '{draft_datasource.id}', - '{draft_datasource.datasource}', - '{dataset_id}', - '{draft_datasource.datasource_ref}', - '{json.dumps(draft_datasource.ingestion_spec).replace("'", "''")}', - '{draft_datasource.type}', - '{json.dumps(draft_datasource.retention_period).replace("'", "''")}', - '{json.dumps(draft_datasource.archival_policy).replace("'", "''")}', - '{json.dumps(draft_datasource.purge_policy).replace("'", "''")}', - '{json.dumps(draft_datasource.backup_config).replace("'", "''")}', - '{DatasetStatusType.Live.name}', - '{draft_datasource.created_by}', - '{draft_datasource.updated_by}', - '{current_timestamp}', - '{current_timestamp}', - '{current_timestamp}', - '{json.dumps(draft_datasource.metadata).replace("'", "''")}' + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s ) ON CONFLICT (id) DO UPDATE - SET datasource_ref = '{draft_datasource.datasource}', - ingestion_spec = '{json.dumps(draft_datasource.ingestion_spec).replace("'", "''")}', - type = '{draft_datasource.type}', - retention_period = '{json.dumps(draft_datasource.retention_period).replace("'", "''")}', - archival_policy = '{json.dumps(draft_datasource.archival_policy).replace("'", "''")}', - purge_policy = '{json.dumps(draft_datasource.purge_policy).replace("'", "''")}', - backup_config = '{json.dumps(draft_datasource.backup_config).replace("'", "''")}', - updated_by = '{draft_datasource.updated_by}', - updated_date = '{current_timestamp}', - published_date = '{current_timestamp}', - metadata = '{json.dumps(draft_datasource.metadata).replace("'", "''")}', - status = 
'{DatasetStatusType.Live.name}'; + SET datasource_ref = %s, + ingestion_spec = %s, + type = %s, + retention_period = %s, + archival_policy = %s, + purge_policy = %s, + backup_config = %s, + updated_by = %s, + updated_date = %s, + published_date = %s, + metadata = %s, + status = %s; """ - result = self.db_service.execute_upsert(insert_query) + result = self.db_service.execute_upsert(sql=insert_query, params=params) print( f"Datasource {draft_datasource.id} record inserted successfully..." ) @@ -183,63 +259,105 @@ def _insert_connector_instances(self, dataset_id, draft_dataset_record): ) current_timestamp = dt.now() if connector_config.version == 'v2': + params = ( + connector_config.id, + dataset_id, + connector_config.connector_id, + json.dumps(connector_config.connector_config).replace("'", "''"), + json.dumps(connector_config.operations_config).replace("'", "''"), + connector_config.data_format, + DatasetStatusType.Live.name, + json.dumps(emptyJson), + json.dumps(emptyJson), + draft_dataset_record.get('created_by'), + draft_dataset_record.get('updated_by'), + current_timestamp, + current_timestamp, + current_timestamp, + + json.dumps(connector_config.connector_config).replace("'", "''"), + json.dumps(connector_config.operations_config).replace("'", "''"), + connector_config.data_format, + draft_dataset_record.get('updated_by'), + current_timestamp, + current_timestamp, + DatasetStatusType.Live.name, + ) insert_query = f""" INSERT INTO connector_instances(id, dataset_id, connector_id, connector_config, operations_config, data_format, status, connector_state, connector_stats, created_by, updated_by, created_date, updated_date, published_date) VALUES ( - '{connector_config.id}', - '{dataset_id}', - '{connector_config.connector_id}', - '{json.dumps(connector_config.connector_config).replace("'", "''")}', - '{json.dumps(connector_config.operations_config).replace("'", "''")}', - '{connector_config.data_format}', - '{DatasetStatusType.Live.name}', - '{json.dumps(emptyJson)}', - '{json.dumps(emptyJson)}', - '{draft_dataset_record.get('created_by')}', - '{draft_dataset_record.get('updated_by')}', - '{current_timestamp}', - '{current_timestamp}', - '{current_timestamp}' + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s ) ON CONFLICT (id) DO UPDATE - SET connector_config = '{json.dumps(connector_config.connector_config).replace("'", "''")}', - operations_config = '{json.dumps(connector_config.operations_config).replace("'", "''")}', - data_format = '{connector_config.data_format}', - updated_by = '{draft_dataset_record.get('updated_by')}', - updated_date = '{current_timestamp}', - published_date = '{current_timestamp}', - status = '{DatasetStatusType.Live.name}'; + SET connector_config = %s, + operations_config = %s, + data_format = %s, + updated_by = %s, + updated_date = %s, + published_date = %s, + status = %s; """ - result = self.db_service.execute_upsert(insert_query) + result = self.db_service.execute_upsert(sql=insert_query, params=params) print( f"Connector[v2] Instance record for [dataset={dataset_id},connector={connector_config.connector_id},id={connector_config.id}] inserted successfully..." 
) else: + params = ( + connector_config.id, + dataset_id, + connector_config.connector_id, + json.dumps(connector_config.connector_config).replace("'", "''"), + DatasetStatusType.Live.name, + draft_dataset_record.get('created_by'), + draft_dataset_record.get('updated_by'), + current_timestamp, + current_timestamp, + current_timestamp, + + json.dumps(connector_config.connector_config).replace("'", "''"), + draft_dataset_record.get('updated_by'), + current_timestamp, + current_timestamp, + DatasetStatusType.Live.name, + ) insert_query = f""" INSERT INTO dataset_source_config(id, dataset_id, connector_type, connector_config, status, created_by, updated_by, created_date, updated_date, published_date) VALUES ( - '{connector_config.id}', - '{dataset_id}', - '{connector_config.connector_id}', - '{json.dumps(connector_config.connector_config).replace("'", "''")}', - '{DatasetStatusType.Live.name}', - '{draft_dataset_record.get('created_by')}', - '{draft_dataset_record.get('updated_by')}', - '{current_timestamp}', - '{current_timestamp}', - '{current_timestamp}' + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s ) ON CONFLICT (id) DO UPDATE - SET connector_config = '{json.dumps(connector_config.connector_config).replace("'", "''")}', - updated_by = '{draft_dataset_record.get('updated_by')}', - updated_date = '{current_timestamp}', - published_date = '{current_timestamp}', - status = '{DatasetStatusType.Live.name}'; + SET connector_config = %s, + updated_by = %s, + updated_date = %s, + published_date = %s, + status = %s; """ - result = self.db_service.execute_upsert(insert_query) + result = self.db_service.execute_upsert(sql=insert_query, params=params) print( f"Connector[v1] record for [dataset={dataset_id},connector={connector_config.connector_id},id={connector_config.id}] inserted successfully..." 
) @@ -252,7 +370,7 @@ def _insert_dataset_transformations(self, dataset_id, draft_dataset_record): result = {} current_timestamp = dt.now() # Delete existing transformations - self.db_service.execute_delete(f"""DELETE from dataset_transformations where dataset_id = '{dataset_id}'""") + self.db_service.execute_delete(sql=f"""DELETE from dataset_transformations where dataset_id = %s""", params=(dataset_id,)) print(f"Dataset Transformation for {dataset_id} are deleted successfully...") if draft_dataset_transformations_record is None: @@ -262,31 +380,50 @@ def _insert_dataset_transformations(self, dataset_id, draft_dataset_record): transformation = from_dict( data_class=DatasetTransformationsDraft, data=record ) + params = ( + dataset_id + '_' + transformation.field_key, + dataset_id, + transformation.field_key, + json.dumps(transformation.transformation_function).replace("'", "''"), + DatasetStatusType.Live.name, + transformation.mode, + draft_dataset_record.get('created_by'), + draft_dataset_record.get('updated_by'), + current_timestamp, + current_timestamp, + current_timestamp, + ) insert_query = f""" INSERT INTO dataset_transformations(id, dataset_id, field_key, transformation_function, status, mode, created_by, updated_by, created_date, updated_date, published_date) VALUES ( - '{dataset_id + '_' + transformation.field_key}', - '{dataset_id}', - '{transformation.field_key}', - '{json.dumps(transformation.transformation_function).replace("'", "''")}', - '{DatasetStatusType.Live.name}', - '{transformation.mode}', - '{draft_dataset_record.get('created_by')}', - '{draft_dataset_record.get('updated_by')}', - '{current_timestamp}', - '{current_timestamp}', - '{current_timestamp}' + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s ) """ - result = self.db_service.execute_upsert(insert_query) + result = self.db_service.execute_upsert(sql=insert_query, params=params) print(f"Dataset Transformation {dataset_id + '_' + transformation.field_key} record inserted successfully...") return result def _delete_draft_dataset(self, dataset_id, draft_dataset_id): - self.db_service.execute_delete(f"""DELETE from datasources_draft where dataset_id = '{draft_dataset_id}'""") + self.db_service.execute_delete(sql=f"""DELETE from datasources_draft where dataset_id = %s""", params=(draft_dataset_id,)) print(f"Draft datasources/tables for {dataset_id} are deleted successfully...") - self.db_service.execute_delete(f"""DELETE from datasets_draft where id = '{draft_dataset_id}'""") + self.db_service.execute_delete(sql=f"""DELETE from dataset_transformations_draft where dataset_id = %s""", params=(draft_dataset_id,)) + print(f"Draft transformations/tables for {dataset_id} are deleted successfully...") + + self.db_service.execute_delete(sql=f"""DELETE from dataset_source_config_draft where dataset_id = %s""", params=(draft_dataset_id,)) + print(f"Draft source config/tables for {dataset_id} are deleted successfully...") + + self.db_service.execute_delete(sql=f"""DELETE from datasets_draft where id = %s""", params=(draft_dataset_id,)) print(f"Draft Dataset for {dataset_id} is deleted successfully...") \ No newline at end of file diff --git a/command-service/src/command/druid_command.py b/command-service/src/command/druid_command.py index 1bd95ab2..901b18ad 100644 --- a/command-service/src/command/druid_command.py +++ b/command-service/src/command/druid_command.py @@ -27,7 +27,8 @@ def execute(self, command_payload: CommandPayload, action: Action): def _submit_ingestion_task(self, dataset_id): 
datasources_records = self.db_service.execute_select_all( - f"SELECT dso.*, dt.type as dataset_type FROM datasources dso, datasets dt WHERE dso.dataset_id = '{dataset_id}' AND dso.dataset_id = dt.id" + sql=f"SELECT dso.*, dt.type as dataset_type FROM datasources dso, datasets dt WHERE dso.dataset_id = %s AND dso.dataset_id = dt.id", + params=(dataset_id,) ) if datasources_records is not None: print( diff --git a/command-service/src/config/service_config.yml b/command-service/src/config/service_config.yml index a26d4a53..2c432ba0 100644 --- a/command-service/src/config/service_config.yml +++ b/command-service/src/config/service_config.yml @@ -172,7 +172,7 @@ connector_jobs: namespace: spark base_helm_chart: spark-connector-cron flink: - namespace: streaming-connectors + namespace: flink base_helm_chart: flink-connector connector_registry: diff --git a/command-service/src/service/db_service.py b/command-service/src/service/db_service.py index 864b4612..ded56b91 100644 --- a/command-service/src/service/db_service.py +++ b/command-service/src/service/db_service.py @@ -38,36 +38,36 @@ def connect(self): return db_connection # @reconnect - def execute_select_one(self, sql): + def execute_select_one(self, sql, params): db_connection = self.connect() cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor) - cursor.execute(sql) + cursor.execute(sql, params) result = cursor.fetchone() db_connection.close() return result # @reconnect - def execute_select_all(self, sql): + def execute_select_all(self, sql, params): db_connection = self.connect() cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor) - cursor.execute(sql) + cursor.execute(sql, params) result = cursor.fetchall() db_connection.close() return result # @reconnect - def execute_upsert(self, sql): + def execute_upsert(self, sql, params): db_connection = self.connect() cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor) - cursor.execute(sql) + cursor.execute(sql, params) record_count = cursor.rowcount db_connection.close() # print(f"{record_count} inserted/updated successfully") return record_count # @reconnect - def execute_delete(self, sql): + def execute_delete(self, sql, params): db_connection = self.connect() cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor) - cursor.execute(sql) + cursor.execute(sql, params) db_connection.close()
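The recurring change across the command-service files in this patch is the move from f-string SQL to psycopg2 parameter binding: each query now carries %s placeholders and a separate params tuple handed to cursor.execute. The snippet below is a minimal standalone sketch of that pattern, not part of the patch; the DSN string and the fetch_dataset helper are hypothetical stand-ins, and only the execute(sql, params) call mirrors the updated db_service.py methods.

import psycopg2
import psycopg2.extras

# Hypothetical connection string; the real service builds its connection from config.
DB_DSN = "dbname=obsrv user=postgres password=postgres host=localhost port=5432"

def fetch_dataset(dataset_id: str):
    # Parameterized select: dataset_id is bound by the driver, never spliced into the SQL text.
    db_connection = psycopg2.connect(DB_DSN)
    try:
        cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute("SELECT * FROM datasets WHERE dataset_id = %s", (dataset_id,))
        return cursor.fetchone()
    finally:
        db_connection.close()

if __name__ == "__main__":
    print(fetch_dataset("sample-dataset"))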
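In _perform_spark_install, the human-readable schedule from operations_config is now resolved to a cron expression before being passed to the chart as cronSchedule. A standalone sketch of that lookup follows, assuming the schedule label is always one of the four values the patch handles (any other label, e.g. "Daily", would raise a KeyError as written).

# Mapping taken from _perform_spark_install; labels outside this table are not handled.
SCHEDULE_CONFIGS = {
    "Hourly": "0 * * * *",    # start of every hour
    "Weekly": "0 0 * * 0",    # midnight every Sunday
    "Monthly": "0 0 1 * *",   # midnight on the 1st of every month
    "Yearly": "0 0 1 1 *",    # midnight on January 1st
}

def resolve_cron_schedule(operations_config: dict) -> str:
    # Returns the cron expression for the connector instance's configured schedule.
    return SCHEDULE_CONFIGS[operations_config["schedule"]]

# Example: a Weekly connector instance maps to "0 0 * * 0".
assert resolve_cron_schedule({"schedule": "Weekly"}) == "0 0 * * 0"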
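_perform_flink_install hands the flink-connector chart a single --set-json flink_jobs=... value keyed by the sanitized connector id. The helper below sketches only that payload construction, assuming a connector_source JSON with source and main_program keys as used in the patch; the example identifiers are illustrative and the helm/subprocess invocation is omitted.

import json

def build_flink_jobs_payload(connector_id: str, connector_source_json: str) -> str:
    # Builds the value passed to `--set-json flink_jobs=...` when upgrading/installing the chart.
    connector_source = json.loads(connector_source_json)
    job_name = connector_id.replace(".", "-")  # mirrors the patch: dots in the connector id are replaced for the job name
    flink_jobs = {
        job_name: {
            "enabled": "true",
            "connector_id": connector_id,
            "source": connector_source.get("source"),
            "main_program": connector_source.get("main_program"),
        }
    }
    # The patch strips spaces from the serialized JSON before appending it to the helm command.
    return json.dumps(flink_jobs).replace(" ", "")

# Illustrative values only.
print(build_flink_jobs_payload("kafka-connector.1", '{"source": "kafka-connector", "main_program": "kafka-connector-1.0.0.jar"}'))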