From f2ce8b730a17696ca2a361f1af1bc589dec99185 Mon Sep 17 00:00:00 2001 From: Tiffany K Date: Fri, 17 Nov 2023 13:59:31 -0800 Subject: [PATCH] feat(2767): Add stage setup and teardown jobIds (#570) --- index.js | 2 + lib/event.js | 17 ++ lib/helper.js | 14 +- lib/pipeline.js | 126 ++++++++++- lib/stage.js | 4 +- lib/stageBuild.js | 20 ++ lib/stageBuildFactory.js | 54 +++++ lib/stageFactory.js | 14 +- package.json | 6 +- test/data/parserWithStages.json | 103 ++++++++- test/data/workflowGraphWithStages.json | 47 +++++ test/lib/build.test.js | 21 +- test/lib/event.test.js | 57 +++++ test/lib/pipeline.test.js | 278 +++++++++++++++++++++++++ 14 files changed, 741 insertions(+), 22 deletions(-) create mode 100644 lib/stageBuild.js create mode 100644 lib/stageBuildFactory.js create mode 100644 test/data/workflowGraphWithStages.json diff --git a/index.js b/index.js index 76ab1a29..1c3fa510 100644 --- a/index.js +++ b/index.js @@ -11,6 +11,7 @@ const JobFactory = require('./lib/jobFactory'); const PipelineFactory = require('./lib/pipelineFactory'); const SecretFactory = require('./lib/secretFactory'); const StageFactory = require('./lib/stageFactory'); +const StageBuildFactory = require('./lib/stageBuildFactory'); const StepFactory = require('./lib/stepFactory'); const TemplateFactory = require('./lib/templateFactory'); const TemplateTagFactory = require('./lib/templateTagFactory'); @@ -35,6 +36,7 @@ module.exports = { PipelineFactory, SecretFactory, StageFactory, + StageBuildFactory, StepFactory, TemplateFactory, TemplateTagFactory, diff --git a/lib/event.js b/lib/event.js index 1e882276..a66589cb 100644 --- a/lib/event.js +++ b/lib/event.js @@ -15,8 +15,25 @@ class EventModel extends BaseModel { super('event', config); } + /** + * Return stage builds that belong to this event + * @method getStageBuilds + * @return {Promise} Resolves to an array of stage builds + */ + async getStageBuilds() { + // Lazy load factory dependency to prevent circular dependency issues + // https://nodejs.org/api/modules.html#modules_cycles + /* eslint-disable global-require */ + const StageBuildFactory = require('./stageBuildFactory'); + /* eslint-enable global-require */ + const stageBuildFactory = StageBuildFactory.getInstance(); + + return stageBuildFactory.list({ params: { eventId: this.id } }); + } + /** * Return builds that belong to this event + * @method getBuilds * @param {String} [config.startTime] Search for builds after this startTime * @param {String} [config.endTime] Search for builds before this endTime * @param {String} [config.sort] Ascending or descending diff --git a/lib/helper.js b/lib/helper.js index 604d58f8..930b6d18 100644 --- a/lib/helper.js +++ b/lib/helper.js @@ -10,6 +10,7 @@ const DEFAULT_KEY = 'default'; const EXECUTOR_ANNOTATION = 'screwdriver.cd/executor'; const EXECUTOR_ANNOTATION_BETA = 'beta.screwdriver.cd/executor'; const SCM_ORG_REGEX = /^([^/]+)\/.*/; +const STAGE_PREFIX = 'stage@'; /** * Get the value of the annotation that matches name @@ -398,6 +399,16 @@ async function getBookendKey({ buildClusterName, annotations, pipeline, provider }; } +/** + * Returns full stage name with correct formatting and setup or teardown suffix (e.g. 
stage@deploy:setup)
+ * @param  {String} stageName Stage name
+ * @param  {String} jobName   Type of stage job, either 'setup' or 'teardown'
+ * @return {String}           Full stage job name
+ */
+function getFullStageJobName({ stageName, jobName }) {
+    return `${STAGE_PREFIX}${stageName}:${jobName}`;
+}
+
 module.exports = {
     getAnnotations,
     convertToBool,
@@ -405,5 +416,6 @@ module.exports = {
     getAllRecords,
     getBuildClusterName,
     getToken,
-    getBookendKey
+    getBookendKey,
+    getFullStageJobName
 };
diff --git a/lib/pipeline.js b/lib/pipeline.js
index 660a58f9..62c3f210 100644
--- a/lib/pipeline.js
+++ b/lib/pipeline.js
@@ -22,7 +22,7 @@ const MAX_EVENT_DELETE_COUNT = 100;
 const DEFAULT_PAGE = 1;
 const SCM_NO_ACCESS_STATUSES = [401, 404];
 
-const { getAllRecords, getBuildClusterName } = require('./helper');
+const { getAllRecords, getBuildClusterName, getFullStageJobName } = require('./helper');
 
 const JOB_CHUNK_SIZE = process.env.JOBS_PARALLEL_COUNT || 5;
 const SD_API_URI = process.env.URI;
@@ -418,6 +418,7 @@ class PipelineModel extends BaseModel {
 
     /**
      * archive closed PR jobs
+     * @method _archiveClosePRs
      * @param  {Array}   existingPrJobs  List pipeline's existing pull request jobs (excludes already archived jobs for closed PRs)
      * @param  {Array}   openedPRs       List of opened PRs coming from SCM
      * @param  {Promise}
@@ -852,6 +853,110 @@ class PipelineModel extends BaseModel {
         return Promise.allSettled([...toCreateOrUpdate, ...toDeactivate]);
     }
 
+    /**
+     * Converts the simplified stages into a more consistent format
+     *
+     * This is because the user can provide the stage information as:
+     *   - { "name": { "jobs": ["job1", "job2", "job3"], "description": "Description" },
+     *       "name2": { "jobs": ["job4", "job5"] } }
+     *
+     * We will convert it to a more standard format:
+     *   - [{ "name": "name", "jobIds": [1, 2, 3], "pipelineId": 123, "description": "value" },
+     *      { "name": "name2", "jobIds": [4, 5], "pipelineId": 123 }]
+     * @method _convertStages
+     * @param  {Object} config               config
+     * @param  {Number} config.pipelineId    Pipeline ID
+     * @param  {Object} config.stages        Pipeline stages from the parsed configuration
+     * @param  {Array}  config.pipelineJobs  Pipeline jobs
+     * @return {Array}                       New array with stages after up-converting
+     */
+    async _convertStages({ pipelineId, stages, pipelineJobs }) {
+        const newStages = [];
+
+        // Convert stages from object to array of objects
+        Object.entries(stages).forEach(([key, value]) => {
+            const newStage = {
+                name: key,
+                pipelineId,
+                ...value
+            };
+
+            // Convert the job names to job IDs
+            newStage.jobIds = value.jobs.map(jobName => {
+                return pipelineJobs.find(j => j.name === jobName).id;
+            });
+
+            delete newStage.jobs; // extra field from yaml parser
+
+            // Add the IDs of the generated setup and teardown jobs
+            const setupJobName = getFullStageJobName({ stageName: key, jobName: 'setup' });
+            const teardownJobName = getFullStageJobName({ stageName: key, jobName: 'teardown' });
+
+            newStage.setup = pipelineJobs.find(j => j.name === setupJobName).id;
+            newStage.teardown = pipelineJobs.find(j => j.name === teardownJobName).id;
+
+            newStages.push(newStage);
+        });
+
+        return newStages;
+    }
+
+    /**
+     * Sync stages
+     * 1. Convert new stages into correct format, prepopulate with jobIds
+     * 2.a. Create stages if they are defined and were not already in the database
+     * 2.b. Update existing stages with the new configuration
+     * 2.c. 
Archive existing stages if they no longer exist in the configuration
+     * @method _createOrUpdateStages
+     * @param  {Object}       config                config
+     * @param  {Object}       config.parsedConfig   Parsed pipeline configuration
+     * @param  {Number}       config.pipelineId     Pipeline ID
+     * @param  {StageFactory} config.stageFactory   Stage factory
+     * @param  {Array}        config.pipelineJobs   Pipeline jobs
+     * @return {Promise}
+     */
+    async _createOrUpdateStages({ parsedConfig, pipelineId, stageFactory, pipelineJobs }) {
+        // Get new stages
+        const stages = parsedConfig.stages || {};
+
+        // List stages from this pipeline that already exist
+        const existingStages = await stageFactory.list({ params: { pipelineId } });
+        const existingStageNames = existingStages.map(stage => stage.name);
+        // Format new stage data
+        const convertedStages = await this._convertStages({ pipelineId, stages, pipelineJobs });
+        const convertedStageNames = convertedStages.map(stage => stage.name);
+
+        const stagesToUpdate = convertedStages.filter(stage => existingStageNames.includes(stage.name));
+        const stagesToCreate = convertedStages.filter(stage => !existingStageNames.includes(stage.name));
+        const stagesToArchive = existingStages.filter(stage => !convertedStageNames.includes(stage.name));
+        const processed = [];
+
+        // Archive outdated stages
+        stagesToArchive.forEach(stage => {
+            const existingStage = existingStages.find(s => s.name === stage.name);
+
+            existingStage.archived = true;
+
+            logger.info(`Archiving stage:${JSON.stringify(stage)} for pipelineId:${pipelineId}.`);
+            processed.push(existingStage.update());
+        });
+
+        // Update existing stages
+        stagesToUpdate.forEach(stage => {
+            const existingStage = existingStages.find(s => s.name === stage.name);
+
+            Object.assign(existingStage, stage);
+            existingStage.archived = false;
+
+            logger.info(`Updating stage:${JSON.stringify(stage)} for pipelineId:${pipelineId}.`);
+            processed.push(existingStage.update());
+        });
+
+        // Create new stages
+        stagesToCreate.forEach(stage => {
+            logger.info(`Creating stage:${JSON.stringify(stage)} for pipelineId:${pipelineId}.`);
+            processed.push(stageFactory.create(stage));
+        });
+
+        return Promise.all(processed);
+    }
+
     /**
      * Sync the pipeline by looking up screwdriver.yaml
      * Create, update, or disable jobs if necessary.
     * 
@@ -867,7 +972,14 @@ class PipelineModel extends BaseModel { /* eslint-disable global-require */ const JobFactory = require('./jobFactory'); /* eslint-enable global-require */ - const factory = JobFactory.getInstance(); + const jobFactory = JobFactory.getInstance(); + + // Lazy load factory dependency to prevent circular dependency issues + // https://nodejs.org/api/modules.html#modules_cycles + /* eslint-disable global-require */ + const StageFactory = require('./stageFactory'); + /* eslint-enable global-require */ + const stageFactory = StageFactory.getInstance(); // get the pipeline configuration const parsedConfig = await this.getConfiguration({ ref }); @@ -970,7 +1082,7 @@ class PipelineModel extends BaseModel { // If the job has not been processed, create it (new jobs) if (!jobsProcessed.includes(jobName)) { - updatedJobs.push(await factory.create(jobConfig)); + updatedJobs.push(await jobFactory.create(jobConfig)); await syncExternalTriggers({ pipelineId, @@ -982,6 +1094,14 @@ class PipelineModel extends BaseModel { ); } + // Sync stages + await this._createOrUpdateStages({ + parsedConfig, + pipelineId, + stageFactory, + pipelineJobs: updatedJobs + }); + const { nodes } = this.workflowGraph; // Add jobId to workflowGraph.nodes diff --git a/lib/stage.js b/lib/stage.js index 50a0b270..f1c6a79a 100644 --- a/lib/stage.js +++ b/lib/stage.js @@ -9,9 +9,11 @@ class StageModel extends BaseModel { * @param {Object} config * @param {Object} config.datastore Object that will perform operations on the datastore * @param {String} [config.description] Stage description - * @param {Array} [config.jobIds=[]] Job Ids that belong to this stage + * @param {Array} [config.jobIds=[]] Job IDs that belong to this stage * @param {String} config.name Name of the stage * @param {Number} config.pipelineId Pipeline the stage belongs to + * @param {Array} [config.setup] Setup job IDs + * @param {Array} [config.teardown] Teardown job IDs */ constructor(config) { super('stage', config); diff --git a/lib/stageBuild.js b/lib/stageBuild.js new file mode 100644 index 00000000..930b1f30 --- /dev/null +++ b/lib/stageBuild.js @@ -0,0 +1,20 @@ +'use strict'; + +const BaseModel = require('./base'); + +class StageBuildModel extends BaseModel { + /** + * Construct a StageBuildModel object + * @method constructor + * @param {Object} config + * @param {Object} config.datastore Object that will perform operations on the datastore + * @param {Number} config.eventId Event ID + * @param {Number} config.stageId Stage ID + * @param {String} config.status Stage build status + */ + constructor(config) { + super('stageBuild', config); + } +} + +module.exports = StageBuildModel; diff --git a/lib/stageBuildFactory.js b/lib/stageBuildFactory.js new file mode 100644 index 00000000..c4bbd316 --- /dev/null +++ b/lib/stageBuildFactory.js @@ -0,0 +1,54 @@ +'use strict'; + +const BaseFactory = require('./baseFactory'); +const StageBuild = require('./stageBuild'); +let instance; + +class StageBuildFactory extends BaseFactory { + /** + * Construct a StageBuildFactory object + * @method constructor + * @param {Object} config + * @param {Datastore} config.datastore Object that will perform operations on the datastore + */ + constructor(config) { + super('stageBuild', config); + } + + /** + * Instantiate a StageBuild class + * @method createClass + * @param {Object} config StageBuild data + * @return {StageBuild} + */ + createClass(config) { + return new StageBuild(config); + } + + /** + * Create a StageBuild model + * @param {Object} config + * 
@param {Number} config.eventId Event ID + * @param {Number} config.stageId Stage ID + * @param {Object} config.workflowGraph Stage workflowGraph + * @memberof StageBuildFactory + */ + create(config) { + return super.create(config); + } + + /** + * Get an instance of the StageBuildFactory + * @method getInstance + * @param {Object} config + * @param {Datastore} config.datastore + * @return {StageBuildFactory} + */ + static getInstance(config) { + instance = BaseFactory.getInstance(StageBuildFactory, instance, config); + + return instance; + } +} + +module.exports = StageBuildFactory; diff --git a/lib/stageFactory.js b/lib/stageFactory.js index 92d4a204..5285bc29 100644 --- a/lib/stageFactory.js +++ b/lib/stageFactory.js @@ -27,14 +27,18 @@ class StageFactory extends BaseFactory { /** * Create a Stage model - * @param {Object} config - * @param {String} [config.description] Stage description - * @param {Array} [config.jobIds=[]] Job Ids that belong to this stage - * @param {String} config.name Name of the stage - * @param {String} config.pipelineId Pipeline the stage belongs to + * @param {Object} config + * @param {String} [config.description] Stage description + * @param {Array} [config.jobIds=[]] Job IDs that belong to this stage + * @param {String} config.name Name of the stage + * @param {Number} config.pipelineId Pipeline the stage belongs to + * @param {Array} [config.setup] Setup job IDs + * @param {Array} [config.teardown] Teardown job IDs * @memberof StageFactory */ create(config) { + config.archived = false; + return super.create(config); } diff --git a/package.json b/package.json index 4fc62ca2..ce78c97e 100644 --- a/package.json +++ b/package.json @@ -57,9 +57,9 @@ "docker-parse-image": "^3.0.1", "js-yaml": "^4.1.0", "lodash": "^4.17.21", - "screwdriver-config-parser": "^8.0.0", - "screwdriver-data-schema": "^22.6.1", + "screwdriver-config-parser": "^8.0.3", + "screwdriver-data-schema": "^22.9.7", "screwdriver-logger": "^2.0.0", - "screwdriver-workflow-parser": "^4.0.0" + "screwdriver-workflow-parser": "^4.1.0" } } diff --git a/test/data/parserWithStages.json b/test/data/parserWithStages.json index f47520d6..7c5a058a 100644 --- a/test/data/parserWithStages.json +++ b/test/data/parserWithStages.json @@ -17,7 +17,7 @@ "NODE_ENV": "test", "NODE_VERSION": "4" }, - "requires": ["~pr", "~commit", "~sd@12345:test"] + "requires": ["stage@canary:setup"] }, { "image": "node:5", @@ -35,7 +35,7 @@ "NODE_ENV": "test", "NODE_VERSION": "5" }, - "requires": ["~pr", "~commit", "~sd@12345:test"] + "requires": ["stage@canary:setup"] }, { "image": "node:6", @@ -53,7 +53,7 @@ "NODE_ENV": "test", "NODE_VERSION": "6" }, - "requires": ["~pr", "~commit", "~sd@12345:test"] + "requires": ["stage@canary:setup"] } ], "publish": [ @@ -79,6 +79,77 @@ }, "requires": ["main"] } + ], + "A": [ + { + "image": "node:4", + "commands": [ + { + "name": "echo", + "command": "echo hi" + } + ], + "requires": ["stage@deploy:setup"] + } + ], + "B": [ + { + "image": "node:4", + "commands": [ + { + "name": "echo", + "command": "echo bye" + } + ], + "requires": ["A"] + } + ], + "stage@canary:setup": [ + { + "image": "node:4", + "commands": [ + { + "name": "announce", + "command": "post banner" + } + ], + "requires": ["~pr", "~commit", "~sd@12345:test"] + } + ], + "stage@canary:teardown": [ + { + "image": "node:4", + "commands": [ + { + "name": "publish", + "command": "publish blog" + } + ], + "requires": ["publish"] + } + ], + "stage@deploy:setup": [ + { + "image": "node:4", + "commands": [ + { + "name": "announce", + 
"command": "post banner" + } + ] + } + ], + "stage@deploy:teardown": [ + { + "image": "node:4", + "commands": [ + { + "name": "publish", + "command": "publish blog" + } + ], + "requires": ["B"] + } ] }, "workflow": [], @@ -86,13 +157,24 @@ "nodes": [ { "name": "~pr" }, { "name": "~commit" }, + { "name": "stage@canary:setup" }, { "name": "main" }, - { "name": "publish" } + { "name": "publish" }, + { "name": "stage@canary:teardown" }, + { "name": "stage@deploy:setup" }, + { "name": "A" }, + { "name": "B" }, + { "name": "stage@deploy:teardown" } ], "edges": [ - { "src": "~pr", "dest": "main" }, - { "src": "~commit", "dest": "main" }, - { "src": "main", "dest": "publish" } + { "src": "~pr", "dest": "stage@canary:setup" }, + { "src": "~commit", "dest": "stage@canary:setup" }, + { "src": "stage@canary:setup", "dest": "main" }, + { "src": "main", "dest": "publish" }, + { "src": "publish", "dest": "stage@canary:teardown" }, + { "src": "stage@deploy:setup", "dest": "A" }, + { "src": "A", "dest": "B" }, + { "src": "B", "dest": "stage@deploy:teardown" } ] }, "annotations": { @@ -101,7 +183,12 @@ "stages": { "canary": { "description": "Canary deployment", - "jobs": ["main", "publish"] + "jobs": ["main", "publish"], + "requires": ["~pr", "~commit", "~sd@12345:test"] + }, + "deploy": { + "description": "Prod deployment", + "jobs": ["A", "B"] } } } diff --git a/test/data/workflowGraphWithStages.json b/test/data/workflowGraphWithStages.json new file mode 100644 index 00000000..5396d981 --- /dev/null +++ b/test/data/workflowGraphWithStages.json @@ -0,0 +1,47 @@ +{ + "nodes": [ + { "name": "~pr" }, + { "name": "~commit" }, + { "name": "stage@alpha:setup", "stageName": "alpha" }, + { "name": "alpha-deploy", "stageName": "alpha" }, + { "name": "alpha-test", "stageName": "alpha" }, + { "name": "alpha-certify", "stageName": "alpha" }, + { "name": "stage@alpha:teardown", "stageName": "alpha" }, + { "name": "stage@beta:setup", "stageName": "beta" }, + { "name": "beta-deploy", "stageName": "beta" }, + { "name": "beta-test", "stageName": "beta" }, + { "name": "beta-certify", "stageName": "beta" }, + { "name": "stage@beta:teardown", "stageName": "beta" }, + { "name": "stage@gamma:setup", "stageName": "gamma" }, + { "name": "triggering-a-stage" }, + { "name": "gamma-deploy", "stageName": "gamma" }, + { "name": "gamma-test-integration", "stageName": "gamma" }, + { "name": "gamma-test-functional", "stageName": "gamma" }, + { "name": "gamma-certify", "stageName": "gamma" }, + { "name": "stage@gamma:teardown", "stageName": "gamma" }, + { "name": "triggered-by-a-stage-job" }, + { "name": "triggered-after-a-stage" } + ], + "edges": [ + { "src": "~commit", "dest": "stage@alpha:setup" }, + { "src": "stage@alpha:setup", "dest": "alpha-deploy" }, + { "src": "alpha-deploy", "dest": "alpha-test" }, + { "src": "alpha-test", "dest": "alpha-certify" }, + { "src": "alpha-certify", "dest": "stage@alpha:teardown" }, + { "src": "stage@alpha:teardown", "dest": "stage@beta:setup" }, + { "src": "stage@beta:setup", "dest": "beta-deploy" }, + { "src": "beta-deploy", "dest": "beta-test" }, + { "src": "beta-test", "dest": "beta-certify" }, + { "src": "beta-certify", "dest": "stage@beta:teardown" }, + { "src": "triggering-a-stage", "dest": "stage@gamma:setup" }, + { "src": "stage@gamma:setup", "dest": "gamma-deploy" }, + { "src": "gamma-deploy", "dest": "gamma-test-integration" }, + { "src": "gamma-deploy", "dest": "gamma-test-functional" }, + { "src": "gamma-test-integration", "dest": "gamma-certify", "join": true }, + { "src": 
"gamma-test-functional", "dest": "gamma-certify", "join": true }, + { "src": "gamma-certify", "dest": "stage@gamma:teardown" }, + { "src": "~commit", "dest": "triggering-a-stage" }, + { "src": "gamma-test-integration", "dest": "triggered-by-a-stage-job" }, + { "src": "stage@gamma:teardown", "dest": "triggered-after-a-stage" } + ] +} diff --git a/test/lib/build.test.js b/test/lib/build.test.js index 07a03dda..3c0fec21 100644 --- a/test/lib/build.test.js +++ b/test/lib/build.test.js @@ -8,6 +8,8 @@ const { SCM_STATE_MAP } = require('screwdriver-data-schema').plugins.scm; sinon.assert.expose(assert, { prefix: '' }); +const WORKFLOWGRAPH_WITH_STAGES = require('../data/workflowGraphWithStages.json'); + describe('Build Model', () => { const annotations = {}; const freezeWindows = ['* * ? * 1', '0-59 0-23 * 1 ?']; @@ -57,6 +59,8 @@ describe('Build Model', () => { let userFactoryMock; let jobFactoryMock; let pipelineFactoryMock; + let stageFactoryMock; + let stageBuildFactoryMock; let stepFactoryMock; let templateFactoryMock; let scmMock; @@ -100,6 +104,12 @@ describe('Build Model', () => { pipelineFactoryMock = { get: sinon.stub().resolves(null) }; + stageFactoryMock = { + get: sinon.stub().resolves([]) + }; + stageBuildFactoryMock = { + get: sinon.stub().resolves({}) + }; stepFactoryMock = { list: sinon.stub().resolves([]), removeSteps: sinon.stub().resolves([]) @@ -113,7 +123,8 @@ describe('Build Model', () => { scmUri, scmContext, admin: Promise.resolve(adminUser), - token: Promise.resolve('foo') + token: Promise.resolve('foo'), + workflowGraph: WORKFLOWGRAPH_WITH_STAGES }; jobMock = { id: jobId, @@ -146,6 +157,12 @@ describe('Build Model', () => { const sF = { getInstance: sinon.stub().returns(stepFactoryMock) }; + const stageF = { + getInstance: sinon.stub().returns(stageFactoryMock) + }; + const stageBuildF = { + getInstance: sinon.stub().returns(stageBuildFactoryMock) + }; const tF = { getInstance: sinon.stub().returns(templateFactoryMock) }; @@ -154,6 +171,8 @@ describe('Build Model', () => { mockery.registerMock('./userFactory', uF); mockery.registerMock('./jobFactory', jF); mockery.registerMock('./stepFactory', sF); + mockery.registerMock('./stageFactory', stageF); + mockery.registerMock('./stageBuildFactory', stageBuildF); mockery.registerMock('./templateFactory', tF); mockery.registerMock('screwdriver-hashr', hashaMock); diff --git a/test/lib/event.test.js b/test/lib/event.test.js index 396cfd89..dbf4f737 100644 --- a/test/lib/event.test.js +++ b/test/lib/event.test.js @@ -6,14 +6,19 @@ const sinon = require('sinon'); const schema = require('screwdriver-data-schema'); sinon.assert.expose(assert, { prefix: '' }); +const WORKFLOWGRAPH_WITH_STAGES = require('../data/workflowGraphWithStages.json'); describe('Event Model', () => { let buildFactoryMock; + let stageFactoryMock; + let stageBuildFactoryMock; let EventModel; let datastore; let event; let BaseModel; let createConfig; + let mockStages; + let mockStageBuild; before(() => { mockery.enable({ @@ -23,15 +28,44 @@ describe('Event Model', () => { }); beforeEach(() => { + mockStages = [ + { + id: 555, + pipelineId: 123345, + name: 'deploy', + jobIds: [1, 2, 3, 4], + description: 'Deploys canary jobs', + setup: [222], + teardown: [333] + } + ]; + mockStageBuild = { + id: 8888, + stageId: 555 + }; datastore = {}; buildFactoryMock = { list: sinon.stub().resolves(null) }; + stageFactoryMock = { + list: sinon.stub().resolves(mockStages) + }; + stageBuildFactoryMock = { + list: sinon.stub().resolves([mockStageBuild]) + }; 
mockery.registerMock('./buildFactory', { getInstance: sinon.stub().returns(buildFactoryMock) }); + mockery.registerMock('./stageFactory', { + getInstance: sinon.stub().returns(stageFactoryMock) + }); + + mockery.registerMock('./stageBuildFactory', { + getInstance: sinon.stub().returns(stageBuildFactoryMock) + }); + // eslint-disable-next-line global-require EventModel = require('../../lib/event'); @@ -40,6 +74,8 @@ describe('Event Model', () => { createConfig = { id: 1234, + pipelineId: 12345, + workflowGraph: WORKFLOWGRAPH_WITH_STAGES, datastore }; event = new EventModel(createConfig); @@ -63,6 +99,27 @@ describe('Event Model', () => { }); }); + describe('getStageBuilds', () => { + it('resolves with stage builds', () => { + const expectedStageBuildConfig = { + params: { + eventId: 1234 + } + }; + const expectedStageBuilds = [ + { + id: 8888, + stageId: 555 + } + ]; + + return event.getStageBuilds().then(result => { + assert.calledWith(stageBuildFactoryMock.list, expectedStageBuildConfig); + assert.deepEqual(result, expectedStageBuilds); + }); + }); + }); + describe('getBuilds', () => { it('use the default config when not passed in', () => { const expected = { diff --git a/test/lib/pipeline.test.js b/test/lib/pipeline.test.js index 28dcefbc..acd5a185 100644 --- a/test/lib/pipeline.test.js +++ b/test/lib/pipeline.test.js @@ -17,6 +17,7 @@ const SHARED_PROVIDER_YAML = '../data/sharedProvider.yaml'; const PROVIDER_YAML = '../data/provider.yaml'; const PARSED_YAML_WITH_PROVIDER = require('../data/parserWithProvider.json'); const PARSED_YAML = require('../data/parser.json'); +const PARSED_YAML_WITH_STAGES = require('../data/parserWithStages.json'); const PARSED_YAML_WITH_REQUIRES = require('../data/parserWithRequires.json'); const PARSED_YAML_PR = require('../data/parserWithWorkflowGraphPR.json'); const PARSED_YAML_WITH_ERRORS = require('../data/parserWithErrors.json'); @@ -75,6 +76,7 @@ describe('Pipeline Model', () => { let configPipelineMock; let childPipelineMock; let buildClusterFactory; + let stageFactoryMock; const dateNow = 1111111111; const scmUri = 'github.com:12345:master'; @@ -124,6 +126,8 @@ describe('Pipeline Model', () => { scmOrganizations: ['screwdriver'] }; + let stageMocks; + const decorateJobMock = job => { const decorated = hoek.clone(job); @@ -143,6 +147,24 @@ describe('Pipeline Model', () => { return decorateJobMock(j); }; + const decorateStageMock = stage => { + const decorated = hoek.clone(stage); + + sinon.stub(decorated, 'update').callsFake(async () => { + return decorated; + }); + + return decorated; + }; + + const getStageMocks = s => { + if (Array.isArray(s)) { + return s.map(decorateStageMock); + } + + return decorateStageMock(s); + }; + before(() => { mockery.enable({ useCleanCache: true, @@ -290,6 +312,28 @@ describe('Pipeline Model', () => { getReadOnlyInfo: sinon.stub().returns({}) }; parserMock = sinon.stub(); + + stageMocks = getStageMocks([ + { + id: 555, + name: 'outdated', + pipelineId: 123, + description: 'Old stage', + jobIds: [1, 2], + archived: false, + update() {} + }, + { + id: 8888, + name: 'canary', + pipelineId: 123, + description: 'Canary deployment', + jobIds: [3, 4], + archived: false, + update() {} + } + ]); + pipelineFactoryMock.getExternalJoinFlag.returns(false); pipelineFactoryMock.getNotificationsValidationErrFlag.returns(true); @@ -297,6 +341,11 @@ describe('Pipeline Model', () => { list: sinon.stub().resolves([]), get: sinon.stub().resolves(externalBuildCluster) }; + stageFactoryMock = { + get: sinon.stub().resolves({ id: 8888, name: 
'canary' }), + list: sinon.stub().resolves(stageMocks), + create: sinon.stub().resolves({ id: 8889, name: 'deploy' }) + }; buildClusterFactory = { getInstance: sinon.stub().returns(buildClusterFactoryMock) }; @@ -334,6 +383,9 @@ describe('Pipeline Model', () => { getInstance: sinon.stub().returns(tokenFactoryMock) }); mockery.registerMock('./buildClusterFactory', buildClusterFactory); + mockery.registerMock('./stageFactory', { + getInstance: sinon.stub().returns(stageFactoryMock) + }); // eslint-disable-next-line global-require PipelineModel = require('../../lib/pipeline'); @@ -469,10 +521,22 @@ describe('Pipeline Model', () => { describe('sync', () => { let publishMock; let mainMock; + let aMock; + let bMock; let externalMock; let mainModelMock; let publishModelMock; + let setupModelMock; + let teardownModelMock; + let aModelMock; + let bModelMock; + let setupModelMock2; + let teardownModelMock2; let externalModelMock; + let stageSetupMock; + let stageTeardownMock; + let stageSetupMock2; + let stageTeardownMock2; let parserConfig; beforeEach(() => { @@ -564,6 +628,48 @@ describe('Pipeline Model', () => { name: 'publish', state: 'ENABLED' }; + setupModelMock = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 5, + name: 'stage@canary:setup', + state: 'ENABLED' + }; + teardownModelMock = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 6, + name: 'stage@canary:teardown', + state: 'ENABLED' + }; + aModelMock = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 3, + name: 'A', + state: 'ENABLED' + }; + bModelMock = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 4, + name: 'B', + state: 'ENABLED' + }; + setupModelMock2 = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 7, + name: 'stage@deploy:setup', + state: 'ENABLED' + }; + teardownModelMock2 = { + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 8, + name: 'stage@deploy:teardown', + state: 'ENABLED' + }; externalModelMock = { isPR: sinon.stub().returns(false), update: sinon.stub(), @@ -617,6 +723,28 @@ describe('Pipeline Model', () => { } ] }; + aMock = { + pipelineId: testId, + name: 'a', + permutations: [ + { + commands: [{ command: 'echo hi', name: 'echo' }], + environment: { NODE_ENV: 'test', NODE_VERSION: '4' }, + image: 'node:4' + } + ] + }; + bMock = { + pipelineId: testId, + name: 'b', + permutations: [ + { + commands: [{ command: 'echo bye', name: 'echo' }], + environment: { NODE_ENV: 'test', NODE_VERSION: '4' }, + image: 'node:4' + } + ] + }; externalMock = { pipelineId: testId, name: 'main', @@ -632,6 +760,46 @@ describe('Pipeline Model', () => { } ] }; + stageSetupMock = { + pipelineId: testId, + name: 'stage@canary:setup', + permutations: [ + { + commands: [{ name: 'announce', command: 'post banner' }], + image: 'node:4' + } + ] + }; + stageTeardownMock = { + pipelineId: testId, + name: 'stage@canary:teardown', + permutations: [ + { + commands: [{ name: 'publish', command: 'publish blog' }], + image: 'node:4' + } + ] + }; + stageSetupMock2 = { + pipelineId: testId, + name: 'stage@deploy:setup', + permutations: [ + { + commands: [{ name: 'announce', command: 'post banner' }], + image: 'node:4' + } + ] + }; + stageTeardownMock2 = { + pipelineId: testId, + name: 'stage@deploy:teardown', + permutations: [ + { + commands: [{ name: 'publish', command: 'publish blog' }], + image: 'node:4' + } + ] + }; }); it('create external trigger in datastore for new jobs', () => { @@ -728,6 +896,116 @@ describe('Pipeline Model', () 
=> { }); }); + it('stores workflowGraph to pipeline with stage', () => { + mainMock.permutations[0].requires = ['~pr', '~commit', '~sd@12345:test', 'stage@canary:setup']; + mainMock.permutations[1].requires = ['~pr', '~commit', '~sd@12345:test', 'stage@canary:setup']; + mainMock.permutations[2].requires = ['~pr', '~commit', '~sd@12345:test', 'stage@canary:setup']; + aMock.permutations[0].requires = ['stage@deploy:setup']; + mainModelMock.update.resolves(mainModelMock); + publishModelMock.update.resolves(publishModelMock); + setupModelMock.update.resolves(setupModelMock); + teardownModelMock.update.resolves(teardownModelMock); + aModelMock.update.resolves(aModelMock); + bModelMock.update.resolves(bModelMock); + setupModelMock2.update.resolves(setupModelMock2); + teardownModelMock2.update.resolves(teardownModelMock2); + publishMock.permutations[0].requires = ['main']; + parserMock.withArgs(parserConfig).resolves(PARSED_YAML_WITH_STAGES); + jobs = [ + mainModelMock, + publishModelMock, + aModelMock, + bModelMock, + setupModelMock, + teardownModelMock, + setupModelMock2, + teardownModelMock2 + ]; + sinon.spy(pipeline, 'update'); + jobFactoryMock.list.resolves(jobs); + jobFactoryMock.create.withArgs(mainMock).resolves(mainModelMock); + jobFactoryMock.create.withArgs(publishMock).resolves(publishModelMock); + jobFactoryMock.create.withArgs(stageSetupMock).resolves({ + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 50, + name: 'stage@canary:setup', + state: 'ENABLED' + }); + jobFactoryMock.create.withArgs(stageTeardownMock).resolves({ + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 51, + name: 'stage@canary:teardown', + state: 'ENABLED' + }); + jobFactoryMock.create.withArgs(aMock).resolves(aModelMock); + jobFactoryMock.create.withArgs(bMock).resolves(bModelMock); + jobFactoryMock.create.withArgs(stageSetupMock2).resolves({ + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 50, + name: 'stage@deploy:setup', + state: 'ENABLED' + }); + jobFactoryMock.create.withArgs(stageTeardownMock2).resolves({ + isPR: sinon.stub().returns(false), + update: sinon.stub(), + id: 51, + name: 'stage@deploy:teardown', + state: 'ENABLED' + }); + pipelineFactoryMock.get.resolves({ + id: testId, + update: sinon.stub().resolves(null), + remove: sinon.stub().resolves(null), + workflowGraph: { + nodes: [ + { name: '~pr' }, + { name: '~commit' }, + { name: 'main', id: 3 }, + { name: 'stage@canary', stageId: 30 } + ] + } + }); + + return pipeline.sync().then(() => { + assert.calledOnce(pipeline.update); + assert.deepEqual(pipeline.workflowGraph, PARSED_YAML_WITH_STAGES.workflowGraph); + assert.calledWith(stageFactoryMock.create, { + name: 'deploy', + pipelineId: 123, + description: 'Prod deployment', + jobIds: [3, 4], + setup: 7, + teardown: 8 + }); + assert.calledOnce(stageMocks[0].update); + assert.deepEqual(stageMocks[0], { + id: 555, + name: 'outdated', + pipelineId: 123, + description: 'Old stage', + jobIds: [1, 2], + archived: true, + update: stageMocks[0].update + }); + assert.calledOnce(stageMocks[1].update); + assert.deepEqual(stageMocks[1], { + id: 8888, + name: 'canary', + pipelineId: 123, + description: 'Canary deployment', + jobIds: [1, 2], + setup: 5, + teardown: 6, + archived: false, + requires: ['~pr', '~commit', '~sd@12345:test'], + update: stageMocks[1].update + }); + }); + }); + it('adds subscribed pipelines from config to the model', () => { jobs = []; sinon.spy(pipeline, 'update');
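
Usage notes. First, a quick illustration of the new `getFullStageJobName` helper added in `lib/helper.js`. This is a minimal sketch: the in-repo require path and the stage/job names are assumptions for illustration.

```js
'use strict';

// Assumed in-repo path; the helper is exported from lib/helper.js in this patch.
const { getFullStageJobName } = require('./lib/helper');

// The helper builds the canonical stage job name: `stage@<stageName>:<jobName>`.
console.log(getFullStageJobName({ stageName: 'deploy', jobName: 'setup' })); // 'stage@deploy:setup'
console.log(getFullStageJobName({ stageName: 'canary', jobName: 'teardown' })); // 'stage@canary:teardown'
```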
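Next, a worked example of the conversion `_convertStages` performs, mirroring its method body above. The IDs are hypothetical and the `pipelineJobs` entries are simplified to `{ id, name }`.

```js
'use strict';

// Hypothetical parser output for one stage, plus the already-synced pipeline jobs.
const stages = {
    canary: { description: 'Canary deployment', jobs: ['main', 'publish'] }
};
const pipelineJobs = [
    { id: 1, name: 'main' },
    { id: 2, name: 'publish' },
    { id: 5, name: 'stage@canary:setup' },
    { id: 6, name: 'stage@canary:teardown' }
];

// Resolve job names to IDs the same way _convertStages does.
const jobIds = stages.canary.jobs.map(name => pipelineJobs.find(j => j.name === name).id);

console.log(jobIds); // [1, 2]

// _convertStages({ pipelineId: 123, stages, pipelineJobs }) then resolves to:
// [{
//     name: 'canary',
//     pipelineId: 123,
//     description: 'Canary deployment',
//     jobIds: [1, 2], // job names resolved to job IDs
//     setup: 5,       // id of the generated 'stage@canary:setup' job
//     teardown: 6     // id of the generated 'stage@canary:teardown' job
// }]
```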
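Finally, a usage sketch for the new `StageBuildFactory` and `EventModel.getStageBuilds`. This assumes the package is consumed as `screwdriver-models`, that a configured `datastore` and an `event` model are supplied by the host application, and that the ID values are made up.

```js
'use strict';

const { StageBuildFactory } = require('screwdriver-models'); // assumed package entry point

async function example(datastore, event) {
    // The factory is a singleton; pass the datastore on first use.
    const stageBuildFactory = StageBuildFactory.getInstance({ datastore });

    // Create a stage build tied to an event and a stage (hypothetical stageId).
    await stageBuildFactory.create({
        eventId: event.id,
        stageId: 555,
        workflowGraph: { nodes: [], edges: [] }
    });

    // The new event method lists stage builds via stageBuildFactory.list({ params: { eventId } }).
    const stageBuilds = await event.getStageBuilds();

    return stageBuilds; // e.g. [{ id: 8888, eventId: 1234, stageId: 555, ... }]
}
```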