diff --git a/src/sbvr-api/permissions.ts b/src/sbvr-api/permissions.ts
index 63afaf893..fbf0943dd 100644
--- a/src/sbvr-api/permissions.ts
+++ b/src/sbvr-api/permissions.ts
@@ -1514,7 +1514,7 @@ export const resolveApiKey = async (
 	req: HookReq,
 	paramName = 'apikey',
 	tx?: Tx,
 ): Promise => {
 	const apiKey =
-		req.params[paramName] ?? req.body[paramName] ?? req.query[paramName];
+		req.params?.[paramName] ?? req.body?.[paramName] ?? req.query?.[paramName];
 	if (apiKey == null) {
 		return;
 	}
diff --git a/src/sbvr-api/sbvr-utils.ts b/src/sbvr-api/sbvr-utils.ts
index 83ff6ac51..1607e3d91 100644
--- a/src/sbvr-api/sbvr-utils.ts
+++ b/src/sbvr-api/sbvr-utils.ts
@@ -101,6 +101,8 @@ import {
 	setExecutedMigrations,
 } from '../migrator/utils';
 
+const validBatchMethods = new Set(['PUT', 'POST', 'PATCH', 'DELETE', 'GET']);
+
 const LF2AbstractSQLTranslator = LF2AbstractSQL.createTranslator(sbvrTypes);
 const LF2AbstractSQLTranslatorVersion = `${LF2AbstractSQLVersion}+${sbvrTypesVersion}`;
 
@@ -140,6 +142,7 @@ export interface ApiKey extends Actor {
 }
 
 export interface Response {
+	id?: string;
 	statusCode: number;
 	headers?:
 		| {
@@ -1143,6 +1146,7 @@ const $getAffectedIds = async ({
 
 	const parsedRequest: uriParser.ParsedODataRequest &
 		Partial> = await uriParser.parseOData({
+			id: request.batchRequestId,
 			method: request.method,
 			url: `/${request.vocabulary}${request.url}`,
 		});
@@ -1192,11 +1196,103 @@ export const getModel = (vocabulary: string) => {
 	return models[vocabulary];
 };
 
+const validateBatch = (req: Express.Request) => {
+	const { requests } = req.body as { requests: uriParser.UnparsedRequest[] };
+	if (!Array.isArray(requests)) {
+		throw new BadRequestError(
+			'Batch requests must include an array of requests in the body via the "requests" property',
+		);
+	}
+	if (req.headers != null && req.headers['content-type'] == null) {
+		throw new BadRequestError(
+			'Headers in a batch request must include a "content-type" header if they are provided',
+		);
+	}
+	if (
+		requests.find(
+			(request) =>
+				request.headers?.authorization != null ||
+				request.url?.includes('apikey='),
+		) != null
+	) {
+		throw new BadRequestError(
+			'Authorization may only be passed to the main batch request',
+		);
+	}
+	const ids = new Set(
+		requests
+			.map((request) => request.id)
+			.filter((id) => typeof id === 'string') as string[],
+	);
+	if (ids.size !== requests.length) {
+		throw new BadRequestError(
+			'All requests in a batch request must have unique string ids',
+		);
+	}
+
+	for (const request of requests) {
+		if (
+			request.headers != null &&
+			request.headers['content-type'] == null &&
+			(req.headers == null || req.headers['content-type'] == null)
+		) {
+			throw new BadRequestError(
+				'Requests of a batch request that have headers must include a "content-type" header',
+			);
+		}
+		if (request.method == null) {
+			throw new BadRequestError(
+				'Requests of a batch request must have a "method"',
+			);
+		}
+		const upperCaseMethod = request.method.toUpperCase();
+		if (!validBatchMethods.has(upperCaseMethod)) {
+			throw new BadRequestError(
+				`Requests of a batch request must have a method matching one of the following: ${Array.from(
+					validBatchMethods,
+				).join(', ')}`,
+			);
+		}
+		if (
+			request.body !== undefined &&
+			(upperCaseMethod === 'GET' || upperCaseMethod === 'DELETE')
+		) {
+			throw new BadRequestError(
+				'GET and DELETE requests of a batch request must not have a body',
+			);
+		}
+	}
+
+	const urls = new Set(
+		requests.map((request) => request.url),
+	);
+	if (urls.has(undefined)) {
+		throw new BadRequestError('Requests of a batch request must have a "url"');
+	}
+	const containsBatch =
+		Array.from(urls).filter((url) => !!url?.includes('/$batch')).length > 0;
+	if (containsBatch) {
+		throw new BadRequestError('Batch requests cannot contain batch requests');
+	}
+	const urlModels = new Set(
+		Array.from(urls.values()).map((url: string) => url.split('/')[1]),
+	);
+	if (urlModels.size > 1) {
+		throw new BadRequestError(
+			'Batch requests must consist of requests for only one model',
+		);
+	}
+};
+
 const runODataRequest = (req: Express.Request, vocabulary: string) => {
 	if (env.DEBUG) {
 		api[vocabulary].logger.log('Parsing', req.method, req.url);
 	}
 
+	if (req.url.startsWith(`/${vocabulary}/$batch`)) {
+		validateBatch(req);
+	}
+
 	// Get the hooks for the current method/vocabulary as we know it,
 	// in order to run PREPARSE hooks, before parsing gets us more info
 	const { versions } = models[vocabulary];
@@ -1244,11 +1340,20 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
 		await runHooks('PREPARSE', reqHooks, { req, tx: req.tx });
 		let requests: uriParser.UnparsedRequest[];
 		// Check if it is a single request or a batch
-		if (req.batch != null && req.batch.length > 0) {
-			requests = req.batch;
+		if (req.url.startsWith(`/${vocabulary}/$batch`)) {
+			await Promise.all(
+				req.body.requests.map(
+					async (request: HookReq) =>
+						await runHooks('PREPARSE', reqHooks, {
+							req: request,
+							tx: req.tx,
+						}),
+				),
+			);
+			requests = req.body.requests;
 		} else {
 			const { method, url, body } = req;
-			requests = [{ method, url, data: body }];
+			requests = [{ method, url, body }];
 		}
 
 		const prepareRequest = async (
@@ -1312,7 +1417,13 @@ const runODataRequest = (req: Express.Request, vocabulary: string) => {
 
 		// Parse the OData requests
 		const results = await mappingFn(requests, async (requestPart) => {
-			const parsedRequest = await uriParser.parseOData(requestPart);
+			const parsedRequest = await uriParser.parseOData(
+				requestPart,
+				req.url.startsWith(`/${vocabulary}/$batch`) &&
+					!requestPart.url.includes(`/${vocabulary}/$batch`)
+					? req.headers
+					: undefined,
+			);
 
 			let request: uriParser.ODataRequest | uriParser.ODataRequest[];
 			if (Array.isArray(parsedRequest)) {
@@ -1392,7 +1503,10 @@ export const handleODataRequest: Express.Handler = async (req, res, next) => {
 		res.set('Cache-Control', 'no-cache');
 
 		// If we are dealing with a single request unpack the response and respond normally
-		if (req.batch == null || req.batch.length === 0) {
+		if (
+			!req.url.startsWith(`/${apiRoot}/$batch`) ||
+			req.body.requests?.length === 0
+		) {
 			let [response] = responses;
 			if (response instanceof HttpError) {
 				response = httpErrorToResponse(response);
@@ -1401,15 +1515,15 @@ export const handleODataRequest: Express.Handler = async (req, res, next) => {
 
 			// Otherwise its a multipart request and we reply with the appropriate multipart response
 		} else {
-			(res.status(200) as any).sendMulti(
-				responses.map((response) => {
+			res.status(200).json({
+				responses: responses.map((response) => {
 					if (response instanceof HttpError) {
 						return httpErrorToResponse(response);
 					} else {
 						return response;
 					}
 				}),
-			);
+			});
 		}
 	} catch (e: any) {
 		if (handleHttpErrors(req, res, e)) {
@@ -1436,7 +1550,7 @@ export const handleHttpErrors = (
 		for (const handleErrorFn of handleErrorFns) {
 			handleErrorFn(req, err);
 		}
-		const response = httpErrorToResponse(err);
+		const response = httpErrorToResponse(err, req);
 		handleResponse(res, response);
 		return true;
 	}
@@ -1455,10 +1569,12 @@ const handleResponse = (res: Express.Response, response: Response): void => {
 
 const httpErrorToResponse = (
 	err: HttpError,
+	req?: Express.Request,
 ): RequiredField => {
+	const message = err.getResponseBody();
 	return {
 		statusCode: err.status,
-		body: err.getResponseBody(),
+		body: req != null && 'batch' in req ? { responses: [], message } : message,
 		headers: err.headers,
 	};
 };
@@ -1572,7 +1688,8 @@ const runChangeSet =
 			throw new Error('No request id');
 		}
 		result.headers ??= {};
-		result.headers['content-id'] = request.id;
+		result.headers['content-id'] = request.batchRequestId;
+		result.id = request.batchRequestId;
 		changeSetResults.set(request.id, result);
 	};
 
@@ -1611,22 +1728,32 @@ const prepareResponse = async (
 	result: any,
 	tx: Db.Tx,
 ): Promise => {
+	let response: Response;
 	switch (request.method) {
 		case 'GET':
-			return await respondGet(req, request, result, tx);
+			response = await respondGet(req, request, result, tx);
+			break;
 		case 'POST':
-			return await respondPost(req, request, result, tx);
+			response = await respondPost(req, request, result, tx);
+			break;
 		case 'PUT':
 		case 'PATCH':
 		case 'MERGE':
-			return await respondPut(req, request, result, tx);
+			response = await respondPut(req, request, result, tx);
+			break;
 		case 'DELETE':
-			return await respondDelete(req, request, result, tx);
+			response = await respondDelete(req, request, result, tx);
+			break;
 		case 'OPTIONS':
-			return await respondOptions(req, request, result, tx);
+			response = await respondOptions(req, request, result, tx);
+			break;
 		default:
 			throw new MethodNotAllowedError();
 	}
+	if (request.batchRequestId != null) {
+		response['id'] = request.batchRequestId;
+	}
+	return response;
 };
 
 const checkReadOnlyRequests = (request: uriParser.ODataRequest) => {
diff --git a/src/sbvr-api/uri-parser.ts b/src/sbvr-api/uri-parser.ts
index 253258e74..0d62cec92 100644
--- a/src/sbvr-api/uri-parser.ts
+++ b/src/sbvr-api/uri-parser.ts
@@ -25,19 +25,22 @@ import {
 	TranslationError,
 } from './errors';
 import * as sbvrUtils from './sbvr-utils';
+import { IncomingHttpHeaders } from 'http';
 
 export type OdataBinds = ODataBinds;
 
 export interface UnparsedRequest {
+	id?: string;
 	method: string;
 	url: string;
-	data?: any;
-	headers?: { [header: string]: string };
+	body?: any;
+	headers?: IncomingHttpHeaders;
 	changeSet?: UnparsedRequest[];
 	_isChangeSet?: boolean;
 }
 
 export interface ParsedODataRequest {
+	headers?: IncomingHttpHeaders;
 	method: SupportedMethod;
 	url: string;
 	vocabulary: string;
@@ -48,6 +51,7 @@ export interface ParsedODataRequest {
 	odataBinds: OdataBinds;
 	custom: AnyObject;
 	id?: number | undefined;
+	batchRequestId?: string;
 	_defer?: boolean;
 }
 export interface ODataRequest extends ParsedODataRequest {
@@ -263,15 +267,19 @@ export const metadataEndpoints = ['$metadata', '$serviceroot'];
 
 export async function parseOData(
 	b: UnparsedRequest & { _isChangeSet?: false },
+	headers?: IncomingHttpHeaders,
 ): Promise;
 export async function parseOData(
 	b: UnparsedRequest & { _isChangeSet: true },
+	headers?: IncomingHttpHeaders,
 ): Promise;
 export async function parseOData(
 	b: UnparsedRequest,
+	headers?: IncomingHttpHeaders,
 ): Promise;
 export async function parseOData(
 	b: UnparsedRequest,
+	batchHeaders?: IncomingHttpHeaders,
 ): Promise {
 	try {
 		if (b._isChangeSet && b.changeSet != null) {
@@ -292,12 +300,14 @@ export async function parseOData(
 		const odata = memoizedParseOdata(url);
 
 		return {
+			batchRequestId: b.id,
+			headers: { ...batchHeaders, ...b.headers },
 			method: b.method as SupportedMethod,
 			url,
 			vocabulary: apiRoot,
 			resourceName: odata.tree.resource,
 			originalResourceName: odata.tree.resource,
-			values: b.data ?? {},
+			values: b.body ?? {},
 			odataQuery: odata.tree,
 			odataBinds: odata.binds,
 			custom: {},
@@ -362,7 +372,7 @@ const parseODataChangeset = (
 		originalResourceName: odata.tree.resource,
 		odataBinds: odata.binds,
 		odataQuery: odata.tree,
-		values: b.data ?? {},
+		values: b.body ?? {},
 		custom: {},
 		id: contentId,
 		_defer: defer,
@@ -379,7 +389,7 @@ const splitApiRoot = (url: string) => {
 };
 
 const mustExtractHeader = (
-	body: { headers?: { [header: string]: string } },
+	body: { headers?: IncomingHttpHeaders },
 	header: string,
 ) => {
 	const h: any = body.headers?.[header]?.[0];
diff --git a/test/07-batch.test.ts b/test/07-batch.test.ts
new file mode 100644
index 000000000..5d3d5695b
--- /dev/null
+++ b/test/07-batch.test.ts
@@ -0,0 +1,515 @@
+const configPath = __dirname + '/fixtures/07-batch/config';
+const hooksPath = __dirname + '/fixtures/07-batch/translations/hooks';
+import { testInit, testDeInit, testLocalServer } from './lib/test-init';
+import { faker } from '@faker-js/faker';
+import { expect } from 'chai';
+import * as supertest from 'supertest';
+
+const validBatchMethods = ['PUT', 'POST', 'PATCH', 'DELETE', 'GET'];
+
+const validBatchPostRequest = {
+	id: '1',
+	method: 'POST',
+	url: '/university/student',
+	body: {
+		matrix_number: 100004,
+		name: faker.name.firstName(),
+		last_name: faker.name.lastName(),
+		studies_at__campus: 'bar',
+	},
+};
+
+// TODO: figure out how to not persist the results across describes
+describe('07 batch tests', function () {
+	let pineServer: Awaited>;
+	before(async () => {
+		pineServer = await testInit({
+			configPath,
+			hooksPath,
+			deleteDb: true,
+		});
+		// setup faker so that test data uniqueness is set for all test cases
+		faker.seed();
+	});
+
+	after(async () => {
+		testDeInit(pineServer);
+	});
+
+	describe('test non-atomic batch requests', () => {
+		it('should create two students', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100000,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: '1',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100001,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(200);
+			const res = await supertest(testLocalServer)
+				.get('/university/student')
+				.expect(200);
+			expect(res.body)
+				.to.be.an('object')
+				.that.has.ownProperty('d')
+				.to.be.an('array')
+				.of.length(2);
+		});
+
+		it('successful request should have `responses` in its body', async () => {
+			const id = Math.random().toString();
+			const res = await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id,
+							method: 'GET',
+							url: '/university/student',
+						},
+					],
+				})
+				.expect(200);
+			expect(res.body)
+				.to.be.an('object')
+				.that.has.ownProperty('responses')
+				.to.be.an('array')
+				.of.length(1);
+			expect(res.body.responses[0].body)
+				.to.be.an('object')
+				.that.has.ownProperty('d')
+				.to.be.an('array')
+				.of.length(2);
+		});
+
+		it("ids of responses in a successful request should match the original requests' ids", async () => {
+			const id = Math.random().toString();
+			const id2 = id + 'test';
+			const res = await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id,
+							method: 'GET',
+							url: '/university/student',
+						},
+						{
+							id: id2,
+							method: 'GET',
+							url: '/university/student',
+						},
+					],
+				})
+				.expect(200);
+			expect(res.body.responses[0].id).to.equal(id);
+			expect(res.body.responses[1].id).to.equal(id2);
+		});
+
+		it('should fail if the body does not have a valid "requests" property', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({})
+				.expect(
+					400,
+					'"Batch requests must include an array of requests in the body via the \\"requests\\" property"',
+				);
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({ requests: 'test' })
+				.expect(
+					400,
+					'"Batch requests must include an array of requests in the body via the \\"requests\\" property"',
+				);
+		});
+
+		it('should not complete following requests if an earlier request fails', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: null,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: '1',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(200);
+			const res = await supertest(testLocalServer)
+				.get('/university/student')
+				.expect(200);
+			expect(res.body)
+				.to.be.an('object')
+				.that.has.ownProperty('d')
+				.to.be.an('array')
+				.of.length(2);
+		});
+
+		it('should fail if any request does not have a string id', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"All requests in a batch request must have unique string ids"',
+				);
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: 0,
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: 'hello',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"All requests in a batch request must have unique string ids"',
+				);
+		});
+
+		it('should fail if not all requests have a unique id', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"All requests in a batch request must have unique string ids"',
+				);
+		});
+
+		it('should fail if any of the requests is a batch request', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/$batch',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						validBatchPostRequest,
+					],
+				})
+				.expect(400, '"Batch requests cannot contain batch requests"');
+		});
+
+		it('should fail if any of the requests does not have a url property', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						validBatchPostRequest,
+					],
+				})
+				.expect(400, '"Requests of a batch request must have a \\"url\\""');
+		});
+
+		it('should fail if any of the requests does not have a valid value for method', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						validBatchPostRequest,
+					],
+				})
+				.expect(400, '"Requests of a batch request must have a \\"method\\""');
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'MERGE',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						validBatchPostRequest,
+					],
+				})
+				.expect(
+					400,
+					`"Requests of a batch request must have a method matching one of the following: ${validBatchMethods.join(
+						', ',
+					)}"`,
+				);
+		});
+
+		it('should fail if any of the requests have method GET or DELETE and have a body', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: '1',
+							method: 'GET',
+							url: '/university/student',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"GET and DELETE requests of a batch request must not have a body"',
+				);
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/university/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						{
+							id: '1',
+							method: 'DELETE',
+							url: '/university/student',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'bar',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"GET and DELETE requests of a batch request must not have a body"',
+				);
+		});
+
+		it('should fail if trying to query cross-model in one batch', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/v1/student',
+							body: {
+								matrix_number: 100003,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+						validBatchPostRequest,
+					],
+				})
+				.expect(
+					400,
+					'"Batch requests must consist of requests for only one model"',
+				);
+		});
+
+		it('Should error if any authorization is passed in a request in the requests array', async () => {
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/v1/student?apikey=some_key',
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"Authorization may only be passed to the main batch request"',
+				);
+
+			await supertest(testLocalServer)
+				.post('/university/$batch')
+				.send({
+					requests: [
+						{
+							id: '0',
+							method: 'POST',
+							url: '/v1/student',
+							headers: { authorization: 'Bearer test' },
+							body: {
+								matrix_number: 100004,
+								name: faker.name.firstName(),
+								last_name: faker.name.lastName(),
+								studies_at__campus: 'foo',
+							},
+						},
+					],
+				})
+				.expect(
+					400,
+					'"Authorization may only be passed to the main batch request"',
+				);
+		});
+	});
+});
diff --git a/test/fixtures/07-batch/config.ts b/test/fixtures/07-batch/config.ts
new file mode 100644
index 000000000..0218d533d
--- /dev/null
+++ b/test/fixtures/07-batch/config.ts
@@ -0,0 +1,34 @@
+import { getAbstractSqlModelFromFile } from '../../../src/bin/utils';
+import type { ConfigLoader } from '../../../src/server-glue/module';
+
+const apiRoot = 'university';
+const modelName = 'university';
+const modelFile = __dirname + '/university.sbvr';
+
+import { v1AbstractSqlModel, v1Translations } from './translations/v1';
+
+export const abstractSql = getAbstractSqlModelFromFile(modelFile);
+
+export default {
+	models: [
+		{
+			modelName,
+			abstractSql,
+			apiRoot,
+		},
+		{
+			apiRoot: 'v1',
+			modelName: 'v1',
+			abstractSql: v1AbstractSqlModel,
+			translateTo: 'university',
+			translations: v1Translations,
+		},
+	],
+	users: [
+		{
+			username: 'guest',
+			password: ' ',
+			permissions: ['resource.all'],
+		},
+	],
+} as ConfigLoader.Config;
diff --git a/test/fixtures/07-batch/translations/hooks.ts b/test/fixtures/07-batch/translations/hooks.ts
new file mode 100644
index 000000000..cc91e7ed6
--- /dev/null
+++ b/test/fixtures/07-batch/translations/hooks.ts
@@ -0,0 +1 @@
+import('./v1/hooks');
diff --git a/test/fixtures/07-batch/translations/v1/hooks.ts b/test/fixtures/07-batch/translations/v1/hooks.ts
new file mode 100644
index 000000000..4d78f1b0e
--- /dev/null
+++ b/test/fixtures/07-batch/translations/v1/hooks.ts
@@ -0,0 +1,16 @@
+import { sbvrUtils } from '../../../../../src/server-glue/module';
+
+const addHook = (
+	methods: Array[0]>,
+	resource: string,
+	hook: sbvrUtils.Hooks,
+) => {
+	methods.map((method) => sbvrUtils.addPureHook(method, 'v1', resource, hook));
+};
+
+addHook(['PUT', 'POST', 'PATCH'], 'student', {
+	async POSTPARSE({ request }) {
+		request.values.last_name = request.values.lastname;
+		delete request.values.lastname;
+	},
+});
diff --git a/test/fixtures/07-batch/translations/v1/index.ts b/test/fixtures/07-batch/translations/v1/index.ts
new file mode 100644
index 000000000..fa5eaf622
--- /dev/null
+++ b/test/fixtures/07-batch/translations/v1/index.ts
@@ -0,0 +1,16 @@
+import { ConfigLoader } from '../../../../../src/server-glue/module';
+import { getAbstractSqlModelFromFile } from '../../../../../src/bin/utils';
+
+export const toVersion = 'university';
+
+export const v1AbstractSqlModel = getAbstractSqlModelFromFile(
+	__dirname + '/university.sbvr',
+);
+
+v1AbstractSqlModel.relationships['version'] = { v1: {} };
+
+export const v1Translations: ConfigLoader.Model['translations'] = {
+	student: {
+		lastname: 'last name',
+	},
+};
diff --git a/test/fixtures/07-batch/translations/v1/university.sbvr b/test/fixtures/07-batch/translations/v1/university.sbvr
new file mode 100644
index 000000000..625b24773
--- /dev/null
+++ b/test/fixtures/07-batch/translations/v1/university.sbvr
@@ -0,0 +1,28 @@
+Vocabulary: university
+
+Term: name
+	Concept Type: Short Text (Type)
+
+Term: lastname
+	Concept Type: Short Text (Type)
+
+Term: matrix number
+	Concept Type: Integer (Type)
+
+Term: campus
+	Concept Type: Short Text (Type)
+
+Term: student
+
+Fact Type: student has matrix number
+	Necessity: each student has exactly one matrix number
+	Necessity: each matrix number is of exactly one student
+
+Fact Type: student has name
+	Necessity: each student has exactly one name
+
+Fact Type: student has lastname
+	Necessity: each student has exactly one lastname
+
+Fact Type: student studies at campus
+	Necessity: each student studies at exactly one campus
\ No newline at end of file
diff --git a/test/fixtures/07-batch/university.sbvr b/test/fixtures/07-batch/university.sbvr
new file mode 100644
index 000000000..eeeda27d1
--- /dev/null
+++ b/test/fixtures/07-batch/university.sbvr
@@ -0,0 +1,28 @@
+Vocabulary: university
+
+Term: name
+	Concept Type: Short Text (Type)
+
+Term: last name
+	Concept Type: Short Text (Type)
+
+Term: matrix number
+	Concept Type: Integer (Type)
+
+Term: campus
+	Concept Type: Short Text (Type)
+
+Term: student
+
+Fact Type: student has matrix number
+	Necessity: each student has exactly one matrix number
+	Necessity: each matrix number is of exactly one student
+
+Fact Type: student has name
+	Necessity: each student has exactly one name
+
+Fact Type: student has last name
+	Necessity: each student has exactly one last name
+
+Fact Type: student studies at campus
+	Necessity: each student studies at exactly one campus
\ No newline at end of file
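
Editorial note (not part of the patch above): the sketch below illustrates, in TypeScript, how a client might exercise the `/$batch` endpoint this diff introduces. The request and response envelope shapes (`requests` in, `responses` out, each response echoing its originating request's `id`) follow `validateBatch`, `handleODataRequest` and `prepareResponse` above; the host, port and student values are assumptions for the example, and it relies on the global `fetch` available in Node 18+.

```ts
// Hypothetical client-side call against a Pine.js instance serving the
// `university` model from the test fixtures; host and port are assumptions.
interface BatchResponse {
	id?: string;
	statusCode: number;
	headers?: Record<string, string>;
	body?: unknown;
}

async function runBatch(): Promise<void> {
	const res = await fetch('http://localhost:1337/university/$batch', {
		method: 'POST',
		headers: { 'content-type': 'application/json' },
		body: JSON.stringify({
			requests: [
				// Each request needs a unique string id, a supported method, and a
				// url that targets the same model as every other request in the batch.
				{
					id: 'create-student',
					method: 'POST',
					url: '/university/student',
					body: {
						matrix_number: 100010,
						name: 'Grace',
						last_name: 'Hopper',
						studies_at__campus: 'foo',
					},
				},
				{ id: 'list-students', method: 'GET', url: '/university/student' },
			],
		}),
	});
	// The endpoint answers with `{ responses: [...] }`; each entry carries the
	// id of the request that produced it, so results can be correlated.
	const { responses } = (await res.json()) as { responses: BatchResponse[] };
	for (const response of responses) {
		console.log(response.id, response.statusCode);
	}
}

void runBatch();
```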