diff --git a/Dockerfile b/Dockerfile index 14d2a837..c94e42d8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ ENV NODE_ENV=production WORKDIR /app/ COPY package*.json ./ RUN npm ci --ignore-scripts -COPY --from=build /app/build /app/build +COPY --from=build /app/build/src /app/build/src COPY --from=build /app/shacl /app/shacl COPY --from=build /app/assets /app/assets USER node diff --git a/jest.config.js b/jest.config.js index 297035df..a7128383 100644 --- a/jest.config.js +++ b/jest.config.js @@ -2,6 +2,7 @@ export default { preset: 'ts-jest/presets/default-esm', extensionsToTreatAsEsm: ['.ts'], testTimeout: 10000, + testPathIgnorePatterns: ['build/test'], collectCoverage: true, collectCoverageFrom: [ '**/src/**/*.ts', // Include files that are not covered by tests. @@ -10,8 +11,8 @@ export default { coverageReporters: ['json-summary', 'text'], coverageThreshold: { global: { - lines: 69.73, - statements: 69.97, + lines: 69.31, + statements: 69.55, branches: 60.52, functions: 67.01, }, diff --git a/package-lock.json b/package-lock.json index 0fcdc84c..c6861588 100644 --- a/package-lock.json +++ b/package-lock.json @@ -45,7 +45,7 @@ "@types/rdf-dataset-ext": "^1.0.1", "@types/rdf-ext": "^2.0.1", "chokidar": "^4.0.0", - "gts": "^5.2.0", + "gts": "^6.0.0", "husky": "^9.0.11", "jest": "^29.5.0", "jest-coverage-thresholds-bumper": "^1.1.0", @@ -5240,9 +5240,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", - "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -5397,11 +5397,12 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.14", - 
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", "dependencies": { - "@humanwhocodes/object-schema": "^2.0.2", + "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" }, @@ -5444,7 +5445,8 @@ "node_modules/@humanwhocodes/object-schema": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", - "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==" + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead" }, "node_modules/@isaacs/cliui": { "version": "8.0.2", @@ -8819,15 +8821,16 @@ } }, "node_modules/eslint": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", - "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.57.0", - "@humanwhocodes/config-array": "^0.11.14", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -8986,13 +8989,13 @@ } }, "node_modules/eslint-plugin-prettier": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.1.3.tgz", - "integrity": "sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", + "integrity": "sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==", "dev": true, "dependencies": { "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.8.6" + "synckit": "^0.9.1" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -10025,24 +10028,24 @@ } }, "node_modules/gts": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/gts/-/gts-5.3.1.tgz", - "integrity": "sha512-P9F+krJkGOkisUX+P9pfUas1Xy+U+CxBFZT62uInkJbgvZpnW1ug/pIcMJJmLOthMq1J88lpQUGhXDC9UTvVcw==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/gts/-/gts-6.0.0.tgz", + "integrity": "sha512-wTZZYGwa67j88rEzW8nyshxHJKNk8ytGjYHu+Y1uC/B+oMTEqMNXikULzQfc5/BcmtVQ/FWurj84Q/WWS6Wd3g==", "dev": true, "dependencies": { "@typescript-eslint/eslint-plugin": "5.62.0", "@typescript-eslint/parser": "5.62.0", "chalk": "^4.1.2", - "eslint": "8.57.0", + "eslint": "8.57.1", "eslint-config-prettier": "9.1.0", "eslint-plugin-n": "15.7.0", - "eslint-plugin-prettier": "5.1.3", + "eslint-plugin-prettier": "5.2.1", "execa": "^5.0.0", "inquirer": "^7.3.3", "json5": "^2.1.3", "meow": 
"^9.0.0", "ncp": "^2.0.0", - "prettier": "3.2.5", + "prettier": "3.3.3", "rimraf": "3.0.2", "write-file-atomic": "^4.0.0" }, @@ -10050,10 +10053,10 @@ "gts": "build/src/cli.js" }, "engines": { - "node": ">=14" + "node": ">=18" }, "peerDependencies": { - "typescript": ">=3" + "typescript": ">=5.6.3" } }, "node_modules/hard-rejection": { @@ -12472,9 +12475,9 @@ } }, "node_modules/prettier": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", - "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", "dev": true, "bin": { "prettier": "bin/prettier.cjs" @@ -14720,9 +14723,9 @@ } }, "node_modules/synckit": { - "version": "0.8.8", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.8.tgz", - "integrity": "sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ==", + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.2.tgz", + "integrity": "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==", "dev": true, "dependencies": { "@pkgr/core": "^0.1.0", @@ -14736,9 +14739,9 @@ } }, "node_modules/synckit/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz", + "integrity": "sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA==", "dev": true }, "node_modules/test-exclude": { diff --git a/package.json b/package.json index 34fc9a9f..aea640c5 100644 --- 
a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "node": ">=16" }, "scripts": { - "start": "node --experimental-specifier-resolution=node build/main.js", + "start": "node --experimental-specifier-resolution=node build/src/main.js", "dev": "tsc-watch --onSuccess 'npm run start'", "test": "NODE_OPTIONS='--experimental-vm-modules --no-experimental-fetch' jest ${1}", "lint": "gts lint", @@ -23,7 +23,7 @@ "author": "", "license": "ISC", "files": [ - "build/", + "build/src", "shacl/" ], "dependencies": { @@ -63,7 +63,7 @@ "@types/rdf-dataset-ext": "^1.0.1", "@types/rdf-ext": "^2.0.1", "chokidar": "^4.0.0", - "gts": "^5.2.0", + "gts": "^6.0.0", "husky": "^9.0.11", "jest": "^29.5.0", "jest-coverage-thresholds-bumper": "^1.1.0", diff --git a/src/crawler.ts b/src/crawler.ts index 753c5db4..dbf1a5d3 100644 --- a/src/crawler.ts +++ b/src/crawler.ts @@ -12,7 +12,7 @@ export class Crawler { private datasetStore: DatasetStore, private ratingStore: RatingStore, private validator: Validator, - private logger: Pino.Logger + private logger: Pino.Logger, ) {} /** @@ -47,7 +47,7 @@ export class Crawler { if (e instanceof HttpError) { statusCode = e.statusCode; this.logger.info( - `${registration.url} returned HTTP error ${statusCode}` + `${registration.url} returned HTTP error ${statusCode}`, ); } @@ -65,7 +65,7 @@ export class Crawler { const updatedRegistration = registration.read( datasetIris, statusCode, - isValid + isValid, ); await this.registrationStore.store(updatedRegistration); } diff --git a/src/dataset.ts b/src/dataset.ts index 6dabfbde..96246403 100644 --- a/src/dataset.ts +++ b/src/dataset.ts @@ -23,7 +23,7 @@ export function extractIri(dataset: DatasetCore): URL { ...dataset.match( null, DataFactory.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), - datasetType + datasetType, ), ][0]; return new URL(quad.subject.value); @@ -31,7 +31,7 @@ export function extractIri(dataset: DatasetCore): URL { export async function load( stream: Readable, - contentType: 
'application/ld+json' | string + contentType: 'application/ld+json' | string, ) { const parser = contentType === 'application/ld+json' @@ -45,8 +45,8 @@ export async function load( stream .pipe(parser) .on('error', error => reject(error)) - .pipe(new StandardizeSchemaOrgPrefixToHttps()) + .pipe(new StandardizeSchemaOrgPrefixToHttps()), ) - .then(data => resolve(data)) + .then(data => resolve(data)), ); } diff --git a/src/fetch.ts b/src/fetch.ts index 00f27032..cbaea054 100644 --- a/src/fetch.ts +++ b/src/fetch.ts @@ -10,7 +10,7 @@ import {rdfDereferencer} from 'rdf-dereference'; export class HttpError extends Error { constructor( message: string, - public readonly statusCode: number + public readonly statusCode: number, ) { super(message); } @@ -39,7 +39,7 @@ export async function dereference(url: URL): Promise { const stream = pipeline( data, new StandardizeSchemaOrgPrefixToHttps(), - () => {} // Noop because errors are caught below. + () => {}, // Noop because errors are caught below. 
); return await factory.dataset().import(stream); } catch (e) { diff --git a/src/graphdb.ts b/src/graphdb.ts index 9c71f043..b721b1b3 100644 --- a/src/graphdb.ts +++ b/src/graphdb.ts @@ -36,7 +36,7 @@ export class GraphDbClient { constructor( private url: string, - private repository: string + private repository: string, ) { // Doesn't work with authentication: see https://github.com/Ontotext-AD/graphdb.js/issues/123 // const config = new graphdb.repository.RepositoryClientConfig() @@ -60,7 +60,7 @@ export class GraphDbClient { 'Could not authenticate username ' + this.username + ' with GraphDB; got status code ' + - response.status + response.status, ); } @@ -105,7 +105,7 @@ export class GraphDbClient { ' for ' + options.method + ' ' + - repositoryUrl + repositoryUrl, ); } @@ -132,7 +132,7 @@ export class GraphDbClient { if (!response.ok) { console.error( - `${response.status} response for SPARQL update ${payload})` + `${response.status} response for SPARQL update ${payload})`, ); } } @@ -163,39 +163,39 @@ export class GraphDbRegistrationStore implements RegistrationStore { factory.namedNode('http://schema.org/datePosted'), factory.literal( registration.datePosted.toISOString(), - factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime') - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime'), + ), ), this.registrationQuad( registration, factory.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), - factory.namedNode('http://schema.org/EntryPoint') + factory.namedNode('http://schema.org/EntryPoint'), ), this.registrationQuad( registration, factory.namedNode('http://schema.org/encoding'), - factory.namedNode('http://schema.org') // Currently the only vocabulary that we support. + factory.namedNode('http://schema.org'), // Currently the only vocabulary that we support. 
), ...registration.datasets.flatMap(datasetIri => { const datasetQuads = [ this.registrationQuad( registration, factory.namedNode('http://schema.org/about'), - factory.namedNode(datasetIri.toString()) + factory.namedNode(datasetIri.toString()), ), factory.quad( factory.namedNode(datasetIri.toString()), factory.namedNode( - 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type' + 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type', ), factory.namedNode('http://schema.org/Dataset'), - factory.namedNode(this.registrationsGraph) + factory.namedNode(this.registrationsGraph), ), factory.quad( factory.namedNode(datasetIri.toString()), factory.namedNode('http://schema.org/subjectOf'), factory.namedNode(registration.url.toString()), - factory.namedNode(this.registrationsGraph) + factory.namedNode(this.registrationsGraph), ), ]; if (registration.dateRead !== undefined) { @@ -205,10 +205,10 @@ export class GraphDbRegistrationStore implements RegistrationStore { factory.namedNode('http://schema.org/dateRead'), factory.literal( registration.dateRead.toISOString(), - factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime') + factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime'), ), - factory.namedNode(this.registrationsGraph) - ) + factory.namedNode(this.registrationsGraph), + ), ); } return datasetQuads; @@ -221,9 +221,9 @@ export class GraphDbRegistrationStore implements RegistrationStore { factory.namedNode('http://schema.org/dateRead'), factory.literal( registration.dateRead.toISOString(), - factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime') - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime'), + ), + ), ); } @@ -234,9 +234,9 @@ export class GraphDbRegistrationStore implements RegistrationStore { factory.namedNode('http://schema.org/status'), factory.literal( registration.statusCode.toString(), - factory.namedNode('http://www.w3.org/2001/XMLSchema#integer') - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#integer'), + ), + 
), ); } @@ -247,9 +247,9 @@ export class GraphDbRegistrationStore implements RegistrationStore { factory.namedNode('http://schema.org/validUntil'), factory.literal( registration.validUntil.toISOString(), - factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime') - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime'), + ), + ), ); } @@ -297,21 +297,21 @@ export class GraphDbRegistrationStore implements RegistrationStore { new Registration( new URL(binding.s.value), new Date(binding.datePosted.value), - binding.validUntil ? new Date(binding.validUntil.value) : undefined - ) + binding.validUntil ? new Date(binding.validUntil.value) : undefined, + ), ); } private registrationQuad = ( registration: Registration, predicate: Quad_Predicate, - object: Quad_Object + object: Quad_Object, ) => factory.quad( factory.namedNode(registration.url.toString()), predicate, object, - factory.namedNode(this.registrationsGraph) + factory.namedNode(this.registrationsGraph), ); } @@ -320,7 +320,7 @@ export class GraphDbAllowedRegistrationDomainStore { constructor( private readonly client: GraphDbClient, - private readonly allowedDomainNamesGraph = 'https://data.netwerkdigitaalerfgoed.nl/registry/allowed_domain_names' + private readonly allowedDomainNamesGraph = 'https://data.netwerkdigitaalerfgoed.nl/registry/allowed_domain_names', ) {} async contains(...domainNames: Array) { @@ -387,12 +387,12 @@ export class GraphDbDatasetStore implements DatasetStore { '/rdf-graphs/service?graph=' + encodeURIComponent(graphIri.toString()), body: result, - }) + }), ); } catch (e) { reject(e); } - } + }, ); }); } diff --git a/src/instrumentation.ts b/src/instrumentation.ts index 64250e64..014a59c8 100644 --- a/src/instrumentation.ts +++ b/src/instrumentation.ts @@ -10,17 +10,17 @@ import {DatasetStore} from './dataset.js'; export function startInstrumentation(datasetStore: DatasetStore) { datasetsCounter.addCallback(async result => - result.observe(await 
datasetStore.countDatasets()) + result.observe(await datasetStore.countDatasets()), ); organisationsCounter.addCallback(async result => - result.observe(await datasetStore.countOrganisations()) + result.observe(await datasetStore.countOrganisations()), ); } const meterProvider = new MeterProvider({ resource: Resource.default().merge( new Resource({ [SemanticResourceAttributes.SERVICE_NAME]: 'dataset-register', - }) + }), ), }); @@ -44,7 +44,7 @@ const organisationsCounter = meter.createObservableCounter( { description: 'Number of organisations', valueType: ValueType.INT, - } + }, ); export const registrationsCounter = meter.createCounter( @@ -52,7 +52,7 @@ export const registrationsCounter = meter.createCounter( { description: 'Number of times a dataset/catalog was submitted', valueType: ValueType.INT, - } + }, ); export const validationsCounter = meter.createCounter('validations.counter', { diff --git a/src/main.ts b/src/main.ts index cf979c56..0e8fea81 100644 --- a/src/main.ts +++ b/src/main.ts @@ -14,13 +14,13 @@ import {startInstrumentation} from './instrumentation.js'; const client = new GraphDbClient( process.env.GRAPHDB_URL || 'http://127.0.0.1:7200', - 'registry' + 'registry', ); -(async () => { +await (async () => { if (process.env.GRAPHDB_USERNAME && process.env.GRAPHDB_PASSWORD) { await client.authenticate( process.env.GRAPHDB_USERNAME, - process.env.GRAPHDB_PASSWORD + process.env.GRAPHDB_PASSWORD, ); } @@ -38,15 +38,15 @@ const client = new GraphDbClient( datasetStore, ratingStore, validator, - logger + logger, ); // Schedule crawler to check every hour for CRAWLER_INTERVAL that have expired their REGISTRATION_URL_TTL. 
const ttl = ((process.env.REGISTRATION_URL_TTL || 86400) as number) * 1000; if (process.env.CRAWLER_SCHEDULE !== undefined) { logger.info(`Crawler scheduled at ${process.env.CRAWLER_SCHEDULE}`); - scheduleJob(process.env.CRAWLER_SCHEDULE, () => { - crawler.crawl(new Date(Date.now() - ttl)); + scheduleJob(process.env.CRAWLER_SCHEDULE, async () => { + await crawler.crawl(new Date(Date.now() - ttl)); }); } @@ -62,7 +62,7 @@ const client = new GraphDbClient( { logger: process.env.LOG !== 'false', trustProxy: process.env.TRUST_PROXY === 'true', - } + }, ); await httpServer.listen({port: 3000, host: '0.0.0.0'}); } catch (err) { diff --git a/src/query.ts b/src/query.ts index 24685c94..9030b2d5 100644 --- a/src/query.ts +++ b/src/query.ts @@ -136,10 +136,10 @@ export const constructQuery = ` OPTIONAL { ?${distribution} dct:format ?${distributionFormat} } OPTIONAL { ?${distribution} dcat:mediaType ?${distributionMediaType} } OPTIONAL { ?${distribution} dct:issued ${convertToXsdDate( - distributionDatePublished + distributionDatePublished, )} } OPTIONAL { ?${distribution} dct:modified ${convertToXsdDate( - distributionDateModified + distributionDateModified, )} } OPTIONAL { ?${distribution} dct:description ?${distributionDescription} } OPTIONAL { ?${distribution} dct:language ?${distributionLanguage} } @@ -223,10 +223,10 @@ function schemaOrgQuery(prefix: string): string { OPTIONAL { ?${distribution} ${prefix}:fileFormat ?${distributionMediaType} } OPTIONAL { ?${distribution} ${prefix}:datePublished ${convertToXsdDate( - distributionDatePublished + distributionDatePublished, )} } OPTIONAL { ?${distribution} ${prefix}:dateModified ${convertToXsdDate( - distributionDateModified + distributionDateModified, )} } OPTIONAL { ?${distribution} ${prefix}:description ?${distributionDescription} } OPTIONAL { ?${distribution} ${prefix}:inLanguage ?${distributionLanguage} } @@ -239,13 +239,13 @@ function schemaOrgQuery(prefix: string): string { OPTIONAL { ?${dataset} ${prefix}:identifier 
?${identifier} } OPTIONAL { ?${dataset} ${prefix}:alternateName ?${alternateName} } OPTIONAL { ?${dataset} ${prefix}:dateCreated ${convertToXsdDate( - dateCreated + dateCreated, )} } OPTIONAL { ?${dataset} ${prefix}:datePublished ${convertToXsdDate( - datePublished + datePublished, )} } OPTIONAL { ?${dataset} ${prefix}:dateModified ${convertToXsdDate( - dateModified + dateModified, )} } OPTIONAL { ?${dataset} ${prefix}:inLanguage ?${language} } OPTIONAL { ?${dataset} ${prefix}:isBasedOn ?${source} } diff --git a/src/rate.ts b/src/rate.ts index 59066051..9fae109a 100644 --- a/src/rate.ts +++ b/src/rate.ts @@ -16,7 +16,7 @@ const penalties = new Map([ const worstRating = [...penalties].reduce( (score, [, penalty]) => score - penalty, - 100 + 100, ); export function rate(validationResult: Valid): Rating { @@ -28,14 +28,14 @@ export function rate(validationResult: Valid): Rating { [ ...validationResult.errors.match( quad.subject, - shacl('resultMessage') + shacl('resultMessage'), ), - ][0]?.value + ][0]?.value, ); } return map; }, - new Map() + new Map(), ); const appliedPenalties = [...penalties].reduce( @@ -48,7 +48,7 @@ export function rate(validationResult: Valid): Rating { return [...appliedPenalties, new Penalty(properties[0].value, penalty)]; }, - new Array() + new Array(), ); return new Rating(appliedPenalties, worstRating); @@ -57,7 +57,7 @@ export function rate(validationResult: Valid): Rating { export class Penalty { public constructor( public readonly path: string, - public readonly score: number + public readonly score: number, ) {} } @@ -68,11 +68,11 @@ export class Rating { public constructor( readonly penalties: Penalty[], public readonly worstRating: number, - public readonly bestRating = 100 + public readonly bestRating = 100, ) { this.score = penalties.reduce( (score, penalty) => score - penalty.score, - 100 + 100, ); this.explanation = penalties.map(penalty => penalty.path).join(', '); } diff --git a/src/registration.ts b/src/registration.ts index 
b90af27a..17069ef3 100644 --- a/src/registration.ts +++ b/src/registration.ts @@ -11,7 +11,7 @@ export class Registration { /** * If the Registration has become invalid, the date at which it did so. */ - public readonly validUntil?: Date + public readonly validUntil?: Date, ) {} /** @@ -21,12 +21,12 @@ export class Registration { datasets: URL[], statusCode: number, valid: boolean, - date: Date = new Date() + date: Date = new Date(), ): Registration { const registration = new Registration( this.url, this.datePosted, - valid ? undefined : this.validUntil ?? date + valid ? undefined : (this.validUntil ?? date), ); registration._datasets = datasets; registration._statusCode = statusCode; diff --git a/src/server.ts b/src/server.ts index 975c74fc..929cceee 100644 --- a/src/server.ts +++ b/src/server.ts @@ -40,7 +40,7 @@ export async function server( validator: Validator, shacl: DatasetCore, docsUrl = '/', - options?: FastifyServerOptions + options?: FastifyServerOptions, ): Promise> { const server = fastify(options); @@ -91,27 +91,27 @@ export async function server( async function resolveDataset( url: URL, - reply: FastifyReply + reply: FastifyReply, ): Promise { try { return await dereference(url); } catch (e) { if (e instanceof HttpError) { reply.log.info( - `Error at URL ${url.toString()}: ${e.statusCode} ${e.message}` + `Error at URL ${url.toString()}: ${e.statusCode} ${e.message}`, ); if (e.statusCode === 404) { - reply.code(404).send(); + return reply.code(404).send(); } else { - reply.code(406).send(); + return reply.code(406).send(); } } if (e instanceof NoDatasetFoundAtUrl) { reply.log.info( - `No dataset found at URL ${url.toString()}: ${e.message}` + `No dataset found at URL ${url.toString()}: ${e.message}`, ); - reply.code(406).send(); + return reply.code(406).send(); } return null; @@ -123,13 +123,13 @@ export async function server( switch (validation.state) { case 'valid': - reply.send(validation.errors); + await reply.send(validation.errors); return true; 
case 'no-dataset': - reply.code(406).send(); + await reply.code(406).send(); return false; case 'invalid': { - reply.code(400).send(validation.errors); + await reply.code(400).send(validation.errors); return false; } } @@ -143,7 +143,7 @@ export async function server( return await allowedRegistrationDomainStore.contains( result.domain, - result.input + result.input, ); } @@ -153,13 +153,11 @@ export async function server( async (request, reply) => { const url = new URL((request.body as {'@id': string})['@id']); if (!(await domainIsAllowed(url))) { - reply.code(403).send(); - return; + return reply.code(403).send(); } - reply.code(202); // The validate function will reply.send() with any validation warnings. const dataset = await resolveDataset(url, reply); - const valid = dataset ? await validate(dataset, reply) : false; + const valid = dataset ? await validate(dataset, reply.code(202)) : false; if (dataset && valid) { // The URL has validated, so any problems with processing the dataset are now ours. Therefore, make sure to // store the registration so we can come back to that when crawling, even if fetching the datasets fails. @@ -175,7 +173,7 @@ export async function server( } request.log.info( - `Found ${datasetIris.length} datasets at ${url.toString()}` + `Found ${datasetIris.length} datasets at ${url.toString()}`, ); // Update registration with dataset descriptions that we found. @@ -189,7 +187,7 @@ export async function server( // If the dataset did not validate, the validate() function has set a 4xx status code. 
return reply; - } + }, ); server.put( @@ -207,11 +205,11 @@ export async function server( }); request.log.info( `Validated at ${Math.round( - process.memoryUsage().rss / 1024 / 1024 - )} MB memory` + process.memoryUsage().rss / 1024 / 1024, + )} MB memory`, ); return reply; - } + }, ); server.post( @@ -223,7 +221,7 @@ export async function server( status: reply.statusCode, }); return reply; - } + }, ); server.get( @@ -231,7 +229,7 @@ export async function server( {config: rdfSerializerConfig}, async (request, reply) => { return reply.send(shacl); - } + }, ); /** @@ -244,7 +242,7 @@ export async function server( try { return await load( request.raw, - request.headers['content-type'] ?? 'application/ld+json' + request.headers['content-type'] ?? 'application/ld+json', ); } catch (e) { (e as FastifyError).statusCode = 400; @@ -262,7 +260,7 @@ export async function server( resolve(JSON.parse(data)); }); }); - } + }, ); return server; diff --git a/src/transform.ts b/src/transform.ts index d443c865..e741ade4 100644 --- a/src/transform.ts +++ b/src/transform.ts @@ -16,7 +16,7 @@ export class StandardizeSchemaOrgPrefixToHttps extends Transform { _transform( chunk: Quad, encoding: BufferEncoding, - callback: TransformCallback + callback: TransformCallback, ) { const quad = factory.quad( chunk.subject, @@ -24,14 +24,14 @@ export class StandardizeSchemaOrgPrefixToHttps extends Transform { ? 
this.replace(chunk.predicate as NamedNode) : chunk.predicate, this.object(chunk.object), - chunk.graph + chunk.graph, ); callback(null, quad); } replace(node: NamedNode) { return factory.namedNode( - node.value.replace('http://schema.org', 'https://schema.org') + node.value.replace('http://schema.org', 'https://schema.org'), ); } @@ -39,7 +39,7 @@ export class StandardizeSchemaOrgPrefixToHttps extends Transform { if (object.termType === 'Literal') { return factory.literal( object.value, - object.language || this.replace(object.datatype as NamedNode) + object.language || this.replace(object.datatype as NamedNode), ); } diff --git a/src/validator.ts b/src/validator.ts index 4fa00d84..dfbf8cd8 100644 --- a/src/validator.ts +++ b/src/validator.ts @@ -29,7 +29,7 @@ export class ShaclValidator implements Validator { quad.predicate.value === 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type' && (quad.object.value === 'https://schema.org/Dataset' || - quad.object.value === 'http://www.w3.org/ns/dcat#Dataset') + quad.object.value === 'http://www.w3.org/ns/dcat#Dataset'), ); if (datasetIris.size === 0) { return {state: 'no-dataset'}; @@ -82,7 +82,7 @@ const hasViolation = (report: ValidationReport) => report.results.some(result => resultIsViolation(result)); const resultIsViolation = ( - result: ValidationReport.ValidationResult + result: ValidationReport.ValidationResult, ): boolean => { return ( result.severity?.value === shacl('Violation').value || diff --git a/test/crawler.test.ts b/test/crawler.test.ts index 3203ea10..6bab8062 100644 --- a/test/crawler.test.ts +++ b/test/crawler.test.ts @@ -17,7 +17,7 @@ let registrationStore: MockRegistrationStore; let crawler: Crawler; const validator = ( isValid: boolean, - errors: DatasetCore = factory.dataset() + errors: DatasetCore = factory.dataset(), ): Validator => ({ validate: () => Promise.resolve({ @@ -34,12 +34,12 @@ describe('Crawler', () => { new MockDatasetStore(), new MockRatingStore(), validator(true), - Pino({enabled: 
false}) + Pino({enabled: false}), ); }); it('crawls valid URLs', async () => { - storeRegistrationFixture(new URL('https://example.com/valid')); + await storeRegistrationFixture(new URL('https://example.com/valid')); const response = await file('dataset-schema-org-valid.jsonld'); nock('https://example.com') @@ -57,7 +57,7 @@ describe('Crawler', () => { }); it('crawls valid URL with minimal description', async () => { - storeRegistrationFixture(new URL('https://example.com/minimal')); + await storeRegistrationFixture(new URL('https://example.com/minimal')); const response = await file('dataset-schema-org-valid-minimal.jsonld'); nock('https://example.com') @@ -75,7 +75,9 @@ describe('Crawler', () => { }); it('stores error HTTP response status code', async () => { - storeRegistrationFixture(new URL('https://example.com/registered-url')); + await storeRegistrationFixture( + new URL('https://example.com/registered-url'), + ); nock('https://example.com').get('/registered-url').reply(404); await crawler.crawl(new Date('3000-01-01')); @@ -86,7 +88,9 @@ describe('Crawler', () => { }); it('ignores datasets no longer available', async () => { - storeRegistrationFixture(new URL('https://example.com/no-more-datasets')); + await storeRegistrationFixture( + new URL('https://example.com/no-more-datasets'), + ); nock('https://example.com') .get('/no-more-datasets') @@ -104,9 +108,9 @@ describe('Crawler', () => { new MockDatasetStore(), new MockRatingStore(), validator(false), - Pino({enabled: false}) + Pino({enabled: false}), ); - storeRegistrationFixture(new URL('https://example.com/invalid')); + await storeRegistrationFixture(new URL('https://example.com/invalid')); nock('https://example.com') .defaultReplyHeaders({'Content-Type': 'application/ld+json'}) @@ -119,13 +123,13 @@ describe('Crawler', () => { }); }); -function storeRegistrationFixture(url: URL) { +async function storeRegistrationFixture(url: URL) { const registration = new Registration(url, new Date()); const 
updatedRegistration = registration.read( [new URL('https://example.com/dataset1')], 200, true, - new Date('2000-01-01') + new Date('2000-01-01'), ); - registrationStore.store(updatedRegistration); + await registrationStore.store(updatedRegistration); } diff --git a/test/fetch.test.ts b/test/fetch.test.ts index 0abfffbf..adaebbf0 100644 --- a/test/fetch.test.ts +++ b/test/fetch.test.ts @@ -19,16 +19,16 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/valid-dcat-dataset') + new URL('https://example.com/valid-dcat-dataset'), ); expect(datasets).toHaveLength(1); const datasetUri = factory.namedNode( - 'http://data.bibliotheken.nl/id/dataset/rise-alba' + 'http://data.bibliotheken.nl/id/dataset/rise-alba', ); const dataset = datasets[0]; expect( - dataset.has(factory.quad(datasetUri, rdf('type'), dcat('Dataset'))) + dataset.has(factory.quad(datasetUri, rdf('type'), dcat('Dataset'))), ).toBe(true); const distributions = [ @@ -42,9 +42,9 @@ describe('Fetch', () => { factory.quad( distributions[0].object as BlankNode, dct('format'), - factory.literal('application/rdf+xml') - ) - ) + factory.literal('application/rdf+xml'), + ), + ), ).toBe(true); }); @@ -56,7 +56,7 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/minimal-valid-schema-org-dataset') + new URL('https://example.com/minimal-valid-schema-org-dataset'), ); expect(datasets).toHaveLength(1); @@ -72,14 +72,14 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/valid-schema-org-dataset') + new URL('https://example.com/valid-schema-org-dataset'), ); expect(datasets).toHaveLength(1); const dataset = datasets[0]; const dcatEquivalent = await dereference( - 'test/datasets/dataset-dcat-valid.jsonld' + 'test/datasets/dataset-dcat-valid.jsonld', ); 
expect(dataset.size).toStrictEqual(dcatEquivalent.size); expect(dataset.toCanonical()).toStrictEqual(dcatEquivalent.toCanonical()); @@ -89,9 +89,11 @@ describe('Fetch', () => { factory.quad( factory.namedNode('http://data.bibliotheken.nl/id/dataset/rise-alba'), dct('license'), - factory.namedNode('http://creativecommons.org/publicdomain/zero/1.0/') - ) - ) + factory.namedNode( + 'http://creativecommons.org/publicdomain/zero/1.0/', + ), + ), + ), ).toBe(true); expect( dataset.has( @@ -100,10 +102,10 @@ describe('Fetch', () => { dct('created'), factory.literal( '2021-05-27', - factory.namedNode('http://www.w3.org/2001/XMLSchema#date') - ) - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#date'), + ), + ), + ), ).toBe(true); expect( dataset.has( @@ -112,10 +114,10 @@ describe('Fetch', () => { dct('issued'), factory.literal( '2021-05-28', - factory.namedNode('http://www.w3.org/2001/XMLSchema#date') - ) - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#date'), + ), + ), + ), ).toBe(true); expect( dataset.has( @@ -124,61 +126,61 @@ describe('Fetch', () => { dct('modified'), factory.literal( '2021-05-27T09:56:21.370767', - factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime') - ) - ) - ) + factory.namedNode('http://www.w3.org/2001/XMLSchema#dateTime'), + ), + ), + ), ).toBe(true); expect( dataset.has( factory.quad( factory.namedNode('http://data.bibliotheken.nl/id/dataset/rise-alba'), dct('publisher'), - factory.namedNode('https://example.com/publisher') - ) - ) + factory.namedNode('https://example.com/publisher'), + ), + ), ).toBe(true); expect( dataset.has( factory.quad( factory.namedNode('https://example.com/publisher'), rdf('type'), - foaf('Organization') - ) - ) + foaf('Organization'), + ), + ), ).toBe(true); expect( dataset.has( factory.quad( factory.namedNode('https://example.com/publisher'), foaf('mbox'), - factory.literal('datasets@example.com') - ) - ) + factory.literal('datasets@example.com'), + ), + ), ).toBe(true); expect( 
dataset.has( factory.quad( factory.namedNode('https://example.com/creator1'), rdf('type'), - foaf('Person') - ) - ) + foaf('Person'), + ), + ), ).toBe(true); expect( dataset.has( factory.quad( factory.namedNode('https://example.com/creator2'), rdf('type'), - foaf('Person') - ) - ) + foaf('Person'), + ), + ), ).toBe(true); expect([ ...dataset.match( factory.namedNode('http://data.bibliotheken.nl/id/dataset/rise-alba'), dcat('distribution'), - null + null, ), ]).toHaveLength(2); }); @@ -191,7 +193,7 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/valid-schema-org-dataset.ttl') + new URL('https://example.com/valid-schema-org-dataset.ttl'), ); expect(datasets).toHaveLength(1); @@ -202,14 +204,14 @@ describe('Fetch', () => { dataset.has( factory.quad( factory.namedNode( - 'https://www.goudatijdmachine.nl/data/api/items/144' + 'https://www.goudatijdmachine.nl/data/api/items/144', ), dcat('accessURL'), factory.namedNode( - 'https://www.goudatijdmachine.nl/wp-content/uploads/sites/7/2021/09/Totaal_perceel_Plaand_EPSG_4326.geojson' - ) - ) - ) + 'https://www.goudatijdmachine.nl/wp-content/uploads/sites/7/2021/09/Totaal_perceel_Plaand_EPSG_4326.geojson', + ), + ), + ), ).toBe(true); }); @@ -221,7 +223,7 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/valid-schema-org-dataset') + new URL('https://example.com/valid-schema-org-dataset'), ); expect(datasets).toHaveLength(1); @@ -235,7 +237,7 @@ describe('Fetch', () => { .reply(200, response); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/valid-schema-org-dataset') + new URL('https://example.com/valid-schema-org-dataset'), ); expect(datasets).toHaveLength(1); @@ -265,8 +267,8 @@ describe('Fetch', () => { it('handles empty dataset response', async () => { nock('https://example.com').get('/200').reply(200); - expect( - async () => await 
fetchDatasetsAsArray(new URL('https://example.com/200')) + await expect( + fetchDatasetsAsArray(new URL('https://example.com/200')), ).rejects.toThrow(NoDatasetFoundAtUrl); }); @@ -284,7 +286,7 @@ describe('Fetch', () => { }); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/datasets/hydra-page1.jsonld') + new URL('https://example.com/datasets/hydra-page1.jsonld'), ); expect(datasets).toHaveLength(3); @@ -304,7 +306,7 @@ describe('Fetch', () => { }); const datasets = await fetchDatasetsAsArray( - new URL('https://example.com/datasets/hydra-page1.ttl') + new URL('https://example.com/datasets/hydra-page1.ttl'), ); expect(datasets).toHaveLength(2); @@ -324,7 +326,7 @@ async function dereference(file: string): Promise { const stream = pipeline( data, new StandardizeSchemaOrgPrefixToHttps(), - () => {} // Noop, just throw errors. + () => {}, // Noop, just throw errors. ); return await factory.dataset().import(stream); } diff --git a/test/mock.ts b/test/mock.ts index bf7b27b8..ddd60524 100644 --- a/test/mock.ts +++ b/test/mock.ts @@ -20,14 +20,14 @@ export class MockRegistrationStore implements RegistrationStore { return Promise.resolve( [...this.registrations.values()].filter( (registration: Registration) => - registration.dateRead && registration.dateRead < date - ) + registration.dateRead && registration.dateRead < date, + ), ); } isRegistered(url: URL) { return this.all().some( - registration => registration.url.toString() === url.toString() + registration => registration.url.toString() === url.toString(), ); } diff --git a/test/rate.test.ts b/test/rate.test.ts index f789d0d1..8d753f21 100644 --- a/test/rate.test.ts +++ b/test/rate.test.ts @@ -5,7 +5,7 @@ import {validate} from './validator.test'; describe('Rate', () => { it('rates minimal dataset description', async () => { const validationResult = (await validate( - 'dataset-dcat-valid-minimal.jsonld' + 'dataset-dcat-valid-minimal.jsonld', )) as Valid; 
expect(rate(validationResult).worstRating).toBe(25); expect(rate(validationResult).score).toBe(25); @@ -13,7 +13,7 @@ describe('Rate', () => { it('rates complete dataset description', async () => { const validationResult = (await validate( - 'dataset-dcat-valid.jsonld' + 'dataset-dcat-valid.jsonld', )) as Valid; const rating = rate(validationResult); expect(rating.score).toBe(100); diff --git a/test/registration.test.ts b/test/registration.test.ts index 1574de2d..a5e7dbbf 100644 --- a/test/registration.test.ts +++ b/test/registration.test.ts @@ -5,7 +5,7 @@ describe('Registration', () => { it('must toggle from valid to invalid', () => { const registration = new Registration( new URL('https://example.com/registration'), - new Date() + new Date(), ); const updatedRegistration = registration.read([], 200, true); diff --git a/test/server.test.ts b/test/server.test.ts index 0a70cb2e..d8aa8a95 100644 --- a/test/server.test.ts +++ b/test/server.test.ts @@ -24,7 +24,7 @@ describe('Server', () => { new ShaclValidator(shacl), shacl, '/', - {logger: true} + {logger: true}, ); nock.back.fixtures = dirname(fileURLToPath(import.meta.url)) + '/http'; @@ -233,13 +233,15 @@ describe('Server', () => { }), }); nockDone(); + // sleep 2 seconds to allow the async store to complete + await new Promise(resolve => setTimeout(resolve, 2000)); // Validation succeeds, so 202 to the client, even if fetching datasets fails. 
expect(response.statusCode).toEqual(202); expect( registrationStore.isRegistered( - new URL('https://netwerkdigitaalerfgoed.nl/fails') - ) + new URL('https://netwerkdigitaalerfgoed.nl/fails'), + ), ).toBe(true); }); diff --git a/test/validator.test.ts b/test/validator.test.ts index 1b0b2aca..94df6fa1 100644 --- a/test/validator.test.ts +++ b/test/validator.test.ts @@ -14,7 +14,7 @@ const validator = await ShaclValidator.fromUrl('shacl/register.ttl'); describe('Validator', () => { it('accepts minimal valid Schema.org dataset', async () => { const report = (await validate( - 'dataset-schema-org-valid-minimal.jsonld' + 'dataset-schema-org-valid-minimal.jsonld', )) as Valid; expect(report.state).toEqual('valid'); expectViolations(report, ['https://schema.org/description']); @@ -24,7 +24,7 @@ describe('Validator', () => { it('accepts minimal valid Schema.org dataset in Turtle', async () => { const report = await validate( 'dataset-http-schema-org-valid.ttl', - new StreamParser() + new StreamParser(), ); expect(report.state).toEqual('valid'); }); @@ -32,7 +32,7 @@ describe('Validator', () => { it('accepts minimal valid Schema.org dataset in Microdata', async () => { const report = await validate( 'dataset-schema-org-valid-microdata.html', - new MicrodataRdfParser() + new MicrodataRdfParser(), ); expect(report.state).toEqual('valid'); }); @@ -40,14 +40,14 @@ describe('Validator', () => { it('accepts minimal valid Schema.org dataset in HTML+RDFa', async () => { const report = await validate( 'dataset-schema-org-valid-rdfa.html', - new RdfaParser() + new RdfaParser(), ); expect(report.state).toEqual('valid'); }); it('accepts minimal valid Schema.org dataset with separate organization', async () => { const report = await validate( - 'dataset-schema-org-valid-plus-organization.jsonld' + 'dataset-schema-org-valid-plus-organization.jsonld', ); expect(report.state).toEqual('valid'); }); @@ -64,7 +64,7 @@ describe('Validator', () => { it('accepts valid Schema.org dataset without 
publisher', async () => { const report = await validate( - 'dataset-schema-org-valid-no-publisher.jsonld' + 'dataset-schema-org-valid-no-publisher.jsonld', ); expect(report.state).toEqual('valid'); expect(report.state === 'valid'); @@ -72,8 +72,8 @@ (report as Valid).errors.match( null, shacl('resultSeverity'), - shacl('Warning') - ).size + shacl('Warning'), + ).size, ).toEqual(1); }); @@ -176,20 +176,20 @@ const dataset = async (filename: string, parser?: Transform) => { fs .createReadStream(`test/datasets/${filename}`) .pipe(parser ?? (new JsonLdParser() as unknown as Transform)) - .pipe(new StandardizeSchemaOrgPrefixToHttps()) + .pipe(new StandardizeSchemaOrgPrefixToHttps()), )) as unknown as Dataset; }; const expectViolations = ( report: InvalidDataset | Valid, - violationPaths: string[] + violationPaths: string[], ) => violationPaths.forEach(violationPath => expect( report.errors.match( null, shacl('resultPath'), - rdf.namedNode(violationPath) - ).size - ).toEqual(1) + rdf.namedNode(violationPath), + ).size, + ).toEqual(1), ); diff --git a/tsconfig.json b/tsconfig.json index e154762d..df3df3c4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -16,6 +16,7 @@ "declaration": true }, "include": [ - "src/" + "src/", + "test/" ] }