feat: update API controllers, validations, and Vue components
All checks were successful
CI / container-job (push) Successful in 49s
All checks were successful
CI / container-job (push) Successful in 49s
- Modified Api/Authors.Controller.ts to use only personal types and sort by dataset_count. - Completely rewritten AvatarController.ts. - Added new Api/CollectionsController.ts for querying collections and collection_roles. - Modified Api/DatasetController.ts to preload titles, identifier and order by server_date_published. - Modified FileController.ts to serve files from /storage/app/data/ instead of /storage/app/public. - Added new Api/UserController for requesting submitters (getSubmitters). - Improved OaiController.ts with performant DB queries for better ResumptionToken handling. - Modified Submitter/DatasetController.ts by adding a categorize method for library classification. - Rewritten ResumptionToken.ts. - Improved TokenWorkerService.ts to utilize browser fingerprint. - Edited dataset.ts by adding the doiIdentifier property. - Enhanced person.ts to improve the fullName property. - Completely rewritten AsideMenuItem.vue component. - Updated CardBoxClient.vue to use TypeScript. - Added new CardBoxDataset.vue for displaying recent datasets on the dashboard. - Completely rewritten TableSampleClients.vue for the dashboard. - Completely rewritten UserAvatar.vue. - Made small layout changes in Dashboard.vue. - Added new Category.vue for browsing scientific collections. - Adapted the Pinia store in main.ts. - Added additional routes in start/routes.ts and start/api/routes.ts. - Improved referenceValidation.ts for better ISBN existence checking. - NPM dependency updates.
This commit is contained in:
parent
36cd7a757b
commit
b540547e4c
34 changed files with 1757 additions and 1018 deletions
|
@ -19,14 +19,13 @@ import XmlModel from '#app/Library/XmlModel';
|
|||
import logger from '@adonisjs/core/services/logger';
|
||||
import ResumptionToken from '#app/Library/Oai/ResumptionToken';
|
||||
// import Config from '@ioc:Adonis/Core/Config';
|
||||
import config from '@adonisjs/core/services/config'
|
||||
import config from '@adonisjs/core/services/config';
|
||||
// import { inject } from '@adonisjs/fold';
|
||||
import { inject } from '@adonisjs/core'
|
||||
import { inject } from '@adonisjs/core';
|
||||
// import { TokenWorkerContract } from "MyApp/Models/TokenWorker";
|
||||
import TokenWorkerContract from '#library/Oai/TokenWorkerContract';
|
||||
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
|
||||
|
||||
|
||||
interface XslTParameter {
|
||||
[key: string]: any;
|
||||
}
|
||||
|
@ -35,12 +34,14 @@ interface Dictionary {
|
|||
[index: string]: string;
|
||||
}
|
||||
|
||||
interface ListParameter {
|
||||
interface PagingParameter {
|
||||
cursor: number;
|
||||
totalIds: number;
|
||||
totalLength: number;
|
||||
start: number;
|
||||
reldocIds: (number | null)[];
|
||||
nextDocIds: number[];
|
||||
activeWorkIds: number[];
|
||||
metadataPrefix: string;
|
||||
queryParams: Object;
|
||||
}
|
||||
|
||||
@inject()
|
||||
|
@ -49,6 +50,7 @@ export default class OaiController {
|
|||
private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
|
||||
private xsltParameter: XslTParameter;
|
||||
|
||||
private firstPublishedDataset: Dataset | null;
|
||||
/**
|
||||
* Holds xml representation of document information to be processed.
|
||||
*
|
||||
|
@ -57,7 +59,6 @@ export default class OaiController {
|
|||
private xml: XMLBuilder;
|
||||
private proc;
|
||||
|
||||
|
||||
constructor(public tokenWorker: TokenWorkerContract) {
|
||||
// Load the XSLT file
|
||||
this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
|
||||
|
@ -85,9 +86,9 @@ export default class OaiController {
|
|||
let earliestDateFromDb;
|
||||
// const oaiRequest: OaiParameter = request.body;
|
||||
try {
|
||||
const firstPublishedDataset: Dataset | null = await Dataset.earliestPublicationDate();
|
||||
firstPublishedDataset != null &&
|
||||
(earliestDateFromDb = firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
|
||||
this.firstPublishedDataset = await Dataset.earliestPublicationDate();
|
||||
this.firstPublishedDataset != null &&
|
||||
(earliestDateFromDb = this.firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
|
||||
this.xsltParameter['earliestDatestamp'] = earliestDateFromDb;
|
||||
// start the request
|
||||
await this.handleRequest(oaiRequest, request);
|
||||
|
@ -162,22 +163,19 @@ export default class OaiController {
|
|||
} else if (verb == 'GetRecord') {
|
||||
await this.handleGetRecord(oaiRequest);
|
||||
} else if (verb == 'ListRecords') {
|
||||
await this.handleListRecords(oaiRequest);
|
||||
// Get browser fingerprint from the request:
|
||||
const browserFingerprint = this.getBrowserFingerprint(request);
|
||||
await this.handleListRecords(oaiRequest, browserFingerprint);
|
||||
} else if (verb == 'ListIdentifiers') {
|
||||
await this.handleListIdentifiers(oaiRequest);
|
||||
// Get browser fingerprint from the request:
|
||||
const browserFingerprint = this.getBrowserFingerprint(request);
|
||||
await this.handleListIdentifiers(oaiRequest, browserFingerprint);
|
||||
} else if (verb == 'ListSets') {
|
||||
await this.handleListSets();
|
||||
} else {
|
||||
this.handleIllegalVerb();
|
||||
}
|
||||
} else {
|
||||
// // try {
|
||||
// // console.log("Async code example.")
|
||||
// const err = new PageNotFoundException("verb not found");
|
||||
// throw err;
|
||||
// // } catch (error) { // manually catching
|
||||
// // next(error); // passing to default middleware error handler
|
||||
// // }
|
||||
throw new OaiModelException(
|
||||
StatusCodes.INTERNAL_SERVER_ERROR,
|
||||
'The verb provided in the request is illegal.',
|
||||
|
@ -187,11 +185,11 @@ export default class OaiController {
|
|||
}
|
||||
|
||||
protected handleIdentify() {
|
||||
const email = process.env.OAI_EMAIL || 'repository@geosphere.at';
|
||||
const repositoryName = 'Tethys RDR';
|
||||
const repIdentifier = 'tethys.at';
|
||||
const sampleIdentifier = 'oai:' + repIdentifier + ':1'; //$this->_configuration->getSampleIdentifier();
|
||||
|
||||
// Get configuration values from environment or a dedicated configuration service
|
||||
const email = process.env.OAI_EMAIL ?? 'repository@geosphere.at';
|
||||
const repositoryName = process.env.OAI_REPOSITORY_NAME ?? 'Tethys RDR';
|
||||
const repIdentifier = process.env.OAI_REP_IDENTIFIER ?? 'tethys.at';
|
||||
const sampleIdentifier = `oai:${repIdentifier}:1`;
|
||||
// Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null;
|
||||
// earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published);
|
||||
|
||||
|
@ -216,7 +214,7 @@ export default class OaiController {
|
|||
|
||||
const sets: { [key: string]: string } = {
|
||||
'open_access': 'Set for open access licenses',
|
||||
'openaire_data': "OpenAIRE",
|
||||
'openaire_data': 'OpenAIRE',
|
||||
'doc-type:ResearchData': 'Set for document type ResearchData',
|
||||
...(await this.getSetsForDatasetTypes()),
|
||||
...(await this.getSetsForCollections()),
|
||||
|
@ -234,7 +232,15 @@ export default class OaiController {
|
|||
const repIdentifier = 'tethys.at';
|
||||
this.xsltParameter['repIdentifier'] = repIdentifier;
|
||||
|
||||
// Validate that required parameter exists early
|
||||
if (!('identifier' in oaiRequest)) {
|
||||
throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
|
||||
}
|
||||
|
||||
// Validate and extract the dataset identifier from the request
|
||||
const dataId = this.validateAndGetIdentifier(oaiRequest);
|
||||
|
||||
// Retrieve dataset with associated XML cache and collection roles
|
||||
const dataset = await Dataset.query()
|
||||
.where('publish_id', dataId)
|
||||
.preload('xmlCache')
|
||||
|
@ -251,59 +257,61 @@ export default class OaiController {
|
|||
);
|
||||
}
|
||||
|
||||
// Validate and set the metadata prefix parameter
|
||||
const metadataPrefix = this.validateAndGetMetadataPrefix(oaiRequest);
|
||||
this.xsltParameter['oai_metadataPrefix'] = metadataPrefix;
|
||||
// do not deliver datasets which are restricted by document state defined in deliveringStates
|
||||
|
||||
// Ensure that the dataset is in an exportable state
|
||||
this.validateDatasetState(dataset);
|
||||
|
||||
// add xml elements
|
||||
// Build the XML for the dataset record and add it to the root node
|
||||
const datasetNode = this.xml.root().ele('Datasets');
|
||||
await this.createXmlRecord(dataset, datasetNode);
|
||||
}
|
||||
|
||||
/**
 * Handles the OAI verb 'ListIdentifiers'.
 *
 * @param oaiRequest parsed OAI request arguments
 * @param browserFingerprint per-client fingerprint used to scope the resumption token
 */
protected async handleListIdentifiers(oaiRequest: Dictionary, browserFingerprint: string) {
    // Lazily (re-)connect the token store before any token work.
    if (!this.tokenWorker.isConnected) {
        await this.tokenWorker.connect();
    }

    const maxIdentifier: number = config.get('oai.max.listidentifiers', 100);
    await this.handleLists(oaiRequest, maxIdentifier, browserFingerprint);
}
|
||||
|
||||
/**
 * Handles the OAI verb 'ListRecords'.
 *
 * @param oaiRequest parsed OAI request arguments
 * @param browserFingerprint per-client fingerprint used to scope the resumption token
 */
protected async handleListRecords(oaiRequest: Dictionary, browserFingerprint: string) {
    // Lazily (re-)connect the token store before any token work.
    if (!this.tokenWorker.isConnected) {
        await this.tokenWorker.connect();
    }

    const maxRecords: number = config.get('oai.max.listrecords', 100);
    await this.handleLists(oaiRequest, maxRecords, browserFingerprint);
}
|
||||
|
||||
/**
 * Shared implementation for ListRecords and ListIdentifiers: resolves the
 * page of publish_ids to deliver (either from a resumptionToken or from a
 * fresh filtered query), renders the matching datasets, and stores a new
 * resumption token when more results remain.
 *
 * @param oaiRequest parsed OAI request arguments
 * @param maxRecords page size from configuration
 * @param browserFingerprint per-client fingerprint forwarded to the token store
 */
private async handleLists(oaiRequest: Dictionary, maxRecords: number, browserFingerprint: string) {
    const repIdentifier = 'tethys.at';
    this.xsltParameter['repIdentifier'] = repIdentifier;
    const datasetNode = this.xml.root().ele('Datasets');

    // Pagination state shared by the token / no-token code paths below.
    const paginationParams: PagingParameter = {
        cursor: 0,
        totalLength: 0,
        start: maxRecords + 1,
        nextDocIds: [],
        activeWorkIds: [],
        metadataPrefix: '',
        queryParams: {},
    };

    if ('resumptionToken' in oaiRequest) {
        await this.handleResumptionToken(oaiRequest, maxRecords, paginationParams);
    } else {
        // no resumptionToken is given
        await this.handleNoResumptionToken(oaiRequest, paginationParams, maxRecords);
    }

    const nextIds: number[] = paginationParams.nextDocIds;
    const workIds: number[] = paginationParams.activeWorkIds;

    // no records returned
    if (workIds.length === 0) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            'The combination of the given values results in an empty list.',
            // NOTE(review): error-code argument not visible in this chunk —
            // noRecordsMatch is the OAI-PMH code for an empty list; confirm.
            OaiErrorCodes.NORECORDSMATCH,
        );
    }

    const datasets = await Dataset.query()
        .whereIn('publish_id', workIds)
        .preload('xmlCache')
        .preload('collections', (builder) => {
            builder.preload('collectionRole');
        })
        .orderBy('publish_id');

    for (const dataset of datasets) {
        await this.createXmlRecord(dataset, datasetNode);
    }

    // Persist the look-ahead ids so the client can continue with a resumptionToken.
    await this.setResumptionToken(nextIds, paginationParams, browserFingerprint);
}
|
||||
|
||||
/**
 * First-page handling (no resumptionToken given): validates metadataPrefix,
 * builds the filtered dataset query from the request arguments, and fills
 * the pagination state with the first two pages of results.
 */
private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) {
    this.validateMetadataPrefix(oaiRequest, paginationParams);
    // Only datasets in a deliverable server_state are exported.
    const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
        'server_state',
        this.deliveringDocumentStates,
    );
    this.applySetFilter(finder, oaiRequest);
    this.applyDateFilters(finder, oaiRequest);
    await this.fetchAndSetResults(finder, paginationParams, oaiRequest, maxRecords);
}
|
||||
|
||||
/**
 * Executes the filtered query twice: once (cloned) for the total hit count,
 * once for the first two pages of publish_ids. Fills paginationParams with
 * the page to deliver now (activeWorkIds) and the look-ahead page
 * (nextDocIds), and remembers the request so follow-up pages can rebuild
 * the same query.
 */
private async fetchAndSetResults(
    finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
    paginationParams: PagingParameter,
    oaiRequest: Dictionary,
    maxRecords: number,
) {
    // Total number of matching datasets (reported in the resumptionToken node).
    const totalResult = await finder
        .clone()
        .count('* as total')
        .first()
        .then((res) => res?.$extras.total);
    paginationParams.totalLength = Number(totalResult);

    // Fetch two pages at once: the current page plus the look-ahead page.
    const combinedRecords: Dataset[] = await finder
        .select('publish_id')
        .orderBy('publish_id')
        .offset(0)
        .limit(maxRecords * 2);

    // BUGFIX: split at maxRecords, not at a hard-coded 100 — the two only
    // coincide when the configured page size happens to be exactly 100.
    paginationParams.activeWorkIds = combinedRecords.slice(0, maxRecords).map((dat) => Number(dat.publish_id));
    paginationParams.nextDocIds = combinedRecords.slice(maxRecords).map((dat) => Number(dat.publish_id));

    // No resumption token was used – set queryParams from the current oaiRequest.
    paginationParams.queryParams = {
        ...oaiRequest,
        deliveringStates: this.deliveringDocumentStates,
    };
}
|
||||
|
||||
/**
 * Continuation handling: loads the stored resumption token, restores the
 * pagination state from it, and pre-fetches the ids of the page after the
 * one being delivered now.
 *
 * @throws OaiModelException with BADRESUMPTIONTOKEN when the token has
 *         expired or is unknown to the token store.
 */
private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, paginationParams: PagingParameter) {
    const resParam = oaiRequest['resumptionToken'];
    const token = await this.tokenWorker.get(resParam);

    if (!token) {
        throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'cache is outdated.', OaiErrorCodes.BADRESUMPTIONTOKEN);
    }

    // Restore pagination state from the stored token.
    paginationParams.cursor = token.startPosition - 1;
    paginationParams.start = token.startPosition + maxRecords;
    paginationParams.totalLength = token.totalIds;
    paginationParams.activeWorkIds = token.documentIds;
    paginationParams.metadataPrefix = token.metadataPrefix;
    paginationParams.queryParams = token.queryParams;
    this.xsltParameter['oai_metadataPrefix'] = token.metadataPrefix;

    // Look ahead: fetch the ids for the page after this one.
    const finder = this.buildDatasetQueryViaToken(token);
    const nextRecords: Dataset[] = await this.fetchNextRecords(finder, token, maxRecords);
    paginationParams.nextDocIds = nextRecords.map((dat) => Number(dat.publish_id));
}
|
||||
|
||||
/**
 * Stores a resumption token for the remaining ids (if any) and sets the
 * resumptionToken-node parameters for the XSLT output.
 *
 * @param nextIds publish_ids of the next page; empty means no token is needed
 * @param browserFingerprint scopes the stored token to the requesting client
 */
private async setResumptionToken(nextIds: number[], paginationParams: PagingParameter, browserFingerprint: string) {
    const countRestIds = nextIds.length;
    if (countRestIds > 0) {
        const token = new ResumptionToken();
        token.startPosition = paginationParams.start;
        token.totalIds = paginationParams.totalLength;
        token.documentIds = nextIds;
        token.metadataPrefix = paginationParams.metadataPrefix;
        token.queryParams = paginationParams.queryParams;
        const res: string = await this.tokenWorker.set(token, browserFingerprint);
        this.setParamResumption(res, paginationParams.cursor, paginationParams.totalLength);
    }
}
|
||||
|
||||
/**
 * Rebuilds the filtered dataset query from the request arguments stored in
 * a resumption token, so continuation pages see the same result set as the
 * initial request.
 */
private buildDatasetQueryViaToken(token: ResumptionToken) {
    const finder = Dataset.query();
    const originalQuery = token.queryParams || {};
    // Fall back to the instance default when the token predates queryParams.
    const deliveringStates = originalQuery.deliveringStates || this.deliveringDocumentStates;

    finder.whereIn('server_state', deliveringStates);
    this.applySetFilter(finder, originalQuery);
    this.applyDateFilters(finder, originalQuery);

    return finder;
}
|
||||
|
||||
/**
 * Fetches the publish_ids of the page *after* the one currently being
 * delivered (the look-ahead page stored in the next resumption token).
 */
private async fetchNextRecords(
    finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
    token: ResumptionToken,
    maxRecords: number,
) {
    return finder
        .select('publish_id')
        .orderBy('publish_id')
        // Skip everything up to and including the current page.
        .offset(token.startPosition - 1 + maxRecords)
        // BUGFIX: the look-ahead page size must follow the configured
        // maxRecords, not a hard-coded 100 (identical only when config is 100).
        .limit(maxRecords);
}
|
||||
|
||||
/**
 * Ensures the request carries a metadataPrefix argument and propagates it to
 * the pagination state and the XSLT parameters.
 *
 * @throws OaiModelException with BADARGUMENT when metadataPrefix is missing.
 */
private validateMetadataPrefix(oaiRequest: Dictionary, paginationParams: PagingParameter) {
    if (!('metadataPrefix' in oaiRequest)) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            'The prefix of the metadata argument is unknown.',
            OaiErrorCodes.BADARGUMENT,
        );
    }
    paginationParams.metadataPrefix = oaiRequest['metadataPrefix'];
    this.xsltParameter['oai_metadataPrefix'] = paginationParams.metadataPrefix;
}
|
||||
|
||||
let finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();
|
||||
// add server state restrictions
|
||||
finder.whereIn('server_state', this.deliveringDocumentStates);
|
||||
if ('set' in oaiRequest) {
|
||||
const set = oaiRequest['set'] as string;
|
||||
const setArray = set.split(':');
|
||||
private applySetFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
|
||||
if ('set' in queryParams) {
|
||||
const [setType, setValue] = queryParams['set'].split(':');
|
||||
|
||||
if (setArray[0] == 'data-type') {
|
||||
if (setArray.length == 2 && setArray[1]) {
|
||||
finder.where('type', setArray[1]);
|
||||
}
|
||||
} else if (setArray[0] == 'open_access') {
|
||||
const openAccessLicences = ['CC-BY-4.0', 'CC-BY-SA-4.0'];
|
||||
finder.andWhereHas('licenses', (query) => {
|
||||
query.whereIn('name', openAccessLicences);
|
||||
});
|
||||
} else if (setArray[0] == 'ddc') {
|
||||
if (setArray.length == 2 && setArray[1] != '') {
|
||||
finder.andWhereHas('collections', (query) => {
|
||||
query.where('number', setArray[1]);
|
||||
switch (setType) {
|
||||
case 'data-type':
|
||||
setValue && finder.where('type', setValue);
|
||||
break;
|
||||
case 'open_access':
|
||||
finder.andWhereHas('licenses', (query) => {
|
||||
query.whereIn('name', ['CC-BY-4.0', 'CC-BY-SA-4.0']);
|
||||
});
|
||||
}
|
||||
break;
|
||||
case 'ddc':
|
||||
setValue &&
|
||||
finder.andWhereHas('collections', (query) => {
|
||||
query.where('number', setValue);
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// const timeZone = "Europe/Vienna"; // Canonical time zone name
|
||||
// &from=2020-09-03&until2020-09-03
|
||||
// &from=2020-09-11&until=2021-05-11
|
||||
if ('from' in oaiRequest && 'until' in oaiRequest) {
|
||||
const from = oaiRequest['from'] as string;
|
||||
let fromDate = dayjs(from); //.tz(timeZone);
|
||||
const until = oaiRequest['until'] as string;
|
||||
let untilDate = dayjs(until); //.tz(timeZone);
|
||||
if (!fromDate.isValid() || !untilDate.isValid()) {
|
||||
throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'Date Parameter is not valid.', OaiErrorCodes.BADARGUMENT);
|
||||
}
|
||||
fromDate = dayjs.tz(from, 'Europe/Vienna');
|
||||
untilDate = dayjs.tz(until, 'Europe/Vienna');
|
||||
private applyDateFilters(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
|
||||
const { from, until } = queryParams;
|
||||
|
||||
if (from.length != until.length) {
|
||||
throw new OaiModelException(
|
||||
StatusCodes.INTERNAL_SERVER_ERROR,
|
||||
'The request has different granularities for the from and until parameters.',
|
||||
OaiErrorCodes.BADARGUMENT,
|
||||
);
|
||||
}
|
||||
fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
|
||||
untilDate.hour() == 0 && (untilDate = untilDate.endOf('day'));
|
||||
if (from && until) {
|
||||
this.handleFromUntilFilter(finder, from, until);
|
||||
} else if (from) {
|
||||
this.handleFromFilter(finder, from);
|
||||
} else if (until) {
|
||||
this.handleUntilFilter(finder, until);
|
||||
}
|
||||
}
|
||||
|
||||
finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
|
||||
} else if ('from' in oaiRequest && !('until' in oaiRequest)) {
|
||||
const from = oaiRequest['from'] as string;
|
||||
let fromDate = dayjs(from);
|
||||
if (!fromDate.isValid()) {
|
||||
throw new OaiModelException(
|
||||
StatusCodes.INTERNAL_SERVER_ERROR,
|
||||
'From date parameter is not valid.',
|
||||
OaiErrorCodes.BADARGUMENT,
|
||||
);
|
||||
}
|
||||
fromDate = dayjs.tz(from, 'Europe/Vienna');
|
||||
fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
|
||||
/**
 * Applies a combined from/until date range filter.
 *
 * @throws OaiModelException with BADARGUMENT when the two arguments use
 *         different granularities (date vs. full timestamp) or do not parse.
 */
private handleFromUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string, until: string) {
    const fromDate = this.parseDateWithValidation(from, 'From');
    const untilDate = this.parseDateWithValidation(until, 'Until');

    // OAI-PMH requires from and until to share the same granularity;
    // comparing string lengths distinguishes YYYY-MM-DD from full timestamps.
    if (from.length !== until.length) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            'The request has different granularities for the from and until parameters.',
            OaiErrorCodes.BADARGUMENT,
        );
    }

    finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
}
|
||||
|
||||
/**
 * Applies a lower-bound date filter (from).
 *
 * @throws OaiModelException with NORECORDSMATCH when 'from' lies in the
 *         future, since that necessarily yields an empty list.
 */
private handleFromFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string) {
    const fromDate = this.parseDateWithValidation(from, 'From');
    const now = dayjs();

    if (fromDate.isAfter(now)) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            'Given from date is greater than now. The given values results in an empty list.',
            OaiErrorCodes.NORECORDSMATCH,
        );
    }

    finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
}
|
||||
|
||||
/**
 * Applies an upper-bound date filter (until).
 *
 * @throws OaiModelException with NORECORDSMATCH when 'until' predates the
 *         repository's earliest publication, since that yields an empty list.
 */
private handleUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, until: string) {
    const untilDate = this.parseDateWithValidation(until, 'Until');

    // firstPublishedDataset is cached elsewhere in this controller; presumably
    // populated before list requests reach this point — TODO confirm non-null.
    const earliestPublicationDate = dayjs(this.firstPublishedDataset?.server_date_published.toISO());

    if (earliestPublicationDate.isAfter(untilDate)) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            'earliestDatestamp is greater than given until date. The given values results in an empty list.',
            OaiErrorCodes.NORECORDSMATCH,
        );
    }

    finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
}
|
||||
|
||||
/**
 * Parses an OAI date argument, throwing BADARGUMENT when it does not parse.
 * The value is then re-interpreted in the Europe/Vienna timezone; midnight
 * values (date-only granularity) are widened to the whole day — start of day
 * for 'From', end of day otherwise.
 *
 * @param dateStr raw request argument
 * @param label 'From' or 'Until' — used in the error message and to pick
 *              the day boundary
 */
private parseDateWithValidation(dateStr: string, label: string) {
    let date = dayjs(dateStr);
    if (!date.isValid()) {
        throw new OaiModelException(
            StatusCodes.INTERNAL_SERVER_ERROR,
            `${label} date parameter is not valid.`,
            OaiErrorCodes.BADARGUMENT,
        );
    }
    // Re-read the value in the repository's local timezone.
    date = dayjs.tz(dateStr, 'Europe/Vienna');
    return date.hour() === 0 ? (label === 'From' ? date.startOf('day') : date.endOf('day')) : date;
}
|
||||
|
||||
private setParamResumption(res: string, cursor: number, totalIds: number) {
|
||||
|
@ -641,4 +698,30 @@ export default class OaiController {
|
|||
this.xsltParameter['oai_error_code'] = 'badVerb';
|
||||
this.xsltParameter['oai_error_message'] = 'The verb provided in the request is illegal.';
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to build a browser fingerprint by combining:
|
||||
* - User-Agent header,
|
||||
* - the IP address,
|
||||
* - Accept-Language header,
|
||||
* - current timestamp rounded to the hour.
|
||||
*
|
||||
* Every new hour, this will return a different fingerprint.
|
||||
*/
|
||||
/**
 * Builds a browser fingerprint by combining:
 * - User-Agent header,
 * - the IP address (client IP from X-Forwarded-For when behind a proxy),
 * - Accept-Language header,
 * - current timestamp rounded to the hour.
 *
 * Every new hour, this returns a different fingerprint, which bounds the
 * lifetime of the resumption tokens keyed by it.
 */
private getBrowserFingerprint(request: Request): string {
    const userAgent = request.header('user-agent') || 'unknown';
    // Check for X-Forwarded-For header to use the client IP from the proxy if available.
    const xForwardedFor = request.header('x-forwarded-for');
    let ip = request.ip();
    if (xForwardedFor) {
        // X-Forwarded-For may contain a comma-separated list of IPs; the first one is the client IP.
        ip = xForwardedFor.split(',')[0].trim();
    }
    const locale = request.header('accept-language') || 'default';
    // Round the current time to the start of the hour.
    const timestampHour = dayjs().startOf('hour').format('YYYY-MM-DDTHH');
    return `${userAgent}-${ip}-${locale}-${timestampHour}`;
}
|
||||
}
|
||||
|
|
Loading…
Add table
editor.link_modal.header
Reference in a new issue