feat: update API controllers, validations, and Vue components
All checks were successful
CI / container-job (push) Successful in 49s
- Modified Api/AuthorsController.ts to use only personal name types and sort by dataset_count.
- Completely rewritten AvatarController.ts.
- Added new Api/CollectionsController.ts for querying collections and collection_roles.
- Modified Api/DatasetController.ts to preload titles and identifier and to order by server_date_published.
- Modified FileController.ts to serve files from /storage/app/data/ instead of /storage/app/public.
- Added new Api/UserController for requesting submitters (getSubmitters).
- Improved OaiController.ts with performant DB queries for better ResumptionToken handling.
- Modified Submitter/DatasetController.ts by adding a categorize method for library classification.
- Rewritten ResumptionToken.ts.
- Improved TokenWorkerService.ts to utilize the browser fingerprint.
- Edited dataset.ts by adding the doiIdentifier property.
- Enhanced person.ts to improve the fullName property.
- Completely rewritten AsideMenuItem.vue component.
- Updated CardBoxClient.vue to use TypeScript.
- Added new CardBoxDataset.vue for displaying recent datasets on the dashboard.
- Completely rewritten TableSampleClients.vue for the dashboard.
- Completely rewritten UserAvatar.vue.
- Made small layout changes in Dashboard.vue.
- Added new Category.vue for browsing scientific collections.
- Adapted the pinia store in main.ts.
- Added additional routes in start/routes.ts and start/api/routes.ts.
- Improved referenceValidation.ts for better ISBN existence checking.
- NPM dependency updates.
This commit is contained in:
parent 36cd7a757b
commit b540547e4c
34 changed files with 1757 additions and 1018 deletions
@@ -9,12 +9,14 @@ export default class AuthorsController {
         // where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
         // where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
         const authors = await Person.query()
+            .where('name_type', 'Personal')
             .whereHas('datasets', (dQuery) => {
                 dQuery.wherePivot('role', 'author');
             })
             .withCount('datasets', (query) => {
                 query.as('datasets_count');
-            });
+            })
+            .orderBy('datasets_count', 'desc');

         return authors;
     }
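For orientation: `withCount` does not add a column to the model, it surfaces the aggregate through `$extras` under the alias set by `query.as('datasets_count')`, which is what the `datasetCount` getter on person.ts (changed near the end of this commit) reads. A minimal sketch of consuming this query, assuming this repo's Person model:

// Sketch: reading the aggregate added by .withCount() (assumes this app's Person model).
import Person from '#models/person';

async function listAuthors() {
    const authors = await Person.query()
        .where('name_type', 'Personal')
        .withCount('datasets', (query) => query.as('datasets_count'))
        .orderBy('datasets_count', 'desc');

    for (const author of authors) {
        // The alias lands in $extras; the model's datasetCount getter wraps it in Number().
        console.log(author.fullName, Number(author.$extras.datasets_count));
    }
}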
@@ -1,104 +1,135 @@
 import type { HttpContext } from '@adonisjs/core/http';
 import { StatusCodes } from 'http-status-codes';
 import redis from '@adonisjs/redis/services/main';

-const prefixes = ['von', 'van'];
+const PREFIXES = ['von', 'van'];
+const DEFAULT_SIZE = 50;
+const FONT_SIZE_RATIO = 0.4;
+const COLOR_LIGHTENING_PERCENT = 60;
+const COLOR_DARKENING_FACTOR = 0.6;

 export default class AvatarController {
     public async generateAvatar({ request, response }: HttpContext) {
         try {
-            const { name, size } = request.only(['name', 'size']);
+            const { name, size = DEFAULT_SIZE } = request.only(['name', 'size']);
             if (!name) {
                 return response.status(StatusCodes.BAD_REQUEST).json({ error: 'Name is required' });
             }

+            // Build a unique cache key for the given name and size
+            const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
+            const cachedSvg = await redis.get(cacheKey);
+            if (cachedSvg) {
+                this.setResponseHeaders(response);
+                return response.send(cachedSvg);
+            }

             const initials = this.getInitials(name);
+            const colors = this.generateColors(name);
+            const svgContent = this.createSvg(size, colors, initials);

-            const originalColor = this.getColorFromName(name);
-            const backgroundColor = this.lightenColor(originalColor, 60);
-            const textColor = this.darkenColor(originalColor);
-
-            const svgContent = `
-                <svg width="${size || 50}" height="${size || 50}" xmlns="http://www.w3.org/2000/svg">
-                    <rect width="100%" height="100%" fill="#${backgroundColor}"/>
-                    <text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-weight="bold" font-family="Arial, sans-serif" font-size="${
-                        (size / 100) * 40 || 25
-                    }" fill="#${textColor}">${initials}</text>
-                </svg>
-            `;
-
-            response.header('Content-type', 'image/svg+xml');
-            response.header('Cache-Control', 'no-cache');
-            response.header('Pragma', 'no-cache');
-            response.header('Expires', '0');
+            // Cache the generated avatar for future use, e.g. 1 hour expiry
+            await redis.setex(cacheKey, 3600, svgContent);

+            this.setResponseHeaders(response);
             return response.send(svgContent);
         } catch (error) {
-            return response.status(StatusCodes.OK).json({ error: error.message });
+            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ error: error.message });
         }
     }

-    private getInitials(name: string) {
-        const parts = name.split(' ');
-        let initials = '';
+    private getInitials(name: string): string {
+        const parts = name
+            .trim()
+            .split(' ')
+            .filter((part) => part.length > 0);
+
+        if (parts.length === 0) {
+            return 'NA';
+        }

         if (parts.length >= 2) {
-            const firstName = parts[0];
-            const lastName = parts[parts.length - 1];
-
-            const firstInitial = firstName.charAt(0).toUpperCase();
-            const lastInitial = lastName.charAt(0).toUpperCase();
-
-            if (prefixes.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
-                initials = firstInitial + lastName.charAt(1).toUpperCase();
-            } else {
-                initials = firstInitial + lastInitial;
-            }
-        } else if (parts.length === 1) {
-            initials = parts[0].substring(0, 2).toUpperCase();
+            return this.getMultiWordInitials(parts);
         }

-        return initials;
+        return parts[0].substring(0, 2).toUpperCase();
     }

-    private getColorFromName(name: string) {
+    private getMultiWordInitials(parts: string[]): string {
+        const firstName = parts[0];
+        const lastName = parts[parts.length - 1];
+        const firstInitial = firstName.charAt(0).toUpperCase();
+        const lastInitial = lastName.charAt(0).toUpperCase();
+
+        if (PREFIXES.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
+            return firstInitial + lastName.charAt(1).toUpperCase();
+        }
+        return firstInitial + lastInitial;
+    }
+
+    private generateColors(name: string): { background: string; text: string } {
+        const baseColor = this.getColorFromName(name);
+        return {
+            background: this.lightenColor(baseColor, COLOR_LIGHTENING_PERCENT),
+            text: this.darkenColor(baseColor),
+        };
+    }
+
+    private createSvg(size: number, colors: { background: string; text: string }, initials: string): string {
+        const fontSize = size * FONT_SIZE_RATIO;
+        return `
+            <svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg">
+                <rect width="100%" height="100%" fill="#${colors.background}"/>
+                <text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-weight="bold" font-family="Arial, sans-serif" font-size="${fontSize}" fill="#${colors.text}">${initials}</text>
+            </svg>
+        `;
+    }
+
+    private setResponseHeaders(response: HttpContext['response']): void {
+        response.header('Content-type', 'image/svg+xml');
+        response.header('Cache-Control', 'no-cache');
+        response.header('Pragma', 'no-cache');
+        response.header('Expires', '0');
+    }
+
+    private getColorFromName(name: string): string {
         let hash = 0;
         for (let i = 0; i < name.length; i++) {
             hash = name.charCodeAt(i) + ((hash << 5) - hash);
         }
-        let color = '#';

+        const colorParts = [];
         for (let i = 0; i < 3; i++) {
             const value = (hash >> (i * 8)) & 0xff;
-            color += ('00' + value.toString(16)).substr(-2);
+            colorParts.push(value.toString(16).padStart(2, '0'));
         }
-        return color.replace('#', '');
+        return colorParts.join('');
     }

-    private lightenColor(hexColor: string, percent: number) {
-        let r = parseInt(hexColor.substring(0, 2), 16);
-        let g = parseInt(hexColor.substring(2, 4), 16);
-        let b = parseInt(hexColor.substring(4, 6), 16);
+    private lightenColor(hexColor: string, percent: number): string {
+        const r = parseInt(hexColor.substring(0, 2), 16);
+        const g = parseInt(hexColor.substring(2, 4), 16);
+        const b = parseInt(hexColor.substring(4, 6), 16);

-        r = Math.floor((r * (100 + percent)) / 100);
-        g = Math.floor((g * (100 + percent)) / 100);
-        b = Math.floor((b * (100 + percent)) / 100);
+        const lightenValue = (value: number) => Math.min(255, Math.floor((value * (100 + percent)) / 100));

-        r = r < 255 ? r : 255;
-        g = g < 255 ? g : 255;
-        b = b < 255 ? b : 255;
+        const newR = lightenValue(r);
+        const newG = lightenValue(g);
+        const newB = lightenValue(b);

-        const lighterHex = ((r << 16) | (g << 8) | b).toString(16);
-
-        return lighterHex.padStart(6, '0');
+        return ((newR << 16) | (newG << 8) | newB).toString(16).padStart(6, '0');
     }

-    private darkenColor(hexColor: string) {
+    private darkenColor(hexColor: string): string {
         const r = parseInt(hexColor.slice(0, 2), 16);
         const g = parseInt(hexColor.slice(2, 4), 16);
         const b = parseInt(hexColor.slice(4, 6), 16);

-        const darkerR = Math.round(r * 0.6);
-        const darkerG = Math.round(g * 0.6);
-        const darkerB = Math.round(b * 0.6);
+        const darkenValue = (value: number) => Math.round(value * COLOR_DARKENING_FACTOR);

-        const darkerColor = ((darkerR << 16) + (darkerG << 8) + darkerB).toString(16);
+        const darkerR = darkenValue(r);
+        const darkerG = darkenValue(g);
+        const darkerB = darkenValue(b);

-        return darkerColor.padStart(6, '0');
+        return ((darkerR << 16) + (darkerG << 8) + darkerB).toString(16).padStart(6, '0');
     }
 }
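The rewrite keeps the original deterministic name-to-color idea: hash the name into a base color, lighten it for the background, darken it for the text, so the same name always renders the same avatar. A standalone sketch of that pipeline (plain TypeScript, no framework imports), mirroring the helpers above:

// Standalone sketch of the avatar color pipeline; same name => same colors.
function colorFromName(name: string): string {
    let hash = 0;
    for (let i = 0; i < name.length; i++) {
        hash = name.charCodeAt(i) + ((hash << 5) - hash);
    }
    const parts: string[] = [];
    for (let i = 0; i < 3; i++) {
        const value = (hash >> (i * 8)) & 0xff;
        parts.push(value.toString(16).padStart(2, '0'));
    }
    return parts.join(''); // six hex digits, no leading '#'
}

function lighten(hex: string, percent: number): string {
    const channel = (offset: number) =>
        Math.min(255, Math.floor((parseInt(hex.substring(offset, offset + 2), 16) * (100 + percent)) / 100));
    return ((channel(0) << 16) | (channel(2) << 8) | channel(4)).toString(16).padStart(6, '0');
}

function darken(hex: string, factor = 0.6): string {
    const channel = (offset: number) => Math.round(parseInt(hex.substring(offset, offset + 2), 16) * factor);
    return ((channel(0) << 16) | (channel(2) << 8) | channel(4)).toString(16).padStart(6, '0');
}

const base = colorFromName('Jane van Dyke');
console.log({ base, background: lighten(base, 60), text: darken(base) });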
@@ -6,10 +6,15 @@ import { StatusCodes } from 'http-status-codes';
 // node ace make:controller Author
 export default class DatasetController {
     public async index({}: HttpContext) {
-        // select * from gba.persons
-        // where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
-        // where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
-        const datasets = await Dataset.query().where('server_state', 'published').orWhere('server_state', 'deleted');
+        // Select datasets with server_state 'published' or 'deleted' and sort by the last published date
+        const datasets = await Dataset.query()
+            .where(function (query) {
+                query.where('server_state', 'published')
+                    .orWhere('server_state', 'deleted');
+            })
+            .preload('titles')
+            .preload('identifier')
+            .orderBy('server_date_published', 'desc');

         return datasets;
     }
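One detail worth calling out in the rewritten query: the two server_state values are wrapped in a where group. Without the grouping, any further condition appended to the chain would combine under SQL's precedence (AND binds tighter than OR) and silently widen the result set. A small sketch of the difference, assuming this app's Dataset model:

// Sketch, assuming this app's Dataset model; both are query builders, not yet executed.
import Dataset from '#models/dataset';

// Grouped: WHERE (server_state = 'published' OR server_state = 'deleted') AND publish_id IS NOT NULL
const grouped = Dataset.query()
    .where((q) => q.where('server_state', 'published').orWhere('server_state', 'deleted'))
    .whereNotNull('publish_id');

// Ungrouped: WHERE server_state = 'published' OR (server_state = 'deleted' AND publish_id IS NOT NULL)
// The 'published' side escapes the publish_id check because AND binds tighter than OR.
const ungrouped = Dataset.query()
    .where('server_state', 'published')
    .orWhere('server_state', 'deleted')
    .whereNotNull('publish_id');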
@@ -14,7 +14,7 @@ export default class FileController {
         // where: { id: id },
         // });
         if (file) {
-            const filePath = '/storage/app/public/' + file.pathName;
+            const filePath = '/storage/app/data/' + file.pathName;
             const ext = path.extname(filePath);
             const fileName = file.label + ext;
             try {
@@ -9,6 +9,24 @@ import BackupCode from '#models/backup_code';

 // Here we are generating secret and recovery codes for the user that's enabling 2FA and storing them to our database.
 export default class UserController {
+    public async getSubmitters({ response }: HttpContext) {
+        try {
+            const submitters = await User.query()
+                .preload('roles', (query) => {
+                    query.where('name', 'submitter');
+                })
+                .whereHas('roles', (query) => {
+                    query.where('name', 'submitter');
+                })
+                .exec();
+            return submitters;
+        } catch (error) {
+            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
+                message: 'Invalid TOTP state',
+            });
+        }
+    }
+
     public async enable({ auth, response, request }: HttpContext) {
         const user = (await User.find(auth.user?.id)) as User;
         // await user.load('totp_secret');
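The combination in getSubmitters is a common Lucid pattern: whereHas restricts which users come back (only those that actually have the role), while preload with the same filter controls which related rows are attached to each user for serialization. One without the other is not enough, as this sketch illustrates (assuming this app's User model):

// Sketch, assuming this app's User model with a roles relation.
import User from '#models/user';

async function submitterQueries() {
    // whereHas alone: the right set of users, but user.roles stays unloaded.
    const filteredOnly = await User.query().whereHas('roles', (q) => q.where('name', 'submitter'));

    // preload alone: every user is returned; non-submitters just carry an empty roles array.
    const loadedOnly = await User.query().preload('roles', (q) => q.where('name', 'submitter'));

    console.log(filteredOnly.length, loadedOnly.length);

    // Together: only submitters, each with the matching role rows attached.
    return User.query()
        .whereHas('roles', (q) => q.where('name', 'submitter'))
        .preload('roles', (q) => q.where('name', 'submitter'));
}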
app/Controllers/Http/Api/collections_controller.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
+import type { HttpContext } from '@adonisjs/core/http';
+import Collection from '#models/collection';
+
+export default class CollectionsController {
+    public async show({ params, response }: HttpContext) {
+        // Get the collection id from route parameters
+        const collectionId = params.id;
+
+        // Find the selected collection by id
+        const collection = await Collection.find(collectionId);
+        if (!collection) {
+            return response.status(404).json({ message: 'Collection not found' });
+        }
+
+        // Query for narrower concepts: collections whose parent_id equals the selected collection's id
+        const narrowerCollections = (await Collection.query().where('parent_id', collection.id)) || [];
+
+        // For the broader concept, if the selected collection has a parent_id fetch that record (otherwise null)
+        const broaderCollection: Collection[] | never[] | null = await (async () => {
+            if (collection.parent_id) {
+                // Try to fetch the parent...
+                const parent = await Collection.find(collection.parent_id);
+                // If found, return it wrapped in an array; if not found, return null (or an empty array if you prefer)
+                return parent ? [parent] : null;
+            }
+            return [];
+        })();
+
+        // Return the selected collection along with its narrower and broader concepts in JSON format
+        return response.json({
+            selectedCollection: collection,
+            narrowerCollections,
+            broaderCollection,
+        });
+    }
+}
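Assuming the new controller is mounted at something like GET /api/collections/:id in start/api/routes.ts (the exact path is not part of this excerpt), a client receives the selected concept together with its broader and narrower neighbours, which is what the new Category.vue browser consumes. A hypothetical client-side sketch:

// Hypothetical client-side sketch; the real route path lives in start/api/routes.ts.
interface CollectionDto {
    id: number;
    parent_id: number | null;
    name?: string; // assumed field, not shown in this diff
}

interface CollectionResponse {
    selectedCollection: CollectionDto;
    narrowerCollections: CollectionDto[]; // children: parent_id === selected id
    broaderCollection: CollectionDto[] | null; // parent wrapped in an array, [] at the top level
}

async function loadCollection(id: number): Promise<CollectionResponse> {
    const res = await fetch(`/api/collections/${id}`);
    if (!res.ok) throw new Error(`Collection ${id} not found`);
    return (await res.json()) as CollectionResponse;
}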
@@ -19,14 +19,13 @@ import XmlModel from '#app/Library/XmlModel';
 import logger from '@adonisjs/core/services/logger';
 import ResumptionToken from '#app/Library/Oai/ResumptionToken';
 // import Config from '@ioc:Adonis/Core/Config';
-import config from '@adonisjs/core/services/config'
+import config from '@adonisjs/core/services/config';
 // import { inject } from '@adonisjs/fold';
-import { inject } from '@adonisjs/core'
+import { inject } from '@adonisjs/core';
 // import { TokenWorkerContract } from "MyApp/Models/TokenWorker";
 import TokenWorkerContract from '#library/Oai/TokenWorkerContract';
+import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
-

 interface XslTParameter {
     [key: string]: any;
 }

@@ -35,12 +34,14 @@ interface Dictionary {
     [index: string]: string;
 }

-interface ListParameter {
+interface PagingParameter {
     cursor: number;
-    totalIds: number;
+    totalLength: number;
     start: number;
-    reldocIds: (number | null)[];
+    nextDocIds: number[];
+    activeWorkIds: number[];
     metadataPrefix: string;
+    queryParams: Object;
 }

 @inject()

@@ -49,6 +50,7 @@ export default class OaiController {
     private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
     private xsltParameter: XslTParameter;

+    private firstPublishedDataset: Dataset | null;
     /**
      * Holds xml representation of document information to be processed.
      *

@@ -57,7 +59,6 @@ export default class OaiController {
     private xml: XMLBuilder;
     private proc;
-

     constructor(public tokenWorker: TokenWorkerContract) {
         // Load the XSLT file
         this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');

@@ -85,9 +86,9 @@ export default class OaiController {
         let earliestDateFromDb;
         // const oaiRequest: OaiParameter = request.body;
         try {
-            const firstPublishedDataset: Dataset | null = await Dataset.earliestPublicationDate();
-            firstPublishedDataset != null &&
-                (earliestDateFromDb = firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
+            this.firstPublishedDataset = await Dataset.earliestPublicationDate();
+            this.firstPublishedDataset != null &&
+                (earliestDateFromDb = this.firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
             this.xsltParameter['earliestDatestamp'] = earliestDateFromDb;
             // start the request
             await this.handleRequest(oaiRequest, request);

@@ -162,22 +163,19 @@ export default class OaiController {
             } else if (verb == 'GetRecord') {
                 await this.handleGetRecord(oaiRequest);
             } else if (verb == 'ListRecords') {
-                await this.handleListRecords(oaiRequest);
+                // Get browser fingerprint from the request:
+                const browserFingerprint = this.getBrowserFingerprint(request);
+                await this.handleListRecords(oaiRequest, browserFingerprint);
             } else if (verb == 'ListIdentifiers') {
-                await this.handleListIdentifiers(oaiRequest);
+                // Get browser fingerprint from the request:
+                const browserFingerprint = this.getBrowserFingerprint(request);
+                await this.handleListIdentifiers(oaiRequest, browserFingerprint);
             } else if (verb == 'ListSets') {
                 await this.handleListSets();
             } else {
                 this.handleIllegalVerb();
             }
         } else {
-            // // try {
-            // // console.log("Async code example.")
-            // const err = new PageNotFoundException("verb not found");
-            // throw err;
-            // // } catch (error) { // manually catching
-            // //     next(error); // passing to default middleware error handler
-            // // }
             throw new OaiModelException(
                 StatusCodes.INTERNAL_SERVER_ERROR,
                 'The verb provided in the request is illegal.',

@@ -187,11 +185,11 @@ export default class OaiController {
     }

     protected handleIdentify() {
-        const email = process.env.OAI_EMAIL || 'repository@geosphere.at';
-        const repositoryName = 'Tethys RDR';
-        const repIdentifier = 'tethys.at';
-        const sampleIdentifier = 'oai:' + repIdentifier + ':1'; //$this->_configuration->getSampleIdentifier();
+        // Get configuration values from environment or a dedicated configuration service
+        const email = process.env.OAI_EMAIL ?? 'repository@geosphere.at';
+        const repositoryName = process.env.OAI_REPOSITORY_NAME ?? 'Tethys RDR';
+        const repIdentifier = process.env.OAI_REP_IDENTIFIER ?? 'tethys.at';
+        const sampleIdentifier = `oai:${repIdentifier}:1`;
         // Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null;
         // earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published);

@@ -216,7 +214,7 @@ export default class OaiController {

         const sets: { [key: string]: string } = {
             'open_access': 'Set for open access licenses',
-            'openaire_data': "OpenAIRE",
+            'openaire_data': 'OpenAIRE',
             'doc-type:ResearchData': 'Set for document type ResearchData',
             ...(await this.getSetsForDatasetTypes()),
             ...(await this.getSetsForCollections()),

@@ -234,7 +232,15 @@ export default class OaiController {
         const repIdentifier = 'tethys.at';
         this.xsltParameter['repIdentifier'] = repIdentifier;

+        // Validate that required parameter exists early
+        if (!('identifier' in oaiRequest)) {
+            throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
+        }
+
+        // Validate and extract the dataset identifier from the request
         const dataId = this.validateAndGetIdentifier(oaiRequest);

+        // Retrieve dataset with associated XML cache and collection roles
         const dataset = await Dataset.query()
             .where('publish_id', dataId)
             .preload('xmlCache')

@@ -251,59 +257,61 @@ export default class OaiController {
             );
         }

+        // Validate and set the metadata prefix parameter
         const metadataPrefix = this.validateAndGetMetadataPrefix(oaiRequest);
         this.xsltParameter['oai_metadataPrefix'] = metadataPrefix;
-        // do not deliver datasets which are restricted by document state defined in deliveringStates

+        // Ensure that the dataset is in an exportable state
         this.validateDatasetState(dataset);

-        // add xml elements
+        // Build the XML for the dataset record and add it to the root node
         const datasetNode = this.xml.root().ele('Datasets');
         await this.createXmlRecord(dataset, datasetNode);
     }

-    protected async handleListIdentifiers(oaiRequest: Dictionary) {
-        !this.tokenWorker.isConnected && (await this.tokenWorker.connect());
+    protected async handleListIdentifiers(oaiRequest: Dictionary, browserFingerprint: string) {
+        if (!this.tokenWorker.isConnected) {
+            await this.tokenWorker.connect();
+        }
+
         const maxIdentifier: number = config.get('oai.max.listidentifiers', 100);
-        await this.handleLists(oaiRequest, maxIdentifier);
+        await this.handleLists(oaiRequest, maxIdentifier, browserFingerprint);
     }

-    protected async handleListRecords(oaiRequest: Dictionary) {
-        !this.tokenWorker.isConnected && (await this.tokenWorker.connect());
+    protected async handleListRecords(oaiRequest: Dictionary, browserFingerprint: string) {
+        if (!this.tokenWorker.isConnected) {
+            await this.tokenWorker.connect();
+        }
+
         const maxRecords: number = config.get('oai.max.listrecords', 100);
-        await this.handleLists(oaiRequest, maxRecords);
+        await this.handleLists(oaiRequest, maxRecords, browserFingerprint);
     }

-    private async handleLists(oaiRequest: Dictionary, maxRecords: number) {
-        maxRecords = maxRecords || 100;
+    private async handleLists(oaiRequest: Dictionary, maxRecords: number, browserFingerprint: string) {
         const repIdentifier = 'tethys.at';
         this.xsltParameter['repIdentifier'] = repIdentifier;
         const datasetNode = this.xml.root().ele('Datasets');

         // list initialisation
-        const numWrapper: ListParameter = {
+        const paginationParams: PagingParameter = {
             cursor: 0,
-            totalIds: 0,
+            totalLength: 0,
             start: maxRecords + 1,
-            reldocIds: [],
+            nextDocIds: [],
+            activeWorkIds: [],
             metadataPrefix: '',
+            queryParams: {},
         };

         // resumptionToken is defined
         if ('resumptionToken' in oaiRequest) {
-            await this.handleResumptionToken(oaiRequest, maxRecords, numWrapper);
+            await this.handleResumptionToken(oaiRequest, maxRecords, paginationParams);
         } else {
             // no resumptionToken is given
-            await this.handleNoResumptionToken(oaiRequest, numWrapper);
+            await this.handleNoResumptionToken(oaiRequest, paginationParams, maxRecords);
         }

         // handling of document ids
-        const restIds = numWrapper.reldocIds as number[];
-        const workIds = restIds.splice(0, maxRecords) as number[]; // array_splice(restIds, 0, maxRecords);
+        const nextIds: number[] = paginationParams.nextDocIds;
+        const workIds: number[] = paginationParams.activeWorkIds;

         // no records returned
-        if (workIds.length == 0) {
+        if (workIds.length === 0) {
             throw new OaiModelException(
                 StatusCodes.INTERNAL_SERVER_ERROR,
                 'The combination of the given values results in an empty list.',
@@ -311,169 +319,218 @@ export default class OaiController {
             );
         }

-        const datasets: Dataset[] = await Dataset.query()
+        const datasets = await Dataset.query()
             .whereIn('publish_id', workIds)
             .preload('xmlCache')
             .preload('collections', (builder) => {
                 builder.preload('collectionRole');
             })
             .orderBy('publish_id');

         for (const dataset of datasets) {
             await this.createXmlRecord(dataset, datasetNode);
         }

-        // store the further Ids in a resumption-file
-        const countRestIds = restIds.length; //84
-        if (countRestIds > 0) {
-            const token = new ResumptionToken();
-            token.startPosition = numWrapper.start; //101
-            token.totalIds = numWrapper.totalIds; //184
-            token.documentIds = restIds; //101 -184
-            token.metadataPrefix = numWrapper.metadataPrefix;
-
-            // $tokenWorker->storeResumptionToken($token);
-            const res: string = await this.tokenWorker.set(token);
-
-            // set parameters for the resumptionToken-node
-            // const res = token.ResumptionId;
-            this.setParamResumption(res, numWrapper.cursor, numWrapper.totalIds);
-        }
+        await this.setResumptionToken(nextIds, paginationParams, browserFingerprint);
     }

-    private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, numWrapper: ListParameter) {
-        const resParam = oaiRequest['resumptionToken']; //e.g. "158886496600000"
+    private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) {
+        this.validateMetadataPrefix(oaiRequest, paginationParams);
+        const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
+            'server_state',
+            this.deliveringDocumentStates,
+        );
+        this.applySetFilter(finder, oaiRequest);
+        this.applyDateFilters(finder, oaiRequest);
+        await this.fetchAndSetResults(finder, paginationParams, oaiRequest, maxRecords);
+    }
+
+    private async fetchAndSetResults(
+        finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
+        paginationParams: PagingParameter,
+        oaiRequest: Dictionary,
+        maxRecords: number,
+    ) {
+        const totalResult = await finder
+            .clone()
+            .count('* as total')
+            .first()
+            .then((res) => res?.$extras.total);
+        paginationParams.totalLength = Number(totalResult);
+
+        const combinedRecords: Dataset[] = await finder.select('publish_id').orderBy('publish_id').offset(0).limit(maxRecords * 2);
+
+        paginationParams.activeWorkIds = combinedRecords.slice(0, 100).map((dat) => Number(dat.publish_id));
+        paginationParams.nextDocIds = combinedRecords.slice(100).map((dat) => Number(dat.publish_id));
+
+        // No resumption token was used – set queryParams from the current oaiRequest
+        paginationParams.queryParams = {
+            ...oaiRequest,
+            deliveringStates: this.deliveringDocumentStates,
+        };
+    }
+
+    private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, paginationParams: PagingParameter) {
+        const resParam = oaiRequest['resumptionToken'];
         const token = await this.tokenWorker.get(resParam);

         if (!token) {
             throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'cache is outdated.', OaiErrorCodes.BADRESUMPTIONTOKEN);
         }

-        numWrapper.cursor = token.startPosition - 1; //startet dann bei Index 10
-        numWrapper.start = token.startPosition + maxRecords;
-        numWrapper.totalIds = token.totalIds;
-        numWrapper.reldocIds = token.documentIds;
-        numWrapper.metadataPrefix = token.metadataPrefix;
-
-        this.xsltParameter['oai_metadataPrefix'] = numWrapper.metadataPrefix;
+        paginationParams.cursor = token.startPosition - 1;
+        paginationParams.start = token.startPosition + maxRecords;
+        paginationParams.totalLength = token.totalIds;
+        paginationParams.activeWorkIds = token.documentIds;
+        paginationParams.metadataPrefix = token.metadataPrefix;
+        paginationParams.queryParams = token.queryParams;
+        this.xsltParameter['oai_metadataPrefix'] = token.metadataPrefix;
+
+        const finder = this.buildDatasetQueryViaToken(token);
+        const nextRecords: Dataset[] = await this.fetchNextRecords(finder, token, maxRecords);
+        paginationParams.nextDocIds = nextRecords.map((dat) => Number(dat.publish_id));
     }

-    private async handleNoResumptionToken(oaiRequest: Dictionary, numWrapper: ListParameter) {
-        // no resumptionToken is given
-        if ('metadataPrefix' in oaiRequest) {
-            numWrapper.metadataPrefix = oaiRequest['metadataPrefix'];
-        } else {
+    private async setResumptionToken(nextIds: number[], paginationParams: PagingParameter, browserFingerprint: string) {
+        const countRestIds = nextIds.length;
+        if (countRestIds > 0) {
+            const token = new ResumptionToken();
+            token.startPosition = paginationParams.start;
+            token.totalIds = paginationParams.totalLength;
+            token.documentIds = nextIds;
+            token.metadataPrefix = paginationParams.metadataPrefix;
+            token.queryParams = paginationParams.queryParams;
+            const res: string = await this.tokenWorker.set(token, browserFingerprint);
+            this.setParamResumption(res, paginationParams.cursor, paginationParams.totalLength);
+        }
+    }
+
+    private buildDatasetQueryViaToken(token: ResumptionToken) {
+        const finder = Dataset.query();
+        const originalQuery = token.queryParams || {};
+        const deliveringStates = originalQuery.deliveringStates || this.deliveringDocumentStates;
+
+        finder.whereIn('server_state', deliveringStates);
+        this.applySetFilter(finder, originalQuery);
+        this.applyDateFilters(finder, originalQuery);
+
+        return finder;
+    }
+
+    private async fetchNextRecords(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, token: ResumptionToken, maxRecords: number) {
+        return finder
+            .select('publish_id')
+            .orderBy('publish_id')
+            .offset(token.startPosition - 1 + maxRecords)
+            .limit(100);
+    }
+
+    private validateMetadataPrefix(oaiRequest: Dictionary, paginationParams: PagingParameter) {
+        if (!('metadataPrefix' in oaiRequest)) {
             throw new OaiModelException(
                 StatusCodes.INTERNAL_SERVER_ERROR,
                 'The prefix of the metadata argument is unknown.',
                 OaiErrorCodes.BADARGUMENT,
             );
         }
-        this.xsltParameter['oai_metadataPrefix'] = numWrapper.metadataPrefix;
+        paginationParams.metadataPrefix = oaiRequest['metadataPrefix'];
+        this.xsltParameter['oai_metadataPrefix'] = paginationParams.metadataPrefix;
+    }

-        let finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();
-        // add server state restrictions
-        finder.whereIn('server_state', this.deliveringDocumentStates);
-        if ('set' in oaiRequest) {
-            const set = oaiRequest['set'] as string;
-            const setArray = set.split(':');
-
-            if (setArray[0] == 'data-type') {
-                if (setArray.length == 2 && setArray[1]) {
-                    finder.where('type', setArray[1]);
-                }
-            } else if (setArray[0] == 'open_access') {
-                const openAccessLicences = ['CC-BY-4.0', 'CC-BY-SA-4.0'];
-                finder.andWhereHas('licenses', (query) => {
-                    query.whereIn('name', openAccessLicences);
-                });
-            } else if (setArray[0] == 'ddc') {
-                if (setArray.length == 2 && setArray[1] != '') {
-                    finder.andWhereHas('collections', (query) => {
-                        query.where('number', setArray[1]);
-                    });
-                }
-            }
-        }
+    private applySetFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
+        if ('set' in queryParams) {
+            const [setType, setValue] = queryParams['set'].split(':');
+
+            switch (setType) {
+                case 'data-type':
+                    setValue && finder.where('type', setValue);
+                    break;
+                case 'open_access':
+                    finder.andWhereHas('licenses', (query) => {
+                        query.whereIn('name', ['CC-BY-4.0', 'CC-BY-SA-4.0']);
+                    });
+                    break;
+                case 'ddc':
+                    setValue &&
+                        finder.andWhereHas('collections', (query) => {
+                            query.where('number', setValue);
+                        });
+                    break;
+            }
+        }
+    }

-        // const timeZone = "Europe/Vienna"; // Canonical time zone name
-        // &from=2020-09-03&until2020-09-03
-        // &from=2020-09-11&until=2021-05-11
-        if ('from' in oaiRequest && 'until' in oaiRequest) {
-            const from = oaiRequest['from'] as string;
-            let fromDate = dayjs(from); //.tz(timeZone);
-            const until = oaiRequest['until'] as string;
-            let untilDate = dayjs(until); //.tz(timeZone);
-            if (!fromDate.isValid() || !untilDate.isValid()) {
-                throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'Date Parameter is not valid.', OaiErrorCodes.BADARGUMENT);
-            }
-            fromDate = dayjs.tz(from, 'Europe/Vienna');
-            untilDate = dayjs.tz(until, 'Europe/Vienna');
-
-            if (from.length != until.length) {
-                throw new OaiModelException(
-                    StatusCodes.INTERNAL_SERVER_ERROR,
-                    'The request has different granularities for the from and until parameters.',
-                    OaiErrorCodes.BADARGUMENT,
-                );
-            }
-            fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
-            untilDate.hour() == 0 && (untilDate = untilDate.endOf('day'));
-
-            finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
-        } else if ('from' in oaiRequest && !('until' in oaiRequest)) {
-            const from = oaiRequest['from'] as string;
-            let fromDate = dayjs(from);
-            if (!fromDate.isValid()) {
-                throw new OaiModelException(
-                    StatusCodes.INTERNAL_SERVER_ERROR,
-                    'From date parameter is not valid.',
-                    OaiErrorCodes.BADARGUMENT,
-                );
-            }
-            fromDate = dayjs.tz(from, 'Europe/Vienna');
-            fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
-
-            const now = dayjs();
-            if (fromDate.isAfter(now)) {
-                throw new OaiModelException(
-                    StatusCodes.INTERNAL_SERVER_ERROR,
-                    'Given from date is greater than now. The given values results in an empty list.',
-                    OaiErrorCodes.NORECORDSMATCH,
-                );
-            } else {
-                finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
-            }
-        } else if (!('from' in oaiRequest) && 'until' in oaiRequest) {
-            const until = oaiRequest['until'] as string;
-            let untilDate = dayjs(until);
-            if (!untilDate.isValid()) {
-                throw new OaiModelException(
-                    StatusCodes.INTERNAL_SERVER_ERROR,
-                    'Until date parameter is not valid.',
-                    OaiErrorCodes.BADARGUMENT,
-                );
-            }
-            untilDate = dayjs.tz(until, 'Europe/Vienna');
-            untilDate.hour() == 0 && (untilDate = untilDate.endOf('day'));
-
-            const firstPublishedDataset: Dataset = (await Dataset.earliestPublicationDate()) as Dataset;
-            const earliestPublicationDate = dayjs(firstPublishedDataset.server_date_published.toISO());
-            if (earliestPublicationDate.isAfter(untilDate)) {
-                throw new OaiModelException(
-                    StatusCodes.INTERNAL_SERVER_ERROR,
-                    `earliestDatestamp is greater than given until date.
-                    The given values results in an empty list.`,
-                    OaiErrorCodes.NORECORDSMATCH,
-                );
-            } else {
-                finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
-            }
-        }
-
-        let reldocIdsDocs = await finder.select('publish_id').orderBy('publish_id');
-        numWrapper.reldocIds = reldocIdsDocs.map((dat) => dat.publish_id);
-        numWrapper.totalIds = numWrapper.reldocIds.length; //212
+    private applyDateFilters(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
+        const { from, until } = queryParams;
+
+        if (from && until) {
+            this.handleFromUntilFilter(finder, from, until);
+        } else if (from) {
+            this.handleFromFilter(finder, from);
+        } else if (until) {
+            this.handleUntilFilter(finder, until);
+        }
+    }
+
+    private handleFromUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string, until: string) {
+        const fromDate = this.parseDateWithValidation(from, 'From');
+        const untilDate = this.parseDateWithValidation(until, 'Until');
+
+        if (from.length !== until.length) {
+            throw new OaiModelException(
+                StatusCodes.INTERNAL_SERVER_ERROR,
+                'The request has different granularities for the from and until parameters.',
+                OaiErrorCodes.BADARGUMENT,
+            );
+        }
+
+        finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
     }

+    private handleFromFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string) {
+        const fromDate = this.parseDateWithValidation(from, 'From');
+        const now = dayjs();
+
+        if (fromDate.isAfter(now)) {
+            throw new OaiModelException(
+                StatusCodes.INTERNAL_SERVER_ERROR,
+                'Given from date is greater than now. The given values results in an empty list.',
+                OaiErrorCodes.NORECORDSMATCH,
+            );
+        }
+
+        finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
+    }
+
+    private handleUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, until: string) {
+        const untilDate = this.parseDateWithValidation(until, 'Until');
+
+        const earliestPublicationDate = dayjs(this.firstPublishedDataset?.server_date_published.toISO());
+
+        if (earliestPublicationDate.isAfter(untilDate)) {
+            throw new OaiModelException(
+                StatusCodes.INTERNAL_SERVER_ERROR,
+                'earliestDatestamp is greater than given until date. The given values results in an empty list.',
+                OaiErrorCodes.NORECORDSMATCH,
+            );
+        }
+
+        finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
+    }
+
+    private parseDateWithValidation(dateStr: string, label: string) {
+        let date = dayjs(dateStr);
+        if (!date.isValid()) {
+            throw new OaiModelException(
+                StatusCodes.INTERNAL_SERVER_ERROR,
+                `${label} date parameter is not valid.`,
+                OaiErrorCodes.BADARGUMENT,
+            );
+        }
+        date = dayjs.tz(dateStr, 'Europe/Vienna');
+        return date.hour() === 0 ? (label === 'From' ? date.startOf('day') : date.endOf('day')) : date;
+    }
+
     private setParamResumption(res: string, cursor: number, totalIds: number) {
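Taken together, the list handling now pre-fetches maxRecords * 2 ids: the first slice (activeWorkIds) is rendered immediately, the overflow (nextDocIds) is stored under the resumption token, and every follow-up request replays the stored query at the next offset. Note that the slice boundary is hard-coded at 100, so it only lines up with the fetch limit when maxRecords is 100. From a harvester's perspective the exchange looks like this sketch (endpoint URL illustrative, verbs per the OAI-PMH spec):

// Sketch of a harvester paging through ListRecords via resumptionToken.
// Endpoint and token values are illustrative.
const BASE = 'https://repo.example.org/oai';

async function harvest(metadataPrefix = 'oai_dc'): Promise<string[]> {
    const pages: string[] = [];
    let url = `${BASE}?verb=ListRecords&metadataPrefix=${metadataPrefix}`;
    for (;;) {
        const xml = await (await fetch(url)).text();
        pages.push(xml);
        // A real client would XML-parse this; a regex keeps the sketch short.
        const token = xml.match(/<resumptionToken[^>]*>([^<]+)<\/resumptionToken>/)?.[1];
        if (!token) break; // an absent or empty token means the list is complete
        url = `${BASE}?verb=ListRecords&resumptionToken=${token}`;
    }
    return pages;
}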
@@ -641,4 +698,30 @@ export default class OaiController {
         this.xsltParameter['oai_error_code'] = 'badVerb';
         this.xsltParameter['oai_error_message'] = 'The verb provided in the request is illegal.';
     }
+
+    /**
+     * Helper method to build a browser fingerprint by combining:
+     * - User-Agent header,
+     * - the IP address,
+     * - Accept-Language header,
+     * - current timestamp rounded to the hour.
+     *
+     * Every new hour, this will return a different fingerprint.
+     */
+    private getBrowserFingerprint(request: Request): string {
+        const userAgent = request.header('user-agent') || 'unknown';
+        // Check for X-Forwarded-For header to use the client IP from the proxy if available.
+        const xForwardedFor = request.header('x-forwarded-for');
+        let ip = request.ip();
+        if (xForwardedFor) {
+            // X-Forwarded-For may contain a comma-separated list of IPs; the first one is the client IP.
+            ip = xForwardedFor.split(',')[0].trim();
+        }
+        const locale = request.header('accept-language') || 'default';
+        // Round the current time to the start of the hour.
+        const timestampHour = dayjs().startOf('hour').format('YYYY-MM-DDTHH');
+        return `${userAgent}-${ip}-${locale}-${timestampHour}`;
+    }
 }
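Because the timestamp component is truncated to the hour, two requests from the same browser within the same hour map to the same fingerprint (and, downstream, to the same resumption-token key), while the key naturally rotates afterwards. A tiny sketch of the composition with illustrative header values:

// Sketch: what getBrowserFingerprint() produces for one client (values illustrative).
import dayjs from 'dayjs';

const userAgent = 'Mozilla/5.0';
const ip = '203.0.113.7'; // first entry of X-Forwarded-For when behind a proxy
const locale = 'de-AT';
const timestampHour = dayjs().startOf('hour').format('YYYY-MM-DDTHH');

const fingerprint = `${userAgent}-${ip}-${locale}-${timestampHour}`;
console.log(fingerprint); // e.g. "Mozilla/5.0-203.0.113.7-de-AT-2025-01-01T14"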
@@ -8,6 +8,7 @@ import Description from '#models/description';
 import Language from '#models/language';
 import Coverage from '#models/coverage';
 import Collection from '#models/collection';
+import CollectionRole from '#models/collection_role';
 import dayjs from 'dayjs';
 import Person from '#models/person';
 import db from '@adonisjs/lucid/services/db';

@@ -501,7 +502,7 @@ export default class DatasetController {
         }

         // save collection
-        const collection: Collection | null = await Collection.query().where('id', 21).first();
+        const collection: Collection | null = await Collection.query().where('id', 594).first();
         collection && (await dataset.useTransaction(trx).related('collections').attach([collection.id]));

         // save coverage

@@ -545,7 +546,7 @@ export default class DatasetController {
                 overwrite: true, // overwrite in case of conflict
                 disk: 'local',
             });

             // save file metadata into db
             const newFile = new File();
             newFile.pathName = `${datasetFolder}/${fileName}`;

@@ -1183,16 +1184,16 @@ export default class DatasetController {
             const datasetFolder = `files/${params.id}`;
-            // const folderExists = await drive.use('local').exists(datasetFolder);
-            // if (folderExists) {
-            //     const dirListing = drive.list(datasetFolder);
-            //     const folderContents = await dirListing.toArray();
-            //     if (folderContents.length === 0) {
-            //         await drive.delete(datasetFolder);
-            //     }
-            await drive.use('local').deleteAll(datasetFolder);
-            // delete dataset wirh relation in db
-            await dataset.delete();
-            session.flash({ message: 'You have deleted 1 dataset!' });
-            return response.redirect().toRoute('dataset.list');
+            // const dirListing = drive.list(datasetFolder);
+            // const folderContents = await dirListing.toArray();
+            // if (folderContents.length === 0) {
+            //     await drive.delete(datasetFolder);
+            // }
+            await drive.use('local').deleteAll(datasetFolder);
+            // delete dataset with relation in db
+            await dataset.delete();
+            session.flash({ message: 'You have deleted 1 dataset!' });
+            return response.redirect().toRoute('dataset.list');
             // } else {
             //     // session.flash({
             //     //     warning: `You cannot delete this dataset! Invalid server_state: "${dataset.server_state}"!`,

@@ -1209,7 +1210,7 @@ export default class DatasetController {
                 throw error;
             } else if (error instanceof Exception) {
                 // General exception handling
-                session.flash({ error: error.message});
+                session.flash({ error: error.message });
                 return response.redirect().back();
             } else {
                 session.flash({ error: 'An error occurred while deleting the dataset.' });

@@ -1217,4 +1218,34 @@ export default class DatasetController {
             }
         }
     }
+
+    public async categorize({ inertia, request, response }: HttpContext) {
+        const id = request.param('id');
+        // Preload dataset and its "collections" relation
+        const dataset = await Dataset.query().where('id', id).preload('collections').firstOrFail();
+        const validStates = ['inprogress', 'rejected_editor'];
+        if (!validStates.includes(dataset.server_state)) {
+            // session.flash('errors', 'Invalid server state!');
+            return response
+                .flash(
+                    'warning',
+                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
+                )
+                .redirect()
+                .toRoute('dataset.list');
+        }
+
+        const collectionRoles = await CollectionRole.query()
+            .preload('collections', (coll: Collection) => {
+                // preload only top-level collections with no parent_id
+                coll.whereNull('parent_id').orderBy('number', 'asc');
+            })
+            .exec();
+
+        return inertia.render('Submitter/Dataset/Category', {
+            collectionRoles: collectionRoles,
+            dataset: dataset,
+            relatedCollections: dataset.collections,
+        });
+    }
 }
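The commit message also mentions new routes in start/routes.ts; the wiring for the categorize action is not shown in this excerpt, but it could plausibly look like the following sketch (path, route name, import alias and middleware are assumptions, AdonisJS v6 style):

// Hypothetical route registration for the categorize action.
import router from '@adonisjs/core/services/router';
import { middleware } from '#start/kernel';

// Lazy controller import; the alias is an assumption, not taken from this diff.
const DatasetController = () => import('#controllers/Http/Submitter/DatasetController');

router
    .get('/dataset/:id/categorize', [DatasetController, 'categorize'])
    .as('dataset.categorize')
    .use(middleware.auth());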
@@ -4,6 +4,7 @@ export default class ResumptionToken {
     private _resumptionId = '';
     private _startPosition = 0;
     private _totalIds = 0;
+    private _queryParams: Record<string, any> = {};

     get key(): string {
         return this.metadataPrefix + this.startPosition + this.totalIds;

@@ -48,4 +49,12 @@ export default class ResumptionToken {
     set totalIds(totalIds: number) {
         this._totalIds = totalIds;
     }
+
+    get queryParams(): Record<string, any> {
+        return this._queryParams;
+    }
+
+    set queryParams(params: Record<string, any>) {
+        this._queryParams = params;
+    }
 }
@@ -6,6 +6,6 @@ export default abstract class TokenWorkerContract {
     abstract connect(): void;
     abstract close(): void;
     abstract get(key: string): Promise<ResumptionToken | null>;
-    abstract set(token: ResumptionToken): Promise<string>;
+    abstract set(token: ResumptionToken, browserFingerprint: string): Promise<string>;
 }
@@ -40,14 +40,64 @@ export default class TokenWorkerService implements TokenWorkerContract {
         return result !== undefined && result !== null;
     }

-    public async set(token: ResumptionToken): Promise<string> {
-        const uniqueName = await this.generateUniqueName();
+    /**
+     * Simplified set method that stores the token using a browser fingerprint key.
+     * If the token for that fingerprint already exists and its documentIds match the new token,
+     * then the fingerprint key is simply returned.
+     */
+    public async set(token: ResumptionToken, browserFingerprint: string): Promise<string> {
+        // Generate a 15-digit unique number string based on the fingerprint
+        const uniqueNumberKey = this.createUniqueNumberFromFingerprint(browserFingerprint, token.documentIds, token.totalIds);
+        // Optionally, you could prefix it if desired, e.g. 'rs_' + uniqueNumberKey
+        const fingerprintKey = uniqueNumberKey;
+
+        const existingTokenString = await this.cache.get(fingerprintKey);
+
+        if (existingTokenString) {
+            const existingToken = this.parseToken(existingTokenString);
+            if (this.arraysAreEqual(existingToken.documentIds, token.documentIds)) {
+                return fingerprintKey;
+            }
+        }

         const serialToken = JSON.stringify(token);
-        await this.cache.setEx(uniqueName, this.ttl, serialToken);
-        return uniqueName;
+        await this.cache.setEx(fingerprintKey, this.ttl, serialToken);
+        return fingerprintKey;
     }

+    // Updated helper method to generate a unique key based on fingerprint and documentIds
+    private createUniqueNumberFromFingerprint(browserFingerprint: string, documentIds: number[], totalIds: number): string {
+        // Combine the fingerprint, document IDs and totalIds to produce the input string
+        const combined = browserFingerprint + ':' + documentIds.join('-') + ':' + totalIds;
+        // Simple hash algorithm
+        let hash = 0;
+        for (let i = 0; i < combined.length; i++) {
+            hash = (hash << 5) - hash + combined.charCodeAt(i);
+            hash |= 0; // Convert to 32-bit integer
+        }
+        // Ensure positive number and limit it to at most 15 digits
+        const positiveHash = Math.abs(hash) % 1000000000000000;
+        // Pad with trailing zeros to ensure a 15-digit string
+        return positiveHash.toString().padEnd(15, '0');
+    }
+
+    // Helper function to compare two arrays of numbers with identical order
+    private arraysAreEqual(arr1: number[], arr2: number[]): boolean {
+        if (arr1.length !== arr2.length) {
+            return false;
+        }
+        return arr1.every((num, index) => num === arr2[index]);
+    }
+
+    // public async set(token: ResumptionToken): Promise<string> {
+    //     const uniqueName = await this.generateUniqueName();
+
+    //     const serialToken = JSON.stringify(token);
+    //     await this.cache.setEx(uniqueName, this.ttl, serialToken);
+    //     return uniqueName;
+    // }
+
     private async generateUniqueName(): Promise<string> {
         let fc = 0;
         const uniqueId = dayjs().unix().toString();
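The practical effect of createUniqueNumberFromFingerprint is that the same client asking for the same page window gets the same 15-digit key back, so repeated identical requests do not pile up fresh cache entries. A standalone sketch of the hashing helper and its stability:

// Standalone sketch of the fingerprint-to-key hashing used above.
function createKey(fingerprint: string, documentIds: number[], totalIds: number): string {
    const combined = fingerprint + ':' + documentIds.join('-') + ':' + totalIds;
    let hash = 0;
    for (let i = 0; i < combined.length; i++) {
        hash = (hash << 5) - hash + combined.charCodeAt(i);
        hash |= 0; // keep it a 32-bit integer
    }
    const positiveHash = Math.abs(hash) % 1_000_000_000_000_000;
    return positiveHash.toString().padEnd(15, '0');
}

const a = createKey('Mozilla/5.0-203.0.113.7-de-AT-2025-01-01T14', [101, 102], 184);
const b = createKey('Mozilla/5.0-203.0.113.7-de-AT-2025-01-01T14', [101, 102], 184);
console.log(a === b); // true: same client + same page window => same cache key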
@@ -209,6 +209,15 @@ export default class Dataset extends DatasetExtension {
         return mainTitle ? mainTitle.value : null;
     }

+    @computed({
+        serializeAs: 'doi_identifier',
+    })
+    public get doiIdentifier() {
+        const identifier: DatasetIdentifier = this.identifier;
+        return identifier ? identifier.value : null;
+    }
+
     @manyToMany(() => Person, {
         pivotForeignKey: 'document_id',
         pivotRelatedForeignKey: 'person_id',
@@ -51,7 +51,7 @@ export default class Person extends BaseModel {
         serializeAs: 'name',
     })
     public get fullName() {
-        return `${this.firstName} ${this.lastName}`;
+        return [this.firstName, this.lastName].filter(Boolean).join(' ');
     }

     // @computed()

@@ -64,10 +64,13 @@ export default class Person extends BaseModel {
     //     return '2023-03-21 08:45:00';
     // }

-    @computed()
+    @computed({
+        serializeAs: 'dataset_count',
+    })
     public get datasetCount() {
         const stock = this.$extras.datasets_count; //my pivot column name was "stock"
-        return stock;
+        return Number(stock);
     }

     @computed()
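The new fullName avoids the stray space the old template literal produced when one name part is missing, a quick sketch:

// Sketch of the fullName improvement when the first name is empty.
const firstName = '';
const lastName = 'Curie';

const before = `${firstName} ${lastName}`; // " Curie" (leading space)
const after = [firstName, lastName].filter(Boolean).join(' '); // "Curie"
console.log(JSON.stringify(before), '->', JSON.stringify(after));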