feat: Enhance background job settings UI and functionality
Some checks failed
build.yaml / feat: Enhance background job settings UI and functionality (push) Failing after 0s
- Updated BackgroundJob.vue to improve the display of background job statuses, including missing cross-references and the current job mode.
- Added auto-refresh functionality for the background job status (see the polling sketch below the change stats).
- Introduced success toast notifications for successful status refreshes.
- Modified the XML serialization process in DatasetXmlSerializer for better caching and performance.
- Implemented a new RuleProvider for managing custom validation rules.
- Improved error handling in the routes for loading background job settings.
- Enhanced the ClamScan configuration with socket support for virus scanning.
- Refactored the dayjs utility to streamline locale management.
This commit is contained in:
parent 6757bdb77c
commit b5bbe26ec2

27 changed files with 1221 additions and 603 deletions
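The auto-refresh and toast behaviour listed in the commit message lives in BackgroundJob.vue, which is not shown in the excerpt below. As orientation only, a minimal Vue 3 polling sketch; the endpoint URL, 30-second interval, status shape, and notifier are assumptions for illustration, not code from this commit:

    // <script setup lang="ts"> in a component like BackgroundJob.vue - illustrative sketch only
    import { ref, onMounted, onUnmounted } from 'vue';

    // Hypothetical status shape; the real payload comes from the settings routes
    interface JobStatus {
        mode: string;           // current job mode shown in the UI
        lastRun: string | null;
    }

    const status = ref<JobStatus | null>(null);
    let timer: ReturnType<typeof setInterval> | undefined;

    async function refreshStatus(notify = false): Promise<void> {
        const res = await fetch('/settings/background-job/status'); // assumed URL
        if (res.ok) {
            status.value = await res.json();
            if (notify) {
                // Stand-in for the app's success toast on a manual refresh
                console.info('Background job status refreshed');
            }
        }
    }

    onMounted(() => {
        refreshStatus();
        timer = setInterval(refreshStatus, 30_000); // assumed auto-refresh interval
    });
    onUnmounted(() => timer && clearInterval(timer));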
@@ -3,7 +3,7 @@ import { Client } from '@opensearch-project/opensearch';
 import User from '#models/user';
 import Dataset from '#models/dataset';
 import DatasetIdentifier from '#models/dataset_identifier';
-import XmlModel from '#app/Library/XmlModel';
+import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
 import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
 import { create } from 'xmlbuilder2';
 import { readFileSync } from 'fs';
@@ -574,55 +574,88 @@ export default class DatasetsController {

     public async doiStore({ request, response }: HttpContext) {
         const dataId = request.param('publish_id');
-        const dataset = await Dataset.query()
-            // .preload('xmlCache')
-            .where('publish_id', dataId)
-            .firstOrFail();
+        // Load dataset with minimal required relationships
+        const dataset = await Dataset.query().where('publish_id', dataId).firstOrFail();
+
+        const prefix = process.env.DATACITE_PREFIX || '';
+        const base_domain = process.env.BASE_DOMAIN || '';

+        // Generate DOI metadata XML
         const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

-        let prefix = '';
-        let base_domain = '';
-        // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
-        prefix = process.env.DATACITE_PREFIX || '';
-        base_domain = process.env.BASE_DOMAIN || '';
+        // Prepare DOI registration data
+        const doiValue = `${prefix}/tethys.${dataset.publish_id}`; //'10.21388/tethys.213'
+        const landingPageUrl = `https://doi.${getDomain(base_domain)}/${prefix}/tethys.${dataset.publish_id}`; //https://doi.dev.tethys.at/10.21388/tethys.213

-        // register DOI:
-        const doiValue = prefix + '/tethys.' + dataset.publish_id; //'10.21388/tethys.213'
-        const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; //https://doi.dev.tethys.at/10.21388/tethys.213
+        // Register DOI with DataCite
         const doiClient = new DoiClient();
         const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

-        if (dataciteResponse?.status === 201) {
-            // if response OK 201; save the Identifier value into db
-            const doiIdentifier = new DatasetIdentifier();
-            doiIdentifier.value = doiValue;
-            doiIdentifier.dataset_id = dataset.id;
-            doiIdentifier.type = 'doi';
-            doiIdentifier.status = 'findable';
-
-            // save updated dataset to db an index to OpenSearch
-            try {
-                // save modified date of datset for re-caching model in db an update the search index
-                dataset.server_date_modified = DateTime.now();
-                // autoUpdate: true only triggers when dataset.save() is called, not when saving a related model like below
-                await dataset.save();
-                await dataset.related('identifier').save(doiIdentifier);
-                const index_name = 'tethys-records';
-                await Index.indexDocument(dataset, index_name);
-            } catch (error) {
-                logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
-                // Log the error or handle it as needed
-                throw new HttpException(error.message);
-            }
-            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
-        } else {
+        if (dataciteResponse?.status !== 201) {
             const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
             // Log the error or handle it as needed
             throw new DoiClientException(dataciteResponse?.status, message);
         }

+        // DOI registration successful - persist and index
+        try {
+            // Save identifier
+            await this.persistDoiAndIndex(dataset, doiValue);
+
+            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
+        } catch (error) {
+            logger.error(`${__filename}: Failed to persist DOI and index dataset ${dataset.id}: ${error.message}`);
+            throw new HttpException(error.message);
+        }
+
         // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
     }

+    /**
+     * Persist DOI identifier and update search index
+     * Handles cache invalidation to ensure fresh indexing
+     */
+    private async persistDoiAndIndex(dataset: Dataset, doiValue: string): Promise<void> {
+        // Create DOI identifier
+        const doiIdentifier = new DatasetIdentifier();
+        doiIdentifier.value = doiValue;
+        doiIdentifier.dataset_id = dataset.id;
+        doiIdentifier.type = 'doi';
+        doiIdentifier.status = 'findable';
+
+        // Save identifier (this will trigger database insert)
+        await dataset.related('identifier').save(doiIdentifier);
+
+        // Update dataset modification timestamp to reflect the change
+        dataset.server_date_modified = DateTime.now();
+        await dataset.save();
+
+        // Invalidate stale XML cache
+        await this.invalidateDatasetCache(dataset);
+
+        // Reload dataset with fresh state for indexing
+        const freshDataset = await Dataset.query().where('id', dataset.id).preload('identifier').preload('xmlCache').firstOrFail();
+
+        // Index to OpenSearch with fresh data
+        const index_name = process.env.OPENSEARCH_INDEX || 'tethys-records';
+        await Index.indexDocument(freshDataset, index_name);
+
+        logger.info(`Successfully created DOI ${doiValue} and indexed dataset ${dataset.id}`);
+    }
+
+    /**
+     * Invalidate XML cache for dataset
+     * Ensures fresh cache generation on next access
+     */
+    private async invalidateDatasetCache(dataset: Dataset): Promise<void> {
+        await dataset.load('xmlCache');
+
+        if (dataset.xmlCache) {
+            await dataset.xmlCache.delete();
+            logger.debug(`Invalidated XML cache for dataset ${dataset.id}`);
+        }
+    }
+
     public async show({}: HttpContext) {}

     public async edit({ request, inertia, response }: HttpContext) {
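The hunk above calls doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl) but the DoiClient itself is not part of this excerpt. As a rough sketch only, a client for the public DataCite MDS API might look like the following; the base URL, environment variable names, and two-step PUT sequence are assumptions based on the public MDS documentation, not this repository's code:

    // Hypothetical sketch - not the repository's actual DoiClient
    import axios, { AxiosResponse } from 'axios';

    export class DoiClient {
        // MDS endpoint and credentials assumed to come from the environment
        private baseUrl = process.env.DATACITE_API_URL || 'https://mds.datacite.org';
        private auth = {
            username: process.env.DATACITE_USER || '',
            password: process.env.DATACITE_PASSWORD || '',
        };

        public async registerDoi(doiValue: string, xmlMeta: string, landingPageUrl: string): Promise<AxiosResponse> {
            // Step 1: store the metadata record for the DOI
            await axios.put(`${this.baseUrl}/metadata/${doiValue}`, xmlMeta, {
                auth: this.auth,
                headers: { 'Content-Type': 'application/xml;charset=UTF-8' },
            });

            // Step 2: mint the DOI by binding it to the landing page URL
            // A 201 status here is what doiStore checks for
            return axios.put(`${this.baseUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, {
                auth: this.auth,
                headers: { 'Content-Type': 'text/plain;charset=UTF-8' },
            });
        }
    }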
@@ -1124,14 +1157,14 @@ export default class DatasetsController {

         // Set the response headers and download the file
         response
-            .header('Cache-Control', 'no-cache private')
-            .header('Content-Description', 'File Transfer')
-            .header('Content-Type', file.mime_type || 'application/octet-stream')
-            // .header('Content-Disposition', 'inline; filename=' + fileName)
-            .header('Content-Transfer-Encoding', 'binary')
-            .header('Access-Control-Allow-Origin', '*')
-            .header('Access-Control-Allow-Methods', 'GET');
-        response.attachment(fileName);
+            .header('Cache-Control', 'no-cache private')
+            .header('Content-Description', 'File Transfer')
+            .header('Content-Type', file.mime_type || 'application/octet-stream')
+            // .header('Content-Disposition', 'inline; filename=' + fileName)
+            .header('Content-Transfer-Encoding', 'binary')
+            .header('Access-Control-Allow-Origin', '*')
+            .header('Access-Control-Allow-Methods', 'GET');
+        response.attachment(fileName);

         return response.download(filePath);
     }
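This hunk shows only the header chain at the tail of the file-download action. For orientation, a minimal sketch of how such an AdonisJS action could fit together end to end; the controller name, model, route parameter, and storage path are assumptions, not code from this commit:

    // Hypothetical reconstruction of the surrounding action - assumed names noted inline
    import type { HttpContext } from '@adonisjs/core/http';
    import File from '#models/file'; // assumed model carrying mime_type

    export default class FileDownloadController {
        public async download({ params, response }: HttpContext) {
            // Look up the file record; `params.id` and `path_name` are assumed names
            const file = await File.findOrFail(params.id);
            const fileName = file.path_name;
            const filePath = `storage/files/${fileName}`; // assumed storage layout

            // Same header chain as in the hunk above
            response
                .header('Cache-Control', 'no-cache private')
                .header('Content-Description', 'File Transfer')
                .header('Content-Type', file.mime_type || 'application/octet-stream')
                .header('Content-Transfer-Encoding', 'binary')
                .header('Access-Control-Allow-Origin', '*')
                .header('Access-Control-Allow-Methods', 'GET');
            response.attachment(fileName);

            return response.download(filePath);
        }
    }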
@@ -1144,19 +1177,18 @@ export default class DatasetsController {
         }
     }

-    private async getDatasetXmlDomNode(dataset: Dataset) {
-        const xmlModel = new XmlModel(dataset);
-        // xmlModel.setModel(dataset);
-        xmlModel.excludeEmptyFields();
-        xmlModel.caching = true;
-        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
-        // dataset.load('xmlCache');
+    private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
+        const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();

+        // Load existing cache if available
         await dataset.load('xmlCache');
         if (dataset.xmlCache) {
-            xmlModel.xmlCache = dataset.xmlCache;
+            serializer.setCache(dataset.xmlCache);
         }

-        // return cache.getDomDocument();
-        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
-        return domDocument;
+        const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
+        return xmlDocument;
     }
 }
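Based on the calls visible in this hunk (enableCaching, excludeEmptyFields, setCache, toXmlDocument), the new DatasetXmlSerializer presumably exposes a fluent API along these lines; the field names and internals below are guesses for illustration, not the class from this commit:

    // Sketch of the fluent surface implied by getDatasetXmlDomNode - internals are assumptions
    import { create } from 'xmlbuilder2';
    import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
    import Dataset from '#models/dataset';

    // Structural stand-in for the xmlCache relation; xml_data is an assumed column name
    interface XmlCacheLike {
        xml_data: string;
    }

    export default class DatasetXmlSerializer {
        private caching = false;
        private skipEmptyFields = false;
        private cache: XmlCacheLike | null = null;

        constructor(private dataset: Dataset) {}

        // Fluent toggles return `this` so calls chain as in the diff
        public enableCaching(): this {
            this.caching = true;
            return this;
        }

        public excludeEmptyFields(): this {
            this.skipEmptyFields = true;
            return this;
        }

        public setCache(cache: XmlCacheLike): this {
            this.cache = cache;
            return this;
        }

        public async toXmlDocument(): Promise<XMLBuilder | null> {
            // Serve the cached document when caching is enabled and a cache row was provided
            if (this.caching && this.cache) {
                return create(this.cache.xml_data);
            }
            // Otherwise rebuild from the model (and presumably refresh the cache)
            return this.buildFromModel();
        }

        private async buildFromModel(): Promise<XMLBuilder | null> {
            // The real implementation assembles the dataset XML, honouring skipEmptyFields;
            // a stub document stands in for it here
            return create({ version: '1.0' }).ele('Dataset').doc();
        }
    }

The fluent chaining keeps the call site in getDatasetXmlDomNode to a single line while still letting the caller inject the preloaded xmlCache relation before serialization.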