feat: Enhance background job settings UI and functionality
Some checks failed: build.yaml (push) failing after 0s
- Updated BackgroundJob.vue to improve the display of background job statuses, including missing cross-references and the current job mode.
- Added auto-refresh functionality for background job status.
- Introduced success toast notifications for successful status refreshes.
- Modified the XML serialization process in DatasetXmlSerializer for better caching and performance (a usage sketch follows the commit metadata below).
- Implemented a new RuleProvider for managing custom validation rules.
- Improved error handling in routes for loading background job settings.
- Enhanced ClamScan configuration with socket support for virus scanning.
- Refactored the dayjs utility to streamline locale management.
Parent: 6757bdb77c
Commit: b5bbe26ec2
27 changed files with 1221 additions and 603 deletions
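The core serialization change replaces XmlModel with a fluent DatasetXmlSerializer. Here is a minimal usage sketch distilled from the diff below; the import paths, the method names (enableCaching, excludeEmptyFields, setCache, toXmlDocument), and the xmlCache relationship all appear in the changed file, while the wrapper function itself is illustrative:

import Dataset from '#models/dataset';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';

// Illustrative wrapper around the calls getDatasetXmlDomNode makes in the diff.
async function buildDatasetDom(dataset: Dataset): Promise<XMLBuilder | null> {
    const serializer = new DatasetXmlSerializer(dataset)
        .enableCaching()       // reuse the cached XML when available
        .excludeEmptyFields(); // drop empty model fields from the output

    // Make the cache relationship available and hand it to the serializer.
    await dataset.load('xmlCache');
    if (dataset.xmlCache) {
        serializer.setCache(dataset.xmlCache);
    }

    // Returns the cached DOM document, or builds a fresh one.
    return serializer.toXmlDocument();
}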
@@ -2,7 +2,7 @@ import Dataset from '#models/dataset';
 import { Client } from '@opensearch-project/opensearch';
 import { create } from 'xmlbuilder2';
 import SaxonJS from 'saxon-js';
-import XmlModel from '#app/Library/XmlModel';
+import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
 import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
 import logger from '@adonisjs/core/services/logger';
 import { readFileSync } from 'fs';
@@ -72,31 +72,42 @@ export default {
         }
     },
 
+    /**
+     * Index a dataset document to OpenSearch/Elasticsearch
+     */
     async indexDocument(dataset: Dataset, index_name: string): Promise<void> {
         try {
-            const proc = readFileSync('public/assets2/solr.sef.json');
-            const doc: string = await this.getTransformedString(dataset, proc);
+            // Load XSLT transformation file
+            const xsltProc = readFileSync('public/assets2/solr.sef.json');
 
-            let document = JSON.parse(doc);
+            // Transform dataset to JSON document
+            const jsonDoc: string = await this.getTransformedString(dataset, xsltProc);
+
+            const document = JSON.parse(jsonDoc);
 
             // Index document to OpenSearch with doument json body
             await this.client.index({
                 id: dataset.publish_id?.toString(),
                 index: index_name,
                 body: document,
-                refresh: true,
+                refresh: true, // make immediately searchable
            });
-            logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
+            logger.info(`Dataset ${dataset.publish_id} successfully indexed to ${index_name}`);
         } catch (error) {
-            logger.error(`An error occurred while indexing datsaet with publish_id ${dataset.publish_id}.`);
+            logger.error(`Failed to index dataset ${dataset.publish_id}: ${error.message}`);
+            throw error; // Re-throw to allow caller to handle
         }
     },
 
+    /**
+     * Transform dataset XML to JSON using XSLT
+     */
     async getTransformedString(dataset: Dataset, proc: Buffer): Promise<string> {
-        let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
-        const datasetNode = xml.root().ele('Dataset');
-        await createXmlRecord(dataset, datasetNode);
-        const xmlString = xml.end({ prettyPrint: false });
+        // Generate XML string from dataset
+        const xmlString = await this.generateDatasetXml(dataset);
 
         try {
+            // Apply XSLT transformation
             const result = await SaxonJS.transform({
                 stylesheetText: proc,
                 destination: 'serialized',
@@ -108,6 +119,18 @@ export default {
             return '';
         }
     },
+
+    /**
+     * Generate XML string from dataset model
+     */
+    async generateDatasetXml(dataset: Dataset): Promise<string> {
+        const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
+        const datasetNode = xml.root().ele('Dataset');
+
+        await createXmlRecord(dataset, datasetNode);
+
+        return xml.end({ prettyPrint: false });
+    },
 };
 
 /**
  * Return the default global focus trap stack
@@ -115,74 +138,49 @@ export default {
  * @return {import('focus-trap').FocusTrap[]}
  */
 
-// export const indexDocument = async (dataset: Dataset, index_name: string, proc: Buffer): Promise<void> => {
-// try {
-// const doc = await getJsonString(dataset, proc);
-
-// let document = JSON.parse(doc);
-// await client.index({
-// id: dataset.publish_id?.toString(),
-// index: index_name,
-// body: document,
-// refresh: true,
-// });
-// Logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
-// } catch (error) {
-// Logger.error(`An error occurred while indexing datsaet with publish_id ${dataset.publish_id}.`);
-// }
-// };
-
-// const getJsonString = async (dataset, proc): Promise<string> => {
-// let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
-// const datasetNode = xml.root().ele('Dataset');
-// await createXmlRecord(dataset, datasetNode);
-// const xmlString = xml.end({ prettyPrint: false });
-
-// try {
-// const result = await transform({
-// stylesheetText: proc,
-// destination: 'serialized',
-// sourceText: xmlString,
-// });
-// return result.principalResult;
-// } catch (error) {
-// Logger.error(`An error occurred while creating the user, error: ${error.message},`);
-// return '';
-// }
-// };
-
 /**
  * Create complete XML record for dataset
+ * Handles caching and metadata enrichment
  */
 const createXmlRecord = async (dataset: Dataset, datasetNode: XMLBuilder): Promise<void> => {
     const domNode = await getDatasetXmlDomNode(dataset);
-    if (domNode) {
-        // add frontdoor url and data-type
-        dataset.publish_id && addLandingPageAttribute(domNode, dataset.publish_id.toString());
-        addSpecInformation(domNode, 'data-type:' + dataset.type);
-        if (dataset.collections) {
-            for (const coll of dataset.collections) {
-                const collRole = coll.collectionRole;
-                addSpecInformation(domNode, collRole.oai_name + ':' + coll.number);
-            }
-        }
-
-        datasetNode.import(domNode);
+    if (!domNode) {
+        throw new Error(`Failed to generate XML DOM node for dataset ${dataset.id}`);
     }
+
+    // Enrich with landing page URL
+    if (dataset.publish_id) {
+        addLandingPageAttribute(domNode, dataset.publish_id.toString());
+    }
+
+    // Add data type specification
+    addSpecInformation(domNode, `data-type:${dataset.type}`);
+
+    // Add collection information
+    if (dataset.collections) {
+        for (const coll of dataset.collections) {
+            const collRole = coll.collectionRole;
+            addSpecInformation(domNode, `${collRole.oai_name}:${coll.number}`);
        }
+    }
+
+    datasetNode.import(domNode);
 };
 
 const getDatasetXmlDomNode = async (dataset: Dataset): Promise<XMLBuilder | null> => {
-    const xmlModel = new XmlModel(dataset);
-    // xmlModel.setModel(dataset);
-    xmlModel.excludeEmptyFields();
-    xmlModel.caching = true;
-    // const cache = dataset.xmlCache ? dataset.xmlCache : null;
-    // dataset.load('xmlCache');
+    const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
+
+    // Load cache relationship if not already loaded
+    await dataset.load('xmlCache');
     if (dataset.xmlCache) {
-        xmlModel.xmlCache = dataset.xmlCache;
+        serializer.setCache(dataset.xmlCache);
     }
 
-    // return cache.getDomDocument();
-    const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
-    return domDocument;
+    // Generate or retrieve cached DOM document
+    const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
+    return xmlDocument;
 };
 
 const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
@@ -192,6 +190,6 @@ const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
     domNode.att('landingpage', url);
 };
 
-const addSpecInformation= (domNode: XMLBuilder, information: string) => {
+const addSpecInformation = (domNode: XMLBuilder, information: string) => {
     domNode.ele('SetSpec').att('Value', information);
 };
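Taken together, the refactor splits indexing into three explicit steps: generateDatasetXml builds the XML record (answered from dataset.xmlCache when possible), getTransformedString applies the solr.sef.json stylesheet via SaxonJS, and indexDocument pushes the resulting JSON into OpenSearch and now re-throws on failure. A sketch of what a caller might look like; the import path of this module and the index name are assumptions, the rest follows the methods shown in the diff:

import Dataset from '#models/dataset';
// Hypothetical import path; the diff does not name this module.
import indexer from '#app/Library/OpenSearchIndexer';

export async function reindexDataset(publishId: number): Promise<void> {
    const dataset = await Dataset.query().where('publish_id', publishId).firstOrFail();
    try {
        // 'tethys-records' is an illustrative index name, not taken from the diff.
        await indexer.indexDocument(dataset, 'tethys-records');
    } catch (error) {
        // indexDocument re-throws, so the caller owns the recovery strategy
        // (retry, queueing, alerting).
        console.error(`Re-index of ${publishId} failed: ${error.message}`);
    }
}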