feat: Enhance background job settings UI and functionality
- Updated BackgroundJob.vue to improve the display of background job statuses, including missing cross-references and current job mode.
- Added auto-refresh functionality for background job status.
- Introduced success toast notifications for successful status refreshes.
- Modified the XML serialization process in DatasetXmlSerializer for better caching and performance.
- Implemented a new RuleProvider for managing custom validation rules.
- Improved error handling in routes for loading background job settings.
- Enhanced ClamScan configuration with socket support for virus scanning.
- Refactored dayjs utility to streamline locale management.
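Note: a minimal sketch of the refactored DatasetXmlSerializer call pattern, assembled from the call sites in the diff below (enableCaching(), excludeEmptyFields(), setCache(), toXmlDocument()); the wrapper function itself is illustrative and not part of this commit:

import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import Dataset from '#models/dataset';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';

// Serialize a dataset to XML, reusing the cached document when one exists.
// (Illustrative helper; the commit wires this same pattern into the controllers.)
async function serializeDatasetXml(dataset: Dataset): Promise<XMLBuilder | null> {
    const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();

    // Hand an existing cache row to the serializer so it can skip regeneration
    await dataset.load('xmlCache');
    if (dataset.xmlCache) {
        serializer.setCache(dataset.xmlCache);
    }

    return await serializer.toXmlDocument();
}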
Parent: 6757bdb77c
Commit: b5bbe26ec2

27 changed files with 1221 additions and 603 deletions
@@ -1,6 +1,7 @@
 import type { HttpContext } from '@adonisjs/core/http';
 import Dataset from '#models/dataset';
 import { StatusCodes } from 'http-status-codes';
+import DatasetReference from '#models/dataset_reference';

 // node ace make:controller Author
 export default class DatasetController {
@@ -81,11 +82,11 @@ export default class DatasetController {
             .preload('licenses')
             .preload('references')
             .preload('project')
-            // .preload('referenced_by', (builder) => {
-            //     builder.preload('dataset', (builder) => {
-            //         builder.preload('identifier');
-            //     });
-            // })
+            .preload('referenced_by', (builder) => {
+                builder.preload('dataset', (builder) => {
+                    builder.preload('identifier');
+                });
+            })
             .preload('files', (builder) => {
                 builder.preload('hashvalues');
             })
@@ -98,7 +99,17 @@ export default class DatasetController {
                 });
             }

-            return response.status(StatusCodes.OK).json(dataset);
+            // Build the version chain
+            const versionChain = await this.buildVersionChain(dataset);
+
+            // Add version chain to response
+            const responseData = {
+                ...dataset.toJSON(),
+                versionChain: versionChain,
+            };
+
+            // return response.status(StatusCodes.OK).json(dataset);
+            return response.status(StatusCodes.OK).json(responseData);
         } catch (error) {
             return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
                 message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
@@ -159,11 +170,11 @@ export default class DatasetController {
             .preload('licenses')
             .preload('references')
             .preload('project')
-            // .preload('referenced_by', (builder) => {
-            //     builder.preload('dataset', (builder) => {
-            //         builder.preload('identifier');
-            //     });
-            // })
+            .preload('referenced_by', (builder) => {
+                builder.preload('dataset', (builder) => {
+                    builder.preload('identifier');
+                });
+            })
             .preload('files', (builder) => {
                 builder.preload('hashvalues');
             })
@@ -175,12 +186,139 @@ export default class DatasetController {
                 message: `Cannot find Dataset with identifier=${identifierValue}.`,
             });
         }

-            return response.status(StatusCodes.OK).json(dataset);
+            // Build the version chain
+            const versionChain = await this.buildVersionChain(dataset);
+
+            // Add version chain to response
+            const responseData = {
+                ...dataset.toJSON(),
+                versionChain: versionChain,
+            };
+
+            // return response.status(StatusCodes.OK).json(dataset);
+            return response.status(StatusCodes.OK).json(responseData);
         } catch (error) {
             return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
                 message: error.message || `Error retrieving Dataset with identifier=${identifierValue}.`,
             });
         }
     }

+    /**
+     * Build the complete version chain for a dataset
+     * Traverses both backwards (previous versions) and forwards (newer versions)
+     */
+    private async buildVersionChain(dataset: Dataset) {
+        const versionChain = {
+            current: {
+                id: dataset.id,
+                publish_id: dataset.publish_id,
+                doi: dataset.identifier?.value || null,
+                main_title: dataset.mainTitle || null,
+                server_date_published: dataset.server_date_published,
+            },
+            previousVersions: [] as any[],
+            newerVersions: [] as any[],
+        };
+
+        // Get all previous versions (going backwards in time)
+        versionChain.previousVersions = await this.getPreviousVersions(dataset.id);
+
+        // Get all newer versions (going forwards in time)
+        versionChain.newerVersions = await this.getNewerVersions(dataset.id);
+
+        return versionChain;
+    }
+
+    /**
+     * Recursively get all previous versions
+     */
+    private async getPreviousVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
+        // Prevent infinite loops
+        if (visited.has(datasetId)) {
+            return [];
+        }
+        visited.add(datasetId);
+
+        const previousVersions: any[] = [];
+
+        // Find references where this dataset "IsNewVersionOf" another dataset
+        const previousRefs = await DatasetReference.query()
+            .where('document_id', datasetId)
+            .where('relation', 'IsNewVersionOf')
+            .whereNotNull('related_document_id');
+
+        for (const ref of previousRefs) {
+            if (!ref.related_document_id) continue;
+
+            const previousDataset = await Dataset.query()
+                .where('id', ref.related_document_id)
+                .preload('identifier')
+                .preload('titles')
+                .first();
+
+            if (previousDataset) {
+                const versionInfo = {
+                    id: previousDataset.id,
+                    publish_id: previousDataset.publish_id,
+                    doi: previousDataset.identifier?.value || null,
+                    main_title: previousDataset.mainTitle || null,
+                    server_date_published: previousDataset.server_date_published,
+                    relation: 'IsPreviousVersionOf', // From perspective of current dataset
+                };
+
+                previousVersions.push(versionInfo);
+
+                // Recursively get even older versions
+                const olderVersions = await this.getPreviousVersions(previousDataset.id, visited);
+                previousVersions.push(...olderVersions);
+            }
+        }
+
+        return previousVersions;
+    }
+
+    /**
+     * Recursively get all newer versions
+     */
+    private async getNewerVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
+        // Prevent infinite loops
+        if (visited.has(datasetId)) {
+            return [];
+        }
+        visited.add(datasetId);
+
+        const newerVersions: any[] = [];
+
+        // Find references where this dataset "IsPreviousVersionOf" another dataset
+        const newerRefs = await DatasetReference.query()
+            .where('document_id', datasetId)
+            .where('relation', 'IsPreviousVersionOf')
+            .whereNotNull('related_document_id');
+
+        for (const ref of newerRefs) {
+            if (!ref.related_document_id) continue;
+
+            const newerDataset = await Dataset.query().where('id', ref.related_document_id).preload('identifier').preload('titles').first();
+
+            if (newerDataset) {
+                const versionInfo = {
+                    id: newerDataset.id,
+                    publish_id: newerDataset.publish_id,
+                    doi: newerDataset.identifier?.value || null,
+                    main_title: newerDataset.mainTitle || null,
+                    server_date_published: newerDataset.server_date_published,
+                    relation: 'IsNewVersionOf', // From perspective of current dataset
+                };
+
+                newerVersions.push(versionInfo);
+
+                // Recursively get even newer versions
+                const evenNewerVersions = await this.getNewerVersions(newerDataset.id, visited);
+                newerVersions.push(...evenNewerVersions);
+            }
+        }
+
+        return newerVersions;
+    }
 }
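Note: an illustrative shape of the versionChain object the controller now attaches to the JSON response; the field names come from the methods above, the values are hypothetical:

const exampleVersionChain = {
    current: {
        id: 42,
        publish_id: 213,
        doi: '10.21388/tethys.213',
        main_title: 'Example dataset (v2)',
        server_date_published: '2024-05-01T00:00:00.000+00:00',
    },
    previousVersions: [
        {
            id: 17,
            publish_id: 180,
            doi: '10.21388/tethys.180',
            main_title: 'Example dataset (v1)',
            server_date_published: '2023-01-15T00:00:00.000+00:00',
            relation: 'IsPreviousVersionOf', // from the perspective of the current dataset
        },
    ],
    newerVersions: [], // filled by the same traversal in the opposite direction
};

The visited set in both traversal helpers guards against reference cycles, so a malformed pair of datasets that point at each other cannot recurse forever.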
@@ -25,10 +25,10 @@ export default class FileController {
         const dataset = file.dataset;
         // Files from unpublished datasets are now blocked
         if (dataset.server_state !== 'published') {
-        return response.status(StatusCodes.FORBIDDEN).send({
-            message: `File access denied: Dataset is not published.`,
-        });
-        }
+            return response.status(StatusCodes.FORBIDDEN).send({
+                message: `File access denied: Dataset is not published.`,
+            });
+        }
         if (dataset && this.isUnderEmbargo(dataset.embargo_date)) {
             return response.status(StatusCodes.FORBIDDEN).send({
                 message: `File is under embargo until ${dataset.embargo_date?.toFormat('yyyy-MM-dd')}`,
@@ -36,12 +36,26 @@ export default class FileController {
         }

         // Proceed with file download
         const filePath = '/storage/app/data/' + file.pathName;
         const fileExt = file.filePath.split('.').pop() || '';
         // const fileName = file.label + fileExt;
-        const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`)
-            ? file.label
-            : `${file.label}.${fileExt}`;
+        const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
+
+        // Determine if file can be previewed inline in browser
+        const canPreviewInline = (mimeType: string): boolean => {
+            const type = mimeType.toLowerCase();
+            return (
+                type === 'application/pdf' ||
+                type.startsWith('image/') ||
+                type.startsWith('text/') ||
+                type === 'application/json' ||
+                type === 'application/xml' ||
+                // Uncomment if you want video/audio inline
+                type.startsWith('video/') ||
+                type.startsWith('audio/')
+            );
+        };
+        const disposition = canPreviewInline(file.mimeType) ? 'inline' : 'attachment';

         try {
             fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
@@ -51,7 +65,7 @@
                 .header('Cache-Control', 'no-cache private')
                 .header('Content-Description', 'File Transfer')
                 .header('Content-Type', file.mimeType)
-                .header('Content-Disposition', 'inline; filename=' + fileName)
+                .header('Content-Disposition', `${disposition}; filename="${fileName}"`)
                 .header('Content-Transfer-Encoding', 'binary')
                 .header('Access-Control-Allow-Origin', '*')
                 .header('Access-Control-Allow-Methods', 'GET');
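Note: a standalone re-statement of the new disposition logic for illustration; the MIME types and file names are examples, not part of the diff:

const inlineTypes = ['application/pdf', 'application/json', 'application/xml'];
const inlinePrefixes = ['image/', 'text/', 'video/', 'audio/'];

// Mirror of canPreviewInline(): previewable types are served inline, everything else downloads.
function disposition(mimeType: string): 'inline' | 'attachment' {
    const type = mimeType.toLowerCase();
    return inlineTypes.includes(type) || inlinePrefixes.some((p) => type.startsWith(p)) ? 'inline' : 'attachment';
}

console.log(disposition('application/pdf')); // 'inline' -> previewed in the browser
console.log(disposition('application/zip')); // 'attachment' -> forced download

Quoting the filename in the Content-Disposition header also keeps labels containing spaces intact.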
@@ -3,7 +3,7 @@ import { Client } from '@opensearch-project/opensearch';
 import User from '#models/user';
 import Dataset from '#models/dataset';
 import DatasetIdentifier from '#models/dataset_identifier';
-import XmlModel from '#app/Library/XmlModel';
+import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
 import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
 import { create } from 'xmlbuilder2';
 import { readFileSync } from 'fs';
@@ -574,55 +574,88 @@

     public async doiStore({ request, response }: HttpContext) {
         const dataId = request.param('publish_id');
-        const dataset = await Dataset.query()
-            // .preload('xmlCache')
-            .where('publish_id', dataId)
-            .firstOrFail();
+        // Load dataset with minimal required relationships
+        const dataset = await Dataset.query().where('publish_id', dataId).firstOrFail();

+        const prefix = process.env.DATACITE_PREFIX || '';
+        const base_domain = process.env.BASE_DOMAIN || '';
+
+        // Generate DOI metadata XML
         const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

-        let prefix = '';
-        let base_domain = '';
-        // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
-        prefix = process.env.DATACITE_PREFIX || '';
-        base_domain = process.env.BASE_DOMAIN || '';
-
-        // register DOI:
-        const doiValue = prefix + '/tethys.' + dataset.publish_id; //'10.21388/tethys.213'
-        const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; //https://doi.dev.tethys.at/10.21388/tethys.213
+        // Prepare DOI registration data
+        const doiValue = `${prefix}/tethys.${dataset.publish_id}`; //'10.21388/tethys.213'
+        const landingPageUrl = `https://doi.${getDomain(base_domain)}/${prefix}/tethys.${dataset.publish_id}`; //https://doi.dev.tethys.at/10.21388/tethys.213
+
+        // Register DOI with DataCite
         const doiClient = new DoiClient();
         const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

-        if (dataciteResponse?.status === 201) {
-            // if response OK 201; save the Identifier value into db
-            const doiIdentifier = new DatasetIdentifier();
-            doiIdentifier.value = doiValue;
-            doiIdentifier.dataset_id = dataset.id;
-            doiIdentifier.type = 'doi';
-            doiIdentifier.status = 'findable';
-
-            // save updated dataset to db an index to OpenSearch
-            try {
-                // save modified date of datset for re-caching model in db an update the search index
-                dataset.server_date_modified = DateTime.now();
-                // autoUpdate: true only triggers when dataset.save() is called, not when saving a related model like below
-                await dataset.save();
-                await dataset.related('identifier').save(doiIdentifier);
-                const index_name = 'tethys-records';
-                await Index.indexDocument(dataset, index_name);
-            } catch (error) {
-                logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
-                // Log the error or handle it as needed
-                throw new HttpException(error.message);
-            }
-            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
-        } else {
+        if (dataciteResponse?.status !== 201) {
             const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
             // Log the error or handle it as needed
             throw new DoiClientException(dataciteResponse?.status, message);
         }
-
-        // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
+
+        // DOI registration successful - persist and index
+        try {
+            // Save identifier
+            await this.persistDoiAndIndex(dataset, doiValue);
+
+            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
+        } catch (error) {
+            logger.error(`${__filename}: Failed to persist DOI and index dataset ${dataset.id}: ${error.message}`);
+            throw new HttpException(error.message);
+        }
     }

+    /**
+     * Persist DOI identifier and update search index
+     * Handles cache invalidation to ensure fresh indexing
+     */
+    private async persistDoiAndIndex(dataset: Dataset, doiValue: string): Promise<void> {
+        // Create DOI identifier
+        const doiIdentifier = new DatasetIdentifier();
+        doiIdentifier.value = doiValue;
+        doiIdentifier.dataset_id = dataset.id;
+        doiIdentifier.type = 'doi';
+        doiIdentifier.status = 'findable';
+
+        // Save identifier (this will trigger database insert)
+        await dataset.related('identifier').save(doiIdentifier);
+
+        // Update dataset modification timestamp to reflect the change
+        dataset.server_date_modified = DateTime.now();
+        await dataset.save();
+
+        // Invalidate stale XML cache
+        await this.invalidateDatasetCache(dataset);
+
+        // Reload dataset with fresh state for indexing
+        const freshDataset = await Dataset.query().where('id', dataset.id).preload('identifier').preload('xmlCache').firstOrFail();
+
+        // Index to OpenSearch with fresh data
+        const index_name = process.env.OPENSEARCH_INDEX || 'tethys-records';
+        await Index.indexDocument(freshDataset, index_name);
+
+        logger.info(`Successfully created DOI ${doiValue} and indexed dataset ${dataset.id}`);
+    }
+
+    /**
+     * Invalidate XML cache for dataset
+     * Ensures fresh cache generation on next access
+     */
+    private async invalidateDatasetCache(dataset: Dataset): Promise<void> {
+        await dataset.load('xmlCache');
+
+        if (dataset.xmlCache) {
+            await dataset.xmlCache.delete();
+            logger.debug(`Invalidated XML cache for dataset ${dataset.id}`);
+        }
+    }

     public async show({}: HttpContext) {}

     public async edit({ request, inertia, response }: HttpContext) {
@@ -1124,14 +1157,14 @@

         // Set the response headers and download the file
         response
-            .header('Cache-Control', 'no-cache private')
-            .header('Content-Description', 'File Transfer')
-            .header('Content-Type', file.mime_type || 'application/octet-stream')
-            // .header('Content-Disposition', 'inline; filename=' + fileName)
-            .header('Content-Transfer-Encoding', 'binary')
-            .header('Access-Control-Allow-Origin', '*')
-            .header('Access-Control-Allow-Methods', 'GET');
-        response.attachment(fileName);
+            .header('Cache-Control', 'no-cache private')
+            .header('Content-Description', 'File Transfer')
+            .header('Content-Type', file.mime_type || 'application/octet-stream')
+            // .header('Content-Disposition', 'inline; filename=' + fileName)
+            .header('Content-Transfer-Encoding', 'binary')
+            .header('Access-Control-Allow-Origin', '*')
+            .header('Access-Control-Allow-Methods', 'GET');
+        response.attachment(fileName);
         return response.download(filePath);
     }
@@ -1144,19 +1177,18 @@
         }
     }

-    private async getDatasetXmlDomNode(dataset: Dataset) {
-        const xmlModel = new XmlModel(dataset);
-        // xmlModel.setModel(dataset);
-        xmlModel.excludeEmptyFields();
-        xmlModel.caching = true;
-        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
-        // dataset.load('xmlCache');
+    private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
+        const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();

+        // Load existing cache if available
+        await dataset.load('xmlCache');
         if (dataset.xmlCache) {
-            xmlModel.xmlCache = dataset.xmlCache;
+            serializer.setCache(dataset.xmlCache);
         }

-        // return cache.getDomDocument();
-        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
-        return domDocument;
+        const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
+        return xmlDocument;
     }
 }
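Note: the refactored DOI flow above now fails fast on a non-201 DataCite response and funnels all persistence through one helper. In outline (the helper names are the ones introduced in this commit):

// 1. doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl)
//    -> throws DoiClientException on any non-201 response.
// 2. persistDoiAndIndex(dataset, doiValue):
//    a. save the DatasetIdentifier (type 'doi', status 'findable'),
//    b. bump dataset.server_date_modified and save,
//    c. invalidateDatasetCache() deletes the stale xmlCache row,
//    d. reload the dataset with 'identifier' and 'xmlCache', then Index.indexDocument().
// Deleting the cache before re-indexing is what guarantees the OpenSearch
// document is rebuilt from fresh XML instead of the pre-DOI cache.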
@@ -15,7 +15,7 @@ import { OaiModelException, BadOaiModelException } from '#app/exceptions/OaiMode
 import Dataset from '#models/dataset';
 import Collection from '#models/collection';
 import { getDomain, preg_match } from '#app/utils/utility-functions';
-import XmlModel from '#app/Library/XmlModel';
+import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
 import logger from '@adonisjs/core/services/logger';
 import ResumptionToken from '#app/Library/Oai/ResumptionToken';
 // import Config from '@ioc:Adonis/Core/Config';
@@ -292,7 +292,7 @@
         this.xsltParameter['repIdentifier'] = repIdentifier;
         const datasetNode = this.xml.root().ele('Datasets');

-        const paginationParams: PagingParameter ={
+        const paginationParams: PagingParameter = {
             cursor: 0,
             totalLength: 0,
             start: maxRecords + 1,
@@ -333,7 +333,7 @@
     }

     private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) {
-        this.validateMetadataPrefix(oaiRequest, paginationParams);
+        this.validateMetadataPrefix(oaiRequest, paginationParams);
         const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
             'server_state',
             this.deliveringDocumentStates,
@@ -347,16 +347,20 @@
         finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
         paginationParams: PagingParameter,
         oaiRequest: Dictionary,
-        maxRecords: number
+        maxRecords: number,
     ) {
         const totalResult = await finder
             .clone()
             .count('* as total')
             .first()
             .then((res) => res?.$extras.total);
-        paginationParams.totalLength = Number(totalResult);
+        paginationParams.totalLength = Number(totalResult);

-        const combinedRecords: Dataset[] = await finder.select('publish_id').orderBy('publish_id').offset(0).limit(maxRecords*2);
+        const combinedRecords: Dataset[] = await finder
+            .select('publish_id')
+            .orderBy('publish_id')
+            .offset(0)
+            .limit(maxRecords * 2);

         paginationParams.activeWorkIds = combinedRecords.slice(0, 100).map((dat) => Number(dat.publish_id));
         paginationParams.nextDocIds = combinedRecords.slice(100).map((dat) => Number(dat.publish_id));
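Note: a minimal sketch of the paging split above, assuming maxRecords = 100; the query fetches two pages' worth of publish_ids, delivers the first page, and seeds the resumption token with the rest. The slice boundary is hard-coded to 100 even though the limit is maxRecords * 2:

const publishIds: number[] = [/* up to maxRecords * 2 ids from the finder */];
const activeWorkIds = publishIds.slice(0, 100); // records delivered in this response
const nextDocIds = publishIds.slice(100);       // carried into the next ResumptionToken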
@@ -602,19 +606,17 @@
     }

     private async getDatasetXmlDomNode(dataset: Dataset) {
-        const xmlModel = new XmlModel(dataset);
-        // xmlModel.setModel(dataset);
-        xmlModel.excludeEmptyFields();
-        xmlModel.caching = true;
+        const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();

-        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
-        // dataset.load('xmlCache');
         if (dataset.xmlCache) {
-            xmlModel.xmlCache = dataset.xmlCache;
+            serializer.setCache(dataset.xmlCache);
         }

-        // return cache.getDomDocument();
-        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
-        return domDocument;
+        // return cache.toXmlDocument();
+        const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
+        return xmlDocument;
     }

     private addSpecInformation(domNode: XMLBuilder, information: string) {