All checks were successful
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 11s
- Updated doi_datacite.xslt to include conditional checks for empty values in creators, titles, subjects, contributors, dates, rights, sizes, formats, descriptions, and geoLocations to ensure only non-empty elements are rendered. - Enhanced the handling of TitleAbstract and TitleAbstractAdditional templates to only output descriptions if the Value attribute is not empty. - Modified the Doi.vue component to clean up the import statements and commented out unused warning display code for clarity.
1283 lines
54 KiB
TypeScript
1283 lines
54 KiB
TypeScript
import type { HttpContext } from '@adonisjs/core/http';
|
|
import { Client } from '@opensearch-project/opensearch';
|
|
import User from '#models/user';
|
|
import Dataset from '#models/dataset';
|
|
import DatasetIdentifier from '#models/dataset_identifier';
|
|
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
|
|
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
|
|
import { create } from 'xmlbuilder2';
|
|
import { readFileSync } from 'fs';
|
|
import SaxonJS from 'saxon-js';
|
|
import { DateTime } from 'luxon';
|
|
import Index from '#app/Library/Utils/Index';
|
|
import { getDomain } from '#app/utils/utility-functions';
|
|
import { DoiClient } from '#app/Library/Doi/DoiClient';
|
|
import DoiClientException from '#app/exceptions/DoiClientException';
|
|
import logger from '@adonisjs/core/services/logger';
|
|
import { HttpException } from 'node-exceptions';
|
|
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
|
|
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
|
|
import mail from '@adonisjs/mail/services/main';
|
|
import { validate } from 'deep-email-validator';
|
|
import {
|
|
TitleTypes,
|
|
DescriptionTypes,
|
|
ContributorTypes,
|
|
PersonNameTypes,
|
|
ReferenceIdentifierTypes,
|
|
RelationTypes,
|
|
SubjectTypes,
|
|
DatasetTypes,
|
|
} from '#contracts/enums';
|
|
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
|
|
import db from '@adonisjs/lucid/services/db';
|
|
import Project from '#models/project';
|
|
import License from '#models/license';
|
|
import Language from '#models/language';
|
|
import File from '#models/file';
|
|
import Coverage from '#models/coverage';
|
|
import Title from '#models/title';
|
|
import Description from '#models/description';
|
|
import Subject from '#models/subject';
|
|
import DatasetReference from '#models/dataset_reference';
|
|
import Collection from '#models/collection';
|
|
import CollectionRole from '#models/collection_role';
|
|
import { updateEditorDatasetValidator } from '#validators/dataset';
|
|
import { savePersons } from '#app/utils/utility-functions';
|
|
|
|
// Create a new instance of the client
|
|
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
|
|
|
|
export default class DatasetsController {
|
|
// Compiled Saxon stylesheet (SEF JSON) loaded once at construction time;
// readFileSync without an encoding returns a Buffer (was implicitly `any`).
private proc: Buffer;

// Custom validation messages for the vine validators used below.
public messages = {
    // 'required': '{{ field }} is required',
    // 'licenses.minLength': 'at least {{ options.minLength }} permission must be defined',
    'reviewer_id.required': 'reviewer_id must be defined',
    'publisher_name.required': 'publisher name must be defined',
};

constructor() {
    // Load the XSLT file (compiled solr transformation).
    this.proc = readFileSync('public/assets2/solr.sef.json');
    // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
}
|
|
|
|
// public async index({}: HttpContextContract) {}
|
|
/**
 * List datasets relevant to the current editor, paginated.
 *
 * Shows all 'released' datasets plus datasets in later workflow states
 * ('editor_accepted', 'rejected_reviewer', 'reviewed', 'published') that are
 * assigned to this editor and have no DOI identifier yet.
 * Supports `?sort=column` / `?sort=-column`; otherwise datasets rejected by a
 * reviewer are listed first.
 */
public async index({ auth, request, inertia }: HttpContext) {
    const user = (await User.find(auth.user?.id)) as User;
    const page = request.input('page', 1);
    let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

    if (request.input('sort')) {
        type SortOrder = 'asc' | 'desc' | undefined;
        let attribute = request.input('sort');
        let sortOrder: SortOrder = 'asc';

        // A leading '-' requests descending order, e.g. ?sort=-id.
        // (startsWith/slice replace the deprecated String.prototype.substr.)
        if (attribute.startsWith('-')) {
            sortOrder = 'desc';
            attribute = attribute.slice(1);
        }
        datasets.orderBy(attribute, sortOrder);
    } else {
        // Custom ordering to prioritize datasets rejected by a reviewer.
        datasets.orderByRaw(`
            CASE
                WHEN server_state = 'rejected_reviewer' THEN 0
                ELSE 1
            END ASC,
            id ASC
        `);
    }

    const myDatasets = await datasets
        .where('server_state', 'released')
        .orWhere((dQuery) => {
            dQuery
                .whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
                .where('editor_id', user.id)
                // only datasets that do not have a DOI identifier yet
                .doesntHave('identifier', 'and');
        })
        .preload('titles')
        .preload('user', (query) => query.select('id', 'login'))
        .preload('editor', (query) => query.select('id', 'login'))
        .paginate(page, 10);

    return inertia.render('Editor/Dataset/Index', {
        datasets: myDatasets.serialize(),
        filters: request.all(),
        // Per-action permissions resolved for the current user.
        can: {
            receive: await auth.user?.can(['dataset-receive']),
            approve: await auth.user?.can(['dataset-approve']),
            reject: await auth.user?.can(['dataset-editor-reject']),
            edit: await auth.user?.can(['dataset-editor-update']),
            delete: await auth.user?.can(['dataset-editor-delete']),
            publish: await auth.user?.can(['dataset-publish']),
        },
    });
}
|
|
|
|
/**
 * Show the "receive" confirmation page for a released dataset.
 * Only datasets with server_state 'released' can be received by an editor.
 */
public async receive({ request, inertia, response }: HttpContext) {
    const id = request.param('id');
    const dataset = await Dataset.query()
        .where('id', id)
        .preload('titles')
        .preload('descriptions')
        .preload('user', (builder) => {
            builder.select('id', 'login');
        })
        .firstOrFail();

    const validStates = ['released'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be received. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    return inertia.render('Editor/Dataset/Receive', {
        dataset,
    });
}
|
|
|
|
/**
 * Accept a released dataset: set server_state to 'editor_accepted' and
 * associate the current user as its editor.
 */
public async receiveUpdate({ auth, request, response }: HttpContext) {
    const id = request.param('id');
    const dataset = await Dataset.findOrFail(id);

    const validStates = ['released'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be received by editor. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    dataset.server_state = 'editor_accepted';
    const user = (await User.find(auth.user?.id)) as User;
    try {
        // associate() already persists the dataset — no extra save() needed.
        await dataset.related('editor').associate(user);
        // NOTE(review): flash argument order here is (message, type); other calls in
        // this controller use (type, message) — confirm the flash macro's signature.
        return response.toRoute('editor.dataset.list').flash(`You have accepted dataset ${dataset.id}!`, 'message');
    } catch (error) {
        console.error(error);
        return response.status(500).json({ error: 'An error occurred while accepting the data.' });
    }
}
|
|
|
|
/**
 * Show the approval page for a dataset assigned to the current editor,
 * including the list of available reviewers.
 */
public async approve({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');

    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    // 404s unless the dataset exists AND is assigned to this editor.
    const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    // Users holding the 'reviewer' role, as { id: login } pairs for the select box.
    const reviewers = await User.query()
        .whereHas('roles', (builder) => {
            builder.where('name', 'reviewer');
        })
        .pluck('login', 'id');

    return inertia.render('Editor/Dataset/Approve', {
        dataset,
        reviewers,
    });
}
|
|
|
|
/**
 * Approve a dataset and hand it over to the selected reviewer.
 * Clears any stale rejection notes from earlier review rounds.
 */
public async approveUpdate({ request, response, auth }: HttpContext) {
    const approveDatasetSchema = vine.object({
        reviewer_id: vine.number(),
    });
    // Validation errors propagate to the global exception handler
    // (the previous try/catch only rethrew, so it was removed).
    const validator = vine.compile(approveDatasetSchema);
    await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });

    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }
    const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    dataset.server_state = 'approved';
    // Reset rejection notes left over from previous rounds.
    if (dataset.reject_reviewer_note != null) {
        dataset.reject_reviewer_note = null;
    }
    if (dataset.reject_editor_note != null) {
        dataset.reject_editor_note = null;
    }

    const reviewer_id = request.input('reviewer_id', null);
    dataset.reviewer_id = reviewer_id;

    if (await dataset.save()) {
        return response.toRoute('editor.dataset.list').flash('message', 'You have approved one dataset!');
    }
}
|
|
|
|
/**
 * Show the rejection form for a dataset assigned to the current editor.
 */
public async reject({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }
    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', user.id) // Ensure the user is the editor of the dataset
        .preload('user', (builder) => {
            builder.select('id', 'login', 'email');
        })
        .firstOrFail();

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    return inertia.render('Editor/Dataset/Reject', {
        dataset,
    });
}
|
|
|
|
/**
 * Reject a dataset back to its submitter with a note, optionally notifying
 * the submitter by email. Sets server_state to 'rejected_editor'.
 */
public async rejectUpdate({ request, response, auth }: HttpContext) {
    // No non-null assertion here: `auth.user!` would have made the guard below
    // meaningless to the type checker while still running at runtime.
    const authUser = auth.user;
    if (!authUser) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const id = request.param('id');
    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
        .preload('user', (builder) => {
            builder.select('id', 'login', 'email');
        })
        .firstOrFail();

    const newSchema = vine.object({
        server_state: vine.string().trim(),
        reject_editor_note: vine.string().trim().minLength(10).maxLength(500),
        send_mail: vine.boolean().optional(),
    });

    // Validation errors propagate to the global exception handler
    // (the previous try/catch only rethrew, so it was removed).
    const validator = vine.compile(newSchema);
    await request.validateUsing(validator);

    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
                'warning',
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    dataset.server_state = 'rejected_editor';
    const rejectEditorNote = request.input('reject_editor_note', '');
    dataset.reject_editor_note = rejectEditorNote;

    // NOTE(review): the schema above validates 'send_mail' but the raw input read
    // here is 'send_email' — confirm which field name the form actually submits.
    const sendMail = request.input('send_email', false);
    // Syntactic/DNS check of the recipient address; SMTP probing disabled.
    const validationResult = await validate({
        email: dataset.user.email,
        validateSMTP: false,
    });
    const validRecipientEmail: boolean = validationResult.valid;

    let emailStatusMessage = '';

    if (sendMail == true) {
        if (dataset.user.email && validRecipientEmail) {
            try {
                await mail.send((message) => {
                    message.to(dataset.user.email).subject('Dataset Rejection Notification').html(`
                        <p>Dear ${dataset.user.login},</p>
                        <p>Your dataset with ID ${dataset.id} has been rejected.</p>
                        <p>Reason for rejection: ${rejectEditorNote}</p>
                        <p>Best regards,<br>Your Tethys editor: ${authUser.login}</p>
                    `);
                });
                emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`;
            } catch (error) {
                // Email failure aborts the rejection; the dataset is not saved.
                logger.error(error);
                return response
                    .flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
                    .toRoute('editor.dataset.list');
            }
        } else {
            emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`;
        }
    }

    await dataset.save();
    return response
        .flash(
            `You have successfully rejected dataset ${dataset.id} submitted by ${dataset.user.login}.${emailStatusMessage}`,
            'message',
        )
        .toRoute('editor.dataset.list');
}
|
|
|
|
/**
 * Show the publish page for a reviewed dataset assigned to the current editor.
 */
public async publish({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', user.id) // Ensure the user is the editor of the dataset
        .preload('titles')
        .preload('authors')
        .firstOrFail();

    const validStates = ['reviewed'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be published. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .back();
    }

    return inertia.render('Editor/Dataset/Publish', {
        dataset,
        can: {
            reject: await auth.user?.can(['dataset-editor-reject']),
            publish: await auth.user?.can(['dataset-publish']),
        },
    });
}
|
|
|
|
/**
 * Publish a dataset: assign the next publish_id, mark it 'published',
 * stamp the publication date, and push it to the OpenSearch index.
 */
public async publishUpdate({ request, response, auth }: HttpContext) {
    const publishDatasetSchema = vine.object({
        publisher_name: vine.string().trim(),
    });
    // Validation errors propagate to the global exception handler
    // (the previous try/catch only rethrew, so it was removed).
    const validator = vine.compile(publishDatasetSchema);
    await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });

    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();

    // Next publish_id is one past the current maximum (1 when none exists yet).
    const max = await Dataset.getMax('publish_id');
    const publish_id = (max ?? 0) + 1;
    dataset.publish_id = publish_id;
    dataset.server_state = 'published';
    dataset.server_date_published = DateTime.now();

    const publisherName = request.input('publisher_name', 'Tethys');
    dataset.publisher_name = publisherName;

    if (await dataset.save()) {
        const index_name = 'tethys-records';
        await Index.indexDocument(dataset, index_name);
        return response.toRoute('editor.dataset.list').flash('message', 'You have successfully published the dataset!');
    }
}
|
|
|
|
/**
 * Show the form for sending a reviewed dataset back to its reviewer.
 */
public async rejectToReviewer({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', user.id) // Ensure the user is the editor of the dataset
        .preload('reviewer', (builder) => {
            builder.select('id', 'login', 'email');
        })
        .firstOrFail();

    const validStates = ['reviewed'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be rejected to the reviewer. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    return inertia.render('Editor/Dataset/RejectToReviewer', {
        dataset,
    });
}
|
|
|
|
/**
 * Send a reviewed dataset back to its reviewer with a rejection note,
 * optionally notifying the reviewer by email.
 */
public async rejectToReviewerUpdate({ request, response, auth }: HttpContext) {
    // No non-null assertion here: `auth.user!` would have made the guard below
    // meaningless to the type checker while still running at runtime.
    const authUser = auth.user;
    if (!authUser) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const id = request.param('id');
    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
        .preload('reviewer', (builder) => {
            builder.select('id', 'login', 'email');
        })
        .firstOrFail();

    const newSchema = vine.object({
        server_state: vine.string().trim(),
        reject_editor_note: vine.string().trim().minLength(10).maxLength(500),
        send_mail: vine.boolean().optional(),
    });

    // Validation errors propagate to the global exception handler
    // (the previous try/catch only rethrew, so it was removed).
    const validator = vine.compile(newSchema);
    await request.validateUsing(validator);

    const validStates = ['reviewed'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                `Invalid server state. Dataset with id ${id} cannot be rejected to reviewer. Dataset has server state ${dataset.server_state}.`,
                'warning',
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    // NOTE(review): other parts of this controller use the state string
    // 'rejected_reviewer'; confirm 'rejected_to_reviewer' is a distinct,
    // intended server_state value and not a typo.
    dataset.server_state = 'rejected_to_reviewer';
    const rejectEditorNote = request.input('reject_editor_note', '');
    dataset.reject_editor_note = rejectEditorNote;

    // NOTE(review): the schema above validates 'send_mail' but the raw input read
    // here is 'send_email' — confirm which field name the form actually submits.
    const sendMail = request.input('send_email', false);
    // Syntactic/DNS check of the recipient address; SMTP probing disabled.
    const validationResult = await validate({
        email: dataset.reviewer.email,
        validateSMTP: false,
    });
    const validRecipientEmail: boolean = validationResult.valid;

    // Unlike rejectUpdate, the state change is persisted before the email is sent.
    await dataset.save();

    let emailStatusMessage = '';
    if (sendMail == true) {
        if (dataset.reviewer.email && validRecipientEmail) {
            try {
                await mail.send((message) => {
                    message.to(dataset.reviewer.email).subject('Dataset Rejection Notification').html(`
                        <p>Dear ${dataset.reviewer.login},</p>
                        <p>Your dataset with ID ${dataset.id} has been rejected.</p>
                        <p>Reason for rejection: ${rejectEditorNote}</p>
                        <p>Best regards,<br>Your Tethys editor: ${authUser.login}</p>
                    `);
                });
                emailStatusMessage = ` A rejection email was successfully sent to ${dataset.reviewer.email}.`;
            } catch (error) {
                logger.error(error);
                return response
                    .flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
                    .toRoute('editor.dataset.list');
            }
        } else {
            emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.reviewer.email}) is not valid.`;
        }
    }

    return response
        .flash(
            `You have successfully rejected dataset ${dataset.id} reviewed by ${dataset.reviewer.login}.${emailStatusMessage}`,
            'message',
        )
        .toRoute('editor.dataset.list');
}
|
|
|
|
/**
 * Render the DOI-creation page for a dataset assigned to the current editor.
 */
public async doiCreate({ request, inertia, auth, response }: HttpContext) {
    const currentUser = auth.user;
    if (!currentUser) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    const datasetId = request.param('id');
    // Restrict the lookup to datasets assigned to this editor; 404 otherwise.
    const query = Dataset.query().where('id', datasetId).where('editor_id', currentUser.id);
    query.preload('titles').preload('descriptions').preload('authors');
    const dataset = await query.firstOrFail();

    return inertia.render('Editor/Dataset/Doi', { dataset });
}
|
|
|
|
/**
 * Register a DOI for a published dataset at DataCite, then persist the
 * identifier and re-index the dataset.
 *
 * Route param: publish_id (not the dataset primary key).
 * On DataCite failure, flashes validation-style errors and redirects back.
 */
public async doiStore({ request, response, session, auth }: HttpContext) {
    const dataId = request.param('publish_id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    // Load dataset with minimal required relationships; 404 unless this user is its editor.
    const dataset = await Dataset.query().where('editor_id', user.id).where('publish_id', dataId).firstOrFail();

    const prefix = process.env.DATACITE_PREFIX || '';
    const base_domain = process.env.BASE_DOMAIN || '';

    // Generate DOI metadata XML (DataCite registration payload).
    const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

    // Prepare DOI registration data: DOI name and the landing page it resolves to.
    const doiValue = `${prefix}/tethys.${dataset.publish_id}`;
    const landingPageUrl = `https://doi.${getDomain(base_domain)}/${prefix}/tethys.${dataset.publish_id}`;

    try {
        // Register DOI with DataCite
        const doiClient = new DoiClient();
        const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

        // DataCite MDS answers 201 Created on successful registration.
        if (dataciteResponse?.status !== 201) {
            const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
            throw new DoiClientException(dataciteResponse?.status, message);
        }

        // DOI registration successful - persist and index
        await this.persistDoiAndIndex(dataset, doiValue);

        return response
            .flash('message', 'You have successfully created a DOI for the dataset!')
            .redirect()
            .toRoute('editor.dataset.list');
    } catch (error) {
        // logger.error(`${__filename}: DOI registration failed for dataset ${dataset.id}: ${error.message}`);

        if (error instanceof DoiClientException) {
            // Flash error for Inertia to pick up
            session.flash('errors', {
                doi: `DOI registration failed: ${error.message}`,
            });
            // Optionally also flash a warning for your warning display
            session.flash('warning', error.message);
        } else {
            session.flash('errors', {
                general: `An unexpected error occurred: ${error.message}`,
            });
        }

        return response.redirect().back();
    }
}
|
|
|
|
/**
 * Persist DOI identifier and update search index.
 * Handles cache invalidation to ensure fresh indexing.
 *
 * @param dataset  Dataset whose DOI was just registered at DataCite.
 * @param doiValue The registered DOI string (e.g. "<prefix>/tethys.<publish_id>").
 */
private async persistDoiAndIndex(dataset: Dataset, doiValue: string): Promise<void> {
    // Create DOI identifier record marked 'findable' (DataCite state for resolvable DOIs).
    const doiIdentifier = new DatasetIdentifier();
    doiIdentifier.value = doiValue;
    doiIdentifier.dataset_id = dataset.id;
    doiIdentifier.type = 'doi';
    doiIdentifier.status = 'findable';

    // Save identifier (this will trigger database insert)
    await dataset.related('identifier').save(doiIdentifier);

    // Update dataset modification timestamp to reflect the change
    dataset.server_date_modified = DateTime.now();
    await dataset.save();

    // Invalidate stale XML cache so the next serialization includes the new DOI.
    await this.invalidateDatasetCache(dataset);

    // Reload dataset with fresh state (identifier + cleared cache) for indexing.
    const freshDataset = await Dataset.query().where('id', dataset.id).preload('identifier').preload('xmlCache').firstOrFail();

    // Index to OpenSearch with fresh data
    const index_name = process.env.OPENSEARCH_INDEX || 'tethys-records';
    await Index.indexDocument(freshDataset, index_name);

    logger.info(`Successfully created DOI ${doiValue} and indexed dataset ${dataset.id}`);
}
|
|
|
|
/**
|
|
* Invalidate XML cache for dataset
|
|
* Ensures fresh cache generation on next access
|
|
*/
|
|
private async invalidateDatasetCache(dataset: Dataset): Promise<void> {
|
|
await dataset.load('xmlCache');
|
|
|
|
if (dataset.xmlCache) {
|
|
await dataset.xmlCache.delete();
|
|
logger.debug(`Invalidated XML cache for dataset ${dataset.id}`);
|
|
}
|
|
}
|
|
|
|
// Placeholder for the resourceful `show` action — intentionally not implemented yet.
public async show({}: HttpContext) {}
|
|
|
|
/**
 * Show the edit form for a dataset assigned to the current editor, together
 * with all lookup data the form needs (types, languages, projects, licenses).
 */
public async edit({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');

    // Check if user is authenticated
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    // Prefilter by both id AND editor_id to ensure user has permission to edit
    const datasetQuery = Dataset.query().where('id', id).where('editor_id', user.id);
    datasetQuery
        .preload('titles', (query) => query.orderBy('id', 'asc'))
        .preload('descriptions', (query) => query.orderBy('id', 'asc'))
        .preload('coverage')
        .preload('licenses')
        .preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
        .preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
        .preload('subjects', (builder) => {
            builder.orderBy('id', 'asc').withCount('datasets');
        })
        .preload('references')
        .preload('files', (query) => {
            query.orderBy('sort_order', 'asc'); // Sort by sort_order column
        });

    // This will throw 404 if editor_id does not match logged in user
    const dataset = await datasetQuery.firstOrFail();
    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // Fixed typo in the user-facing message: "Datset" -> "Dataset".
        return response
            .flash(
                `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                'warning',
            )
            .toRoute('editor.dataset.list');
    }

    // Object.entries yields [key, value]; the filters exclude by enum KEY
    // (the previous binding name `value` for the key was misleading).
    const titleTypes = Object.entries(TitleTypes)
        .filter(([key]) => key !== 'Main')
        .map(([key, value]) => ({ value: key, label: value }));

    const descriptionTypes = Object.entries(DescriptionTypes)
        .filter(([key]) => key !== 'Abstract')
        .map(([key, value]) => ({ value: key, label: value }));

    const languages = await Language.query().where('active', true).pluck('part1', 'part1');

    const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));
    const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));

    const projects = await Project.query().pluck('label', 'id');

    // Selectable embargo/publication years: 1990 through the current year.
    const currentDate = new Date();
    const currentYear = currentDate.getFullYear();
    const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

    // NOTE(review): 'active' is compared to the string 'true' here but to boolean
    // true for Language above — confirm the column type and unify.
    const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');

    return inertia.render('Editor/Dataset/Edit', {
        dataset,
        titletypes: titleTypes,
        descriptiontypes: descriptionTypes,
        contributorTypes,
        nameTypes,
        languages,
        projects,
        licenses,
        years,
        subjectTypes: SubjectTypes,
        referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
        relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
        doctypes: DatasetTypes,
    });
}
|
|
|
|
/**
 * Persist an editor's changes to a dataset and all of its related models
 * (licenses, persons, titles, descriptions, subjects, references, coverage).
 * All writes run inside a single transaction; any failure rolls everything back.
 */
public async update({ request, response, session, auth }: HttpContext) {
    // Get the dataset id from the route parameter
    const datasetId = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }

    // Retrieve the dataset (must belong to this editor) and load its existing files
    const dataset = await Dataset.query().where('id', datasetId).where('editor_id', user.id).firstOrFail();
    await dataset.load('files');

    let trx: TransactionClientContract | null = null;
    try {
        await request.validateUsing(updateEditorDatasetValidator);
        trx = await db.transaction();

        // save the licenses: sync replaces the whole pivot set with the submitted ids
        const licenses: number[] = request.input('licenses', []);
        await dataset.useTransaction(trx).related('licenses').sync(licenses);

        // save authors and contributors: detach all current links, then re-create them from the payload
        await dataset.useTransaction(trx).related('authors').sync([]);
        await dataset.useTransaction(trx).related('contributors').sync([]);
        await savePersons(dataset, request.input('authors', []), 'author', trx);
        await savePersons(dataset, request.input('contributors', []), 'contributor', trx);

        // save the titles: update rows that carry an id, create the rest
        const titles = request.input('titles', []);
        for (const titleData of titles) {
            if (titleData.id) {
                const title = await Title.findOrFail(titleData.id);
                title.value = titleData.value;
                title.language = titleData.language;
                title.type = titleData.type;
                // Only hit the database when something actually changed
                if (title.$isDirty) {
                    await title.useTransaction(trx).save();
                }
            } else {
                const title = new Title();
                title.fill(titleData);
                await dataset.useTransaction(trx).related('titles').save(title);
            }
        }

        // save the abstracts (descriptions): same update-or-create pattern as titles
        const descriptions = request.input('descriptions', []);
        for (const descriptionData of descriptions) {
            if (descriptionData.id) {
                const description = await Description.findOrFail(descriptionData.id);
                description.value = descriptionData.value;
                description.language = descriptionData.language;
                description.type = descriptionData.type;
                if (description.$isDirty) {
                    await description.useTransaction(trx).save();
                }
            } else {
                const description = new Description();
                description.fill(descriptionData);
                await dataset.useTransaction(trx).related('descriptions').save(description);
            }
        }

        // Process all subjects/keywords from the request.
        // FIX: default to [] so a payload without 'subjects' does not throw on iteration.
        const subjects = request.input('subjects', []);
        for (const subjectData of subjects) {
            // Case 1: subject already exists in the database (has an id)
            if (subjectData.id) {
                const existingSubject = await Subject.findOrFail(subjectData.id);

                // Update subject properties from the request data
                existingSubject.value = subjectData.value;
                existingSubject.type = subjectData.type;
                existingSubject.external_key = subjectData.external_key;

                // Only save if there are actual changes.
                // FIX: run inside the transaction like every other write in this handler,
                // so a later rollback does not leave a half-applied subject update behind.
                if (existingSubject.$isDirty) {
                    await existingSubject.useTransaction(trx).save();
                }
                // The dataset<->subject relationship is already established, no need to attach again
            }
            // Case 2: new subject being added (no id)
            else {
                // Reuse an identical subject (same value + type) if one already exists
                const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);

                if (subject.$isNew === true) {
                    // Completely new subject: create it and associate it with the dataset
                    await dataset.useTransaction(trx).related('subjects').save(subject);
                } else {
                    // Existing subject: only create the relationship
                    await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
                }
            }
        }

        // Remove subjects flagged for deletion; delete the row itself only when no other dataset uses it
        const subjectsToDelete = request.input('subjectsToDelete', []);
        for (const subjectData of subjectsToDelete) {
            if (subjectData.id) {
                const subject = await Subject.query()
                    .where('id', subjectData.id)
                    .preload('datasets', (builder) => {
                        builder.orderBy('id', 'asc');
                    })
                    .withCount('datasets')
                    .firstOrFail();

                if (subject.$extras.datasets_count > 1) {
                    // Used by other datasets as well: just detach it from this one
                    await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                } else {
                    // Only used by this dataset: detach and delete the subject completely
                    await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                    await subject.useTransaction(trx).delete();
                }
            }
        }

        // Process references: update the ones we already have, create the new ones
        const references = request.input('references', []);
        const existingReferences = await dataset.related('references').query();
        const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));

        for (const referenceData of references) {
            if (referenceData.id && existingReferencesMap.has(referenceData.id)) {
                // Update existing reference
                const reference = existingReferencesMap.get(referenceData.id);
                if (reference) {
                    reference.merge(referenceData);
                    if (reference.$isDirty) {
                        await reference.useTransaction(trx).save();
                    }
                }
            } else {
                // Create new reference
                const dataReference = new DatasetReference();
                dataReference.fill(referenceData);
                await dataset.useTransaction(trx).related('references').save(dataReference);
            }
        }

        // Handle references to delete if provided
        const referencesToDelete = request.input('referencesToDelete', []);
        for (const referenceData of referencesToDelete) {
            if (referenceData.id) {
                const reference = await DatasetReference.findOrFail(referenceData.id);
                await reference.useTransaction(trx).delete();
            }
        }

        // save coverage (only updates an existing row; creation is not handled here)
        const coverageData = request.input('coverage');
        if (coverageData && coverageData.id) {
            const coverage = await Coverage.findOrFail(coverageData.id);
            coverage.merge(coverageData);
            if (coverage.$isDirty) {
                await coverage.useTransaction(trx).save();
            }
        }

        // Scalar dataset attributes + bump the server-side modification timestamp
        const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
        dataset.merge(input);
        dataset.server_date_modified = DateTime.now();
        await dataset.useTransaction(trx).save();

        await trx.commit();

        session.flash('message', 'Dataset has been updated successfully');
        return response.redirect().toRoute('editor.dataset.edit', [dataset.id]);
    } catch (error) {
        if (trx !== null) {
            await trx.rollback();
        }
        console.error('Failed to update dataset and related models:', error);
        throw error;
    }
}
|
|
|
|
/**
 * Render the categorization page for a dataset: the dataset itself, the
 * 'ddc'/'ccs' collection roles (top-level collections only) and the
 * collections already attached to the dataset.
 */
public async categorize({ inertia, request, response, auth }: HttpContext) {
    const id = request.param('id');
    // Check if user is authenticated
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }
    // Preload dataset and its "collections" relation (must belong to this editor)
    const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).preload('collections').firstOrFail();

    // Categorization is only allowed in these editorial states
    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        // FIX: corrected typo in the user-facing message ('Datset' -> 'Dataset')
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    const collectionRoles = await CollectionRole.query()
        .whereIn('name', ['ddc', 'ccs'])
        .preload('collections', (coll: Collection) => {
            // preload only top-level collections (no parent_id), ordered by number
            coll.whereNull('parent_id').orderBy('number', 'asc');
        })
        .exec();

    return inertia.render('Editor/Dataset/Category', {
        collectionRoles: collectionRoles,
        dataset: dataset,
        relatedCollections: dataset.collections,
    });
}
|
|
|
|
/**
 * Replace the dataset's collection assignments with the ids submitted in the
 * request. Runs inside a transaction and redirects back to the editor list.
 */
public async categorizeUpdate({ request, response, session, auth }: HttpContext) {
    // Get the dataset id from the route parameter
    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }
    // Retrieve the dataset (must belong to this editor) and load its existing files
    const dataset = await Dataset.query().preload('files').where('id', id).where('editor_id', user.id).firstOrFail();

    // Categorization is only allowed in these editorial states
    const validStates = ['editor_accepted', 'rejected_reviewer'];
    if (!validStates.includes(dataset.server_state)) {
        return response
            .flash(
                'warning',
                `Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }

    let trx: TransactionClientContract | null = null;
    try {
        trx = await db.transaction();

        // Selected collections from the request: an array of collection ids
        const collections: number[] = request.input('collections', []);

        // Synchronize the dataset collections inside the transaction
        await dataset.useTransaction(trx).related('collections').sync(collections);

        await trx.commit();

        session.flash('message', 'Dataset collections updated successfully!');
        return response.redirect().toRoute('editor.dataset.list');
    } catch (error) {
        if (trx !== null) {
            await trx.rollback();
        }
        // FIX: corrected garbled log message ('Failed tocatgorize')
        console.error('Failed to categorize dataset collections:', error);
        throw error;
    }
}
|
|
|
|
// public async update({}: HttpContextContract) {}
|
|
/**
 * Re-index a single dataset in OpenSearch: build its XML representation,
 * transform it to a JSON document via the precompiled XSLT (this.proc),
 * then index it under the dataset's publish id.
 */
public async updateOpensearch({ response }: HttpContext) {
    // TODO(review): dataset id is hard-coded (debug leftover) — should come from request.param('id')
    const id = 273; //request.param('id');
    const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();

    // Build the intermediate XML document: <root><Dataset>…</Dataset></root>
    const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
    const datasetNode = xml.root().ele('Dataset');
    await this.createXmlRecord(dataset, datasetNode);

    // Target OpenSearch index
    const index_name = 'tethys-features';

    const xmlString = xml.end({ prettyPrint: false });
    let doc = '';
    try {
        // Serialize the dataset XML into a JSON string using the XSLT stylesheet
        const result = await SaxonJS.transform({
            stylesheetText: this.proc,
            destination: 'serialized',
            sourceText: xmlString,
            // stylesheetParams: xsltParameter,
            // logLevel: 10,
        });
        doc = result.principalResult;
    } catch (error) {
        // FIX: previous message wrongly claimed a user was being created
        return response.status(500).json({
            message: 'An error occurred while transforming the dataset XML',
            error: error.message,
        });
    }

    try {
        // Document field types (e.g. geo shapes):
        // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
        const document = JSON.parse(`${doc}`);

        // Index (upsert) the document; refresh so it is searchable immediately
        const indexResponse = await client.index({
            id: dataset.publish_id?.toString(),
            index: index_name,
            body: document,
            refresh: true,
        });

        // Return the result
        return response.json(indexResponse.body);
    } catch (error) {
        // Handle any errors
        console.error(error);
        return response.status(500).json({ error: 'An error occurred while updating the data.' });
    }
}
|
|
|
|
/**
 * Stream a stored file to the client as an attachment. The download name is
 * the file's label, with the storage extension appended when the label does
 * not already end with it.
 */
public async download({ params, response }: HttpContext) {
    // Look up the file record (findOrFail raises a 404 when missing)
    const file = await File.findOrFail(params.id);

    const storagePath = file.filePath;
    const extension = file.filePath.split('.').pop() || '';

    // Avoid a duplicated suffix when the label already carries the extension
    let downloadName = `${file.label}.${extension}`;
    if (file.label.toLowerCase().endsWith(`.${extension.toLowerCase()}`)) {
        downloadName = file.label;
    }

    // Transfer headers: no caching, binary transfer encoding, permissive CORS for GET
    response
        .header('Cache-Control', 'no-cache private')
        .header('Content-Description', 'File Transfer')
        .header('Content-Type', file.mime_type || 'application/octet-stream')
        .header('Content-Transfer-Encoding', 'binary')
        .header('Access-Control-Allow-Origin', '*')
        .header('Access-Control-Allow-Methods', 'GET');
    response.attachment(downloadName);
    return response.download(storagePath);
}
|
|
|
|
// Route handler stub for deleting a dataset — not implemented yet.
public async destroy({}: HttpContext) {}
|
|
|
|
/**
 * Append the dataset's serialized XML DOM (cache-aware) as a child of the
 * given node; does nothing when no DOM could be produced.
 */
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
    const importedNode = await this.getDatasetXmlDomNode(dataset);
    if (!importedNode) {
        return;
    }
    datasetNode.import(importedNode);
}
|
|
|
|
private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
|
|
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
|
|
// xmlModel.setModel(dataset);
|
|
|
|
// Load existing cache if available
|
|
await dataset.load('xmlCache');
|
|
if (dataset.xmlCache) {
|
|
serializer.setCache(dataset.xmlCache);
|
|
}
|
|
|
|
// return cache.getDomDocument();
|
|
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
|
|
return xmlDocument;
|
|
}
|
|
}
|