- Implemented dataset editing functionality for editor roles, including fetching, updating, and categorizing datasets.
- Added routes and controller actions for editing, updating, and categorizing datasets within the editor interface.
- Integrated UI components for managing dataset metadata, subjects, references, and files.
- Enhanced keyword management with features for adding, editing, and deleting keywords, including handling keywords used by multiple datasets.
- Improved reference management with features for adding, editing, and deleting dataset references.
- Added validation for dataset updates using the `updateEditorDatasetValidator`.
- Updated the dataset edit form to include components for managing titles, descriptions, authors, contributors, licenses, coverage, subjects, references, and files.
- Implemented transaction management for dataset updates to ensure data consistency.
- Added a download route for files associated with datasets.
- Improved the UI for displaying and interacting with datasets in the editor index view, including edit and categorize buttons.
- Fixed an issue where the file size was not correctly calculated.
- Added a tooltip to the keyword value column in the TableKeywords component to explain when keywords are editable.
- Added sections to display keywords and references that are marked for deletion, with a restore button for references.
- Updated the SearchCategoryAutocomplete and FormControl components to support read-only mode.
- Added icons and styling improvements to various components.
- Added default values for `subjectsToDelete` and `referencesToDelete` in the dataset model.
- Updated the FooterBar component to use the JustboilLogo component.
- Updated app.ts to fetch chart data without a year parameter.
- Updated Login.vue to invert the logo in dark mode.
- Updated AccountInfo.vue to add a Head component.
import type { HttpContext } from '@adonisjs/core/http';
import { Client } from '@opensearch-project/opensearch';
import User from '#models/user';
import Dataset from '#models/dataset';
import DatasetIdentifier from '#models/dataset_identifier';
import XmlModel from '#app/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs';
import SaxonJS from 'saxon-js';
import { DateTime } from 'luxon';
import Index from '#app/Library/Utils/Index';
import { getDomain, savePersons } from '#app/utils/utility-functions';
import { DoiClient } from '#app/Library/Doi/DoiClient';
import DoiClientException from '#app/exceptions/DoiClientException';
import logger from '@adonisjs/core/services/logger';
import { HttpException } from 'node-exceptions';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main';
import { validate } from 'deep-email-validator';
import {
    TitleTypes,
    DescriptionTypes,
    ContributorTypes,
    PersonNameTypes,
    ReferenceIdentifierTypes,
    RelationTypes,
    SubjectTypes,
} from '#contracts/enums';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import db from '@adonisjs/lucid/services/db';
import Project from '#models/project';
import License from '#models/license';
import Language from '#models/language';
import File from '#models/file';
import Coverage from '#models/coverage';
import Title from '#models/title';
import Description from '#models/description';
import Subject from '#models/subject';
import DatasetReference from '#models/dataset_reference';
import Collection from '#models/collection';
import CollectionRole from '#models/collection_role';
import { updateEditorDatasetValidator } from '#validators/dataset';

// Create a new instance of the client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint

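/**
 * DatasetsController (editor area)
 *
 * Handles the editorial workflow for datasets: listing, receiving,
 * approving, rejecting, publishing, DOI registration, metadata editing,
 * categorization into collections, search indexing, and file downloads.
 */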
export default class DatasetsController {
    private proc;

    public messages = {
        // 'required': '{{ field }} is required',
        // 'licenses.minLength': 'at least {{ options.minLength }} permission must be defined',
        'reviewer_id.required': 'reviewer_id must be defined',
        'publisher_name.required': 'publisher name must be defined',
    };

    constructor() {
        this.proc = readFileSync('public/assets2/solr.sef.json');
        // Load the XSLT file
        // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
    }

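    /**
     * Lists datasets relevant to the editor: all released datasets plus those
     * already assigned to the current editor, with sorting, pagination, and
     * per-action permission flags for the UI.
     */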
    // public async index({}: HttpContextContract) {}
    public async index({ auth, request, inertia }: HttpContext) {
        const user = (await User.find(auth.user?.id)) as User;
        const page = request.input('page', 1);
        let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();

        // if (request.input('search')) {
        //     // users = users.whereRaw('name like %?%', [request.input('search')])
        //     const searchTerm = request.input('search');
        //     datasets.where('name', 'ilike', `%${searchTerm}%`);
        // }

        if (request.input('sort')) {
            type SortOrder = 'asc' | 'desc' | undefined;
            let attribute = request.input('sort');
            let sortOrder: SortOrder = 'asc';

            if (attribute.startsWith('-')) {
                sortOrder = 'desc';
                attribute = attribute.substring(1);
            }
            datasets.orderBy(attribute, sortOrder);
        } else {
            // users.orderBy('created_at', 'desc');
            datasets.orderBy('id', 'asc');
        }

        // const users = await User.query().orderBy('login').paginate(page, limit);
        const myDatasets = await datasets
            .where('server_state', 'released')
            .orWhere((dQuery) => {
                dQuery
                    .whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
                    .where('editor_id', user.id)
                    .doesntHave('identifier', 'and');
            })
            // .preload('identifier')
            .preload('titles')
            .preload('user', (query) => query.select('id', 'login'))
            .preload('editor', (query) => query.select('id', 'login'))
            .paginate(page, 10);

        return inertia.render('Editor/Dataset/Index', {
            datasets: myDatasets.serialize(),
            filters: request.all(),
            can: {
                receive: await auth.user?.can(['dataset-receive']),
                approve: await auth.user?.can(['dataset-approve']),
                reject: await auth.user?.can(['dataset-editor-reject']),
                edit: await auth.user?.can(['dataset-editor-update']),
                delete: await auth.user?.can(['dataset-editor-delete']),
                publish: await auth.user?.can(['dataset-publish']),
            },
        });
    }

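    /**
     * Shows the receive confirmation page for a released dataset.
     */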
    public async receive({ request, inertia, response }: HttpContext) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('descriptions')
            .preload('user', (builder) => {
                builder.select('id', 'login');
            })
            .firstOrFail();

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Editor/Dataset/Receive', {
            dataset,
        });
    }

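    /**
     * Accepts a released dataset: sets server_state to 'editor_accepted' and
     * assigns the current user as its editor.
     */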
    public async receiveUpdate({ auth, request, response }: HttpContext) {
        const id = request.param('id');
        // const { id } = params;
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['released'];
        if (!validStates.includes(dataset.server_state)) {
            // throw new Error('Invalid server state!');
            // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be received by editor. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        dataset.server_state = 'editor_accepted';
        const user = (await User.find(auth.user?.id)) as User;
        // dataset.editor().associate(user).save();
        try {
            await dataset.related('editor').associate(user); // already persists the association
            // await dataset.save();
            return response.toRoute('editor.dataset.list').flash(`You have accepted dataset ${dataset.id}!`, 'message');
        } catch (error) {
            // Handle any errors
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while accepting the data.' });
        }
    }

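    /**
     * Shows the approve page for an accepted dataset, with the list of
     * available reviewers.
     */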
    public async approve({ request, inertia, response }: HttpContext) {
        const id = request.param('id');
        // $dataset = Dataset::with('user:id,login')->findOrFail($id);
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        const reviewers = await User.query()
            .whereHas('roles', (builder) => {
                builder.where('name', 'reviewer');
            })
            .pluck('login', 'id');

        return inertia.render('Editor/Dataset/Approve', {
            dataset,
            reviewers,
        });
    }

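    /**
     * Approves a dataset: validates the selected reviewer, clears any previous
     * reviewer rejection note, and sets server_state to 'approved'.
     */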
    public async approveUpdate({ request, response }: HttpContext) {
        const approveDatasetSchema = vine.object({
            reviewer_id: vine.number(),
        });
        try {
            // await request.validate({ schema: approveDatasetSchema, messages: this.messages });
            const validator = vine.compile(approveDatasetSchema);
            await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });
        } catch (error) {
            // return response.badRequest(error.messages);
            throw error;
        }
        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be approved. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        dataset.server_state = 'approved';
        if (dataset.reject_reviewer_note != null) {
            dataset.reject_reviewer_note = null;
        }

        // assign the selected reviewer
        const reviewer_id = request.input('reviewer_id', null);
        dataset.reviewer_id = reviewer_id;

        if (await dataset.save()) {
            return response.toRoute('editor.dataset.list').flash('message', 'You have approved one dataset!');
        }
    }

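    /**
     * Shows the reject page for an accepted dataset.
     */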
    public async reject({ request, inertia, response }: HttpContext) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            // .preload('titles')
            // .preload('descriptions')
            .preload('user', (builder) => {
                builder.select('id', 'login', 'email');
            })
            .firstOrFail();

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        return inertia.render('Editor/Dataset/Reject', {
            dataset,
        });
    }

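    /**
     * Rejects a dataset: stores the editor's rejection note, sets server_state
     * to 'rejected_editor', and optionally emails the submitter after
     * validating their address.
     */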
    public async rejectUpdate({ request, response, auth }: HttpContext) {
        const authUser = auth.user!;

        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('user', (builder) => {
                builder.select('id', 'login', 'email');
            })
            .firstOrFail();

        const newSchema = vine.object({
            server_state: vine.string().trim(),
            reject_editor_note: vine.string().trim().minLength(10).maxLength(500),
            send_mail: vine.boolean().optional(),
        });

        try {
            // await request.validate({ schema: newSchema });
            const validator = vine.compile(newSchema);
            await request.validateUsing(validator);
        } catch (error) {
            // return response.badRequest(error.messages);
            throw error;
        }

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // throw new Error('Invalid server state!');
            // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
            return response
                .flash(
                    `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
                    'warning',
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        dataset.server_state = 'rejected_editor';
        const rejectEditorNote = request.input('reject_editor_note', '');
        dataset.reject_editor_note = rejectEditorNote;

        // add logic for sending the rejection message
        const sendMail = request.input('send_mail', false); // field name matches the validator schema above
        // const validRecipientEmail = await this.checkEmailDomain('arno.kaimbacher@outlook.at');
        const validationResult = await validate({
            email: dataset.user.email,
            validateSMTP: false,
        });
        const validRecipientEmail: boolean = validationResult.valid;

        let emailStatusMessage = '';

        if (sendMail == true) {
            if (dataset.user.email && validRecipientEmail) {
                try {
                    await mail.send((message) => {
                        message.to(dataset.user.email).subject('Dataset Rejection Notification').html(`
                            <p>Dear ${dataset.user.login},</p>
                            <p>Your dataset with ID ${dataset.id} has been rejected.</p>
                            <p>Reason for rejection: ${rejectEditorNote}</p>
                            <p>Best regards,<br>Your Tethys editor: ${authUser.login}</p>
                        `);
                    });
                    emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`;
                } catch (error) {
                    logger.error(error);
                    return response
                        .flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
                        .toRoute('editor.dataset.list');
                }
            } else {
                emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`;
            }
        }

        await dataset.save();
        return response
            .flash(
                `You have successfully rejected dataset ${dataset.id} submitted by ${dataset.user.login}.${emailStatusMessage}`,
                'message',
            )
            .toRoute('editor.dataset.list');
    }

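    /**
     * Shows the publish page for a reviewed dataset.
     */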
    public async publish({ request, inertia, response }: HttpContext) {
        const id = request.param('id');

        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('authors')
            // .preload('persons', (builder) => {
            //     builder.wherePivot('role', 'author')
            // })
            .firstOrFail();

        const validStates = ['reviewed'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be published. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .back();
        }

        return inertia.render('Editor/Dataset/Publish', {
            dataset,
        });
    }

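    /**
     * Publishes a dataset: assigns the next publish_id, stamps the publication
     * date, stores the publisher name, and indexes the dataset in OpenSearch.
     */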
    public async publishUpdate({ request, response }: HttpContext) {
        const publishDatasetSchema = vine.object({
            publisher_name: vine.string().trim(),
        });
        try {
            // await request.validate({ schema: publishDatasetSchema, messages: this.messages });
            const validator = vine.compile(publishDatasetSchema);
            await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });
        } catch (error) {
            throw error;
        }
        const id = request.param('id');
        const dataset = await Dataset.findOrFail(id);

        // let test = await Dataset.getMax('publish_id');
        // const maxPublishId = await Database.from('documents').max('publish_id as max_publish_id').first();
        // const max = maxPublishId.max_publish_id;
        const max = await Dataset.getMax('publish_id');
        let publish_id = 0;
        if (max != null) {
            publish_id = max + 1;
        } else {
            publish_id = publish_id + 1;
        }
        dataset.publish_id = publish_id;
        dataset.server_state = 'published';
        dataset.server_date_published = DateTime.now();

        const publisherName = request.input('publisher_name', 'Tethys');
        dataset.publisher_name = publisherName;

        if (await dataset.save()) {
            const index_name = 'tethys-records';
            await Index.indexDocument(dataset, index_name);
            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully published the dataset!');
        }
    }

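    /**
     * Shows the DOI creation page for a published dataset.
     */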
    public async doiCreate({ request, inertia }: HttpContext) {
        const id = request.param('id');
        const dataset = await Dataset.query()
            .where('id', id)
            .preload('titles')
            .preload('descriptions')
            // .preload('identifier')
            .preload('authors')
            .firstOrFail();
        return inertia.render('Editor/Dataset/Doi', {
            dataset,
        });
    }

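    /**
     * Registers a DOI with DataCite for a published dataset, persists the
     * resulting identifier, and re-indexes the dataset in OpenSearch.
     */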
    public async doiStore({ request, response }: HttpContext) {
        const dataId = request.param('publish_id');
        const dataset = await Dataset.query()
            // .preload('xmlCache')
            .where('publish_id', dataId)
            .firstOrFail();
        const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;

        let prefix = '';
        let base_domain = '';
        // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
        prefix = process.env.DATACITE_PREFIX || '';
        base_domain = process.env.BASE_DOMAIN || '';

        // register DOI:
        const doiValue = prefix + '/tethys.' + dataset.publish_id; // e.g. '10.21388/tethys.213'
        const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; // e.g. https://doi.dev.tethys.at/10.21388/tethys.213
        const doiClient = new DoiClient();
        const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

        if (dataciteResponse?.status === 201) {
            // if response is OK (201), save the identifier value into the db
            const doiIdentifier = new DatasetIdentifier();
            doiIdentifier.value = doiValue;
            doiIdentifier.dataset_id = dataset.id;
            doiIdentifier.type = 'doi';
            doiIdentifier.status = 'findable';
            // save the modified date of the dataset for re-caching the model in the db and updating the search index
            dataset.server_date_modified = DateTime.now();

            // save the updated dataset to the db and index it to OpenSearch
            try {
                await dataset.related('identifier').save(doiIdentifier);
                const index_name = 'tethys-records';
                await Index.indexDocument(dataset, index_name);
            } catch (error) {
                logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
                // Log the error or handle it as needed
                throw new HttpException(error.message);
            }
            return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
        } else {
            const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
            // Log the error or handle it as needed
            throw new DoiClientException(dataciteResponse?.status, message);
        }
        // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
    }

    public async show({}: HttpContext) {}

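    /**
     * Shows the edit form for an accepted dataset, preloading all related
     * metadata and the lookup data (types, languages, projects, licenses,
     * years, doctypes) the form needs.
     */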
    public async edit({ request, inertia, response }: HttpContext) {
        const id = request.param('id');
        const datasetQuery = Dataset.query().where('id', id);
        datasetQuery
            .preload('titles', (query) => query.orderBy('id', 'asc'))
            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
            .preload('coverage')
            .preload('licenses')
            .preload('authors')
            .preload('contributors')
            // .preload('subjects')
            .preload('subjects', (builder) => {
                builder.orderBy('id', 'asc').withCount('datasets');
            })
            .preload('references')
            .preload('files', (query) => {
                query.orderBy('sort_order', 'asc'); // Sort by sort_order column
            });

        const dataset = await datasetQuery.firstOrFail();
        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                    'warning',
                )
                .toRoute('editor.dataset.list');
        }

        const titleTypes = Object.entries(TitleTypes)
            .filter(([key]) => key !== 'Main')
            .map(([key, value]) => ({ value: key, label: value }));

        const descriptionTypes = Object.entries(DescriptionTypes)
            .filter(([key]) => key !== 'Abstract')
            .map(([key, value]) => ({ value: key, label: value }));

        const languages = await Language.query().where('active', true).pluck('part1', 'part1');

        // const contributorTypes = Config.get('enums.contributor_types');
        const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));

        // const nameTypes = Config.get('enums.name_types');
        const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));

        // const messages = await Database.table('messages')
        //     .pluck('help_text', 'metadata_element');

        const projects = await Project.query().pluck('label', 'id');

        const currentDate = new Date();
        const currentYear = currentDate.getFullYear();
        const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
        // const userHasRoles = user.roles;
        // const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
        // const checkeds = dataset.licenses.first().id;

        const doctypes = {
            analysisdata: { label: 'Analysis', value: 'analysisdata' },
            measurementdata: { label: 'Measurements', value: 'measurementdata' },
            monitoring: 'Monitoring',
            remotesensing: 'Remote Sensing',
            gis: 'GIS',
            models: 'Models',
            mixedtype: 'Mixed Type',
        };

        return inertia.render('Editor/Dataset/Edit', {
            dataset,
            titletypes: titleTypes,
            descriptiontypes: descriptionTypes,
            contributorTypes,
            nameTypes,
            languages,
            // messages,
            projects,
            licenses,
            // datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), // convert object to array with license ids
            // checkeds,
            years,
            subjectTypes: SubjectTypes,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            doctypes,
        });
    }

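    /**
     * Persists edits to a dataset inside a single transaction: licenses,
     * authors/contributors, titles, descriptions, subjects (including
     * deletions), references (including deletions), coverage, and the scalar
     * dataset fields.
     */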
    public async update({ request, response, session }: HttpContext) {
        // Get the dataset id from the route parameter
        const datasetId = request.param('id');
        // Retrieve the dataset and load its existing files
        const dataset = await Dataset.findOrFail(datasetId);
        await dataset.load('files');

        let trx: TransactionClientContract | null = null;
        try {
            await request.validateUsing(updateEditorDatasetValidator);
            trx = await db.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);
            const dataset = await Dataset.findOrFail(datasetId);

            // save the licenses
            const licenses: number[] = request.input('licenses', []);
            await dataset.useTransaction(trx).related('licenses').sync(licenses);

            // save authors and contributors
            await dataset.useTransaction(trx).related('authors').sync([]);
            await dataset.useTransaction(trx).related('contributors').sync([]);
            await savePersons(dataset, request.input('authors', []), 'author', trx);
            await savePersons(dataset, request.input('contributors', []), 'contributor', trx);

            // save the titles:
            const titles = request.input('titles', []);
            // const savedTitles: Array<Title> = [];
            for (const titleData of titles) {
                if (titleData.id) {
                    const title = await Title.findOrFail(titleData.id);
                    title.value = titleData.value;
                    title.language = titleData.language;
                    title.type = titleData.type;
                    if (title.$isDirty) {
                        await title.useTransaction(trx).save();
                        // await dataset.useTransaction(trx).related('titles').save(title);
                    }
                } else {
                    const title = new Title();
                    title.fill(titleData);
                    await dataset.useTransaction(trx).related('titles').save(title);
                }
            }

            // save the abstracts
            const descriptions = request.input('descriptions', []);
            for (const descriptionData of descriptions) {
                if (descriptionData.id) {
                    const description = await Description.findOrFail(descriptionData.id);
                    description.value = descriptionData.value;
                    description.language = descriptionData.language;
                    description.type = descriptionData.type;
                    if (description.$isDirty) {
                        await description.useTransaction(trx).save();
                    }
                } else {
                    const description = new Description();
                    description.fill(descriptionData);
                    await dataset.useTransaction(trx).related('descriptions').save(description);
                }
            }

            // Process all subjects/keywords from the request
            const subjects = request.input('subjects', []);
            for (const subjectData of subjects) {
                // Case 1: Subject already exists in the database (has an ID)
                if (subjectData.id) {
                    // Retrieve the existing subject
                    const existingSubject = await Subject.findOrFail(subjectData.id);

                    // Update subject properties from the request data
                    existingSubject.value = subjectData.value;
                    existingSubject.type = subjectData.type;
                    existingSubject.external_key = subjectData.external_key;

                    // Only save if there are actual changes
                    if (existingSubject.$isDirty) {
                        await existingSubject.save();
                    }

                    // Note: The relationship between dataset and subject is already established,
                    // so we don't need to attach it again
                }
                // Case 2: New subject being added (no ID)
                else {
                    // Check if a subject with the same value and type already exists in the database
                    const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);

                    if (subject.$isNew === true) {
                        // If it's a completely new subject, create and associate it with the dataset
                        await dataset.useTransaction(trx).related('subjects').save(subject);
                    } else {
                        // If the subject already exists, just create the relationship
                        await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
                    }
                }
            }

            const subjectsToDelete = request.input('subjectsToDelete', []);
            for (const subjectData of subjectsToDelete) {
                if (subjectData.id) {
                    // const subject = await Subject.findOrFail(subjectData.id);
                    const subject = await Subject.query()
                        .where('id', subjectData.id)
                        .preload('datasets', (builder) => {
                            builder.orderBy('id', 'asc');
                        })
                        .withCount('datasets')
                        .firstOrFail();

                    // Check if the subject is used by multiple datasets
                    if (subject.$extras.datasets_count > 1) {
                        // If used by multiple datasets, just detach it from the current dataset
                        await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                    } else {
                        // If only used by this dataset, delete the subject completely
                        await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                        await subject.useTransaction(trx).delete();
                    }
                }
            }

            // Process references
            const references = request.input('references', []);
            // First, get existing references to determine which ones to update vs. create
            const existingReferences = await dataset.related('references').query();
            const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));

            for (const referenceData of references) {
                if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
                    // Update existing reference
                    const reference = existingReferencesMap.get(referenceData.id);
                    if (reference) {
                        reference.merge(referenceData);
                        if (reference.$isDirty) {
                            await reference.useTransaction(trx).save();
                        }
                    }
                } else {
                    // Create new reference
                    const dataReference = new DatasetReference();
                    dataReference.fill(referenceData);
                    await dataset.useTransaction(trx).related('references').save(dataReference);
                }
            }

            // Handle references to delete if provided
            const referencesToDelete = request.input('referencesToDelete', []);
            for (const referenceData of referencesToDelete) {
                if (referenceData.id) {
                    const reference = await DatasetReference.findOrFail(referenceData.id);
                    await reference.useTransaction(trx).delete();
                }
            }

            // save coverage
            const coverageData = request.input('coverage');
            if (coverageData) {
                if (coverageData.id) {
                    const coverage = await Coverage.findOrFail(coverageData.id);
                    coverage.merge(coverageData);
                    if (coverage.$isDirty) {
                        await coverage.useTransaction(trx).save();
                    }
                }
            }

            const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
            // dataset.type = request.input('type');
            dataset.merge(input);
            // let test: boolean = dataset.$isDirty;
            await dataset.useTransaction(trx).save();

            await trx.commit();
            // console.log('Dataset has been updated successfully');

            session.flash('message', 'Dataset has been updated successfully');
            // return response.redirect().toRoute('user.index');
            return response.redirect().toRoute('editor.dataset.edit', [dataset.id]);
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to update dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }
    }

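    /**
     * Shows the categorization page: collection roles with their top-level
     * collections, plus the collections already linked to the dataset.
     */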
    public async categorize({ inertia, request, response }: HttpContext) {
        const id = request.param('id');
        // Preload dataset and its "collections" relation
        const dataset = await Dataset.query().where('id', id).preload('collections').firstOrFail();
        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        const collectionRoles = await CollectionRole.query()
            .preload('collections', (coll) => {
                // preload only top-level collections (no parent_id)
                coll.whereNull('parent_id').orderBy('number', 'asc');
            })
            .exec();

        return inertia.render('Editor/Dataset/Category', {
            collectionRoles: collectionRoles,
            dataset: dataset,
            relatedCollections: dataset.collections,
        });
    }

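    /**
     * Synchronizes the dataset's collections from the submitted ids, inside a
     * transaction.
     */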
    public async categorizeUpdate({ request, response, session }: HttpContext) {
        // Get the dataset id from the route parameter
        const id = request.param('id');
        const dataset = await Dataset.query().preload('files').where('id', id).firstOrFail();

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        let trx: TransactionClientContract | null = null;
        try {
            trx = await db.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);

            // Retrieve the selected collections from the request.
            // This should be an array of collection ids.
            const collections: number[] = request.input('collections', []);

            // Synchronize the dataset collections using the transaction.
            await dataset.useTransaction(trx).related('collections').sync(collections);

            // Commit the transaction.
            await trx.commit();

            // Redirect with a success flash message.
            // return response.flash('success', 'Dataset collections updated successfully!').redirect().toRoute('dataset.list');
            session.flash('message', 'Dataset collections updated successfully!');
            return response.redirect().toRoute('editor.dataset.list');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to categorize dataset collections:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }
    }

    // public async update({}: HttpContextContract) {}

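    /**
     * Rebuilds the XML record for a (currently hard-coded) dataset, transforms
     * it to JSON via the compiled XSLT stylesheet, and writes the document
     * into the 'tethys-features' OpenSearch index.
     */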
    public async updateOpensearch({ response }: HttpContext) {
        const id = 273; // request.param('id');
        const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();
        // add xml elements
        let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
        const datasetNode = xml.root().ele('Dataset');
        await this.createXmlRecord(dataset, datasetNode);
        // const domNode = await this.getDatasetXmlDomNode(dataset);
        // const xmlString = xml.end({ prettyPrint: true });

        // const data = request.only(['field1', 'field2']); // get it from xslt

        // Create an index with non-default settings.
        const index_name = 'tethys-features';

        const xmlString = xml.end({ prettyPrint: false });
        let doc = '';
        try {
            const result = await SaxonJS.transform({
                // stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
                stylesheetText: this.proc,
                destination: 'serialized',
                // sourceFileName: sourceFile,
                sourceText: xmlString,
                // stylesheetParams: xsltParameter,
                // logLevel: 10,
            });
            doc = result.principalResult;
        } catch (error) {
            return response.status(500).json({
                message: 'An error occurred while transforming the dataset XML',
                error: error.message,
            });
        }

        // var settings = {
        //     settings: {
        //         index: {
        //             number_of_shards: 4,
        //             number_of_replicas: 3,
        //         },
        //     },
        // };
        // var test = await client.indices.create({
        //     index: index_name,
        //     body: settings,
        // });

        // var document = {
        //     title: 'Sample Document',
        //     authors: [
        //         {
        //             first_name: 'John',
        //             last_name: 'Doe',
        //         },
        //         {
        //             first_name: 'Jane',
        //             last_name: 'Smith',
        //         },
        //     ],
        //     year: '2018',
        //     genre: 'Crime fiction',
        // };

        // http://localhost:9200/datasets/_doc/1

        // var id = '1';

        try {
            // console.log(doc);
            let document = JSON.parse(`${doc}`);

            // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
            // Define the new document
            // const document = {
            //     title: 'Your Document Name',
            //     id: dataset.publish_id,
            //     doctype: 'GIS',
            //     // "location" : {
            //     //     "type" : "point",
            //     //     "coordinates" : [74.00, 40.71]
            //     // },
            //     geo_location: {
            //         type: 'linestring',
            //         coordinates: [
            //             [-77.03653, 38.897676],
            //             [-77.009051, 38.889939],
            //         ],
            //     },
            //     // geo_location: 'BBOX (71.0589, 74.0060, 42.3601, 40.7128)'
            //     // geo_location: {
            //     //     type: 'envelope',
            //     //     coordinates: [
            //     //         [13.0, 53.0],
            //     //         [14.0, 52.0],
            //     //     ], // Define your BBOX coordinates
            //     // },
            // };

            // Index the document
            const result = await client.index({
                id: dataset.publish_id?.toString(),
                index: index_name,
                body: document,
                refresh: true,
            });

            // Return the result
            return response.json(result.body);
        } catch (error) {
            // Handle any errors
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while updating the data.' });
        }
    }

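    /**
     * Streams a dataset file as an attachment, using the file's label and
     * original extension for the download name.
     */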
    public async download({ params, response }: HttpContext) {
        const id = params.id;
        // Find the file by ID
        const file = await File.findOrFail(id);
        // const filePath = await drive.use('local').getUrl('/' + file.filePath)
        const filePath = file.filePath;
        const fileExt = file.filePath.split('.').pop() || '';
        // Set the response headers and download the file
        response.header('Content-Type', file.mime_type || 'application/octet-stream');
        response.attachment(`${file.label}.${fileExt}`);
        return response.download(filePath);
    }

    public async destroy({}: HttpContext) {}

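    /**
     * Imports the dataset's cached (or freshly generated) XML DOM into the
     * given parent node.
     */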
    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
        const domNode = await this.getDatasetXmlDomNode(dataset);
        if (domNode) {
            datasetNode.import(domNode);
        }
    }

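    /**
     * Builds (or reuses from the xmlCache relation) the XML DOM document for a
     * dataset, excluding empty fields.
     */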
    private async getDatasetXmlDomNode(dataset: Dataset) {
        const xmlModel = new XmlModel(dataset);
        // xmlModel.setModel(dataset);
        xmlModel.excludeEmptyFields();
        xmlModel.caching = true;
        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
        // dataset.load('xmlCache');
        if (dataset.xmlCache) {
            xmlModel.xmlCache = dataset.xmlCache;
        }

        // return cache.getDomDocument();
        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
        return domDocument;
    }
}