hotfix: enhance editor dataset management and UI improvements
- Implemented dataset editing functionality for editor roles, including fetching, updating, and categorizing datasets.
- Added routes and controller actions for editing, updating, and categorizing datasets within the editor interface (see the route sketch below).
- Integrated UI components for managing dataset metadata, subjects, references, and files.
- Enhanced keyword management with features for adding, editing, and deleting keywords, including handling keywords used by multiple datasets.
- Improved reference management with features for adding, editing, and deleting dataset references.
- Added validation for dataset updates using the `updateEditorDatasetValidator`.
- Updated the dataset edit form to include components for managing titles, descriptions, authors, contributors, licenses, coverage, subjects, references, and files.
- Implemented transaction management for dataset updates to ensure data consistency.
- Added a download route for files associated with datasets.
- Improved the UI for displaying and interacting with datasets in the editor index view, including edit and categorize buttons.
- Fixed an issue where the file size was not calculated correctly.
- Added a tooltip to the keyword value column in the TableKeywords component to explain when keywords are editable.
- Added a section to display keywords that are marked for deletion.
- Added a section to display references that are marked for deletion, with a restore button to undo a pending deletion.
- Updated the SearchCategoryAutocomplete component to support read-only mode.
- Updated the FormControl component to support read-only mode.
- Added icons and styling improvements to various components.
- Added default values for `subjectsToDelete` and `referencesToDelete` in the dataset model.
- Updated the FooterBar component to use the JustboilLogo component.
- Updated app.ts to fetch chart data without a year parameter.
- Updated Login.vue to invert the logo in dark mode.
- Updated AccountInfo.vue to add a Head component.
parent 10d159a57a · commit f04c1f6327
30 changed files with 2284 additions and 539 deletions
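For orientation, here is a minimal sketch of how the editor routes touched by this commit could be wired up in AdonisJS v6. Only the route names editor.dataset.edit and editor.dataset.list appear in the diff below; the URL paths, the controller import path, the index action, and the remaining route names are illustrative assumptions, not the committed definitions.

    // start/routes.ts — illustrative sketch, not the committed route file
    import router from '@adonisjs/core/services/router'

    // Assumed controller location; the real path may differ.
    const DatasetsController = () => import('#controllers/datasets_controller')

    router
        .group(() => {
            // List datasets in the editor index view (route name taken from the diff).
            router.get('/dataset/list', [DatasetsController, 'index']).as('editor.dataset.list')
            // Edit and update a dataset's metadata ('editor.dataset.edit' appears in the diff).
            router.get('/dataset/:id/edit', [DatasetsController, 'edit']).as('editor.dataset.edit')
            router.put('/dataset/:id/update', [DatasetsController, 'update']).as('editor.dataset.update')
            // Categorize a dataset into collections (route names assumed).
            router.get('/dataset/:id/categorize', [DatasetsController, 'categorize']).as('editor.dataset.categorize')
            router.put('/dataset/:id/categorize', [DatasetsController, 'categorizeUpdate']).as('editor.dataset.categorizeUpdate')
            // Download a file that belongs to a dataset (route name assumed).
            router.get('/file/:id/download', [DatasetsController, 'download']).as('editor.file.download')
        })
        .prefix('/editor')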
@@ -18,9 +18,32 @@ import { HttpException } from 'node-exceptions';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main';
// import { resolveMx } from 'dns/promises';
// import * as net from 'net';
import { validate } from 'deep-email-validator';
import {
    TitleTypes,
    DescriptionTypes,
    ContributorTypes,
    PersonNameTypes,
    ReferenceIdentifierTypes,
    RelationTypes,
    SubjectTypes,
} from '#contracts/enums';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import db from '@adonisjs/lucid/services/db';
import Project from '#models/project';
import License from '#models/license';
import Language from '#models/language';
import File from '#models/file';
import Coverage from '#models/coverage';
import Title from '#models/title';
import Description from '#models/description';
import Subject from '#models/subject';
import DatasetReference from '#models/dataset_reference';
import Collection from '#models/collection';
import CollectionRole from '#models/collection_role';
import { updateEditorDatasetValidator } from '#validators/dataset';
import { savePersons } from '#app/utils/utility-functions';

// Create a new instance of the client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
@@ -255,71 +278,6 @@ export default class DatasetsController {
        });
    }

    // private async checkEmailDomain(email: string): Promise<boolean> {
    //     const domain = email.split('@')[1];

    //     try {
    //         // Step 1: Check MX records for the domain
    //         const mxRecords = await resolveMx(domain);
    //         if (mxRecords.length === 0) {
    //             return false; // No MX records, can't send email
    //         }

    //         // Sort MX records by priority
    //         mxRecords.sort((a, b) => a.priority - b.priority);

    //         // Step 2: Attempt SMTP connection to the first available mail server
    //         const smtpServer = mxRecords[0].exchange;

    //         return await this.checkMailboxExists(smtpServer, email);
    //     } catch (error) {
    //         console.error('Error during MX lookup or SMTP validation:', error);
    //         return false;
    //     }
    // }

    //// Helper function to check if the mailbox exists using SMTP
    // private async checkMailboxExists(smtpServer: string, email: string): Promise<boolean> {
    //     return new Promise((resolve, reject) => {
    //         const socket = net.createConnection(25, smtpServer);

    //         socket.on('connect', () => {
    //             socket.write(`HELO ${smtpServer}\r\n`);
    //             socket.write(`MAIL FROM: <test@example.com>\r\n`);
    //             socket.write(`RCPT TO: <${email}>\r\n`);
    //         });

    //         socket.on('data', (data) => {
    //             const response = data.toString();
    //             if (response.includes('250')) {
    //                 // 250 is an SMTP success code
    //                 socket.end();
    //                 resolve(true); // Email exists
    //             } else if (response.includes('550')) {
    //                 // 550 means the mailbox doesn't exist
    //                 socket.end();
    //                 resolve(false); // Email doesn't exist
    //             }
    //         });

    //         socket.on('error', (error) => {
    //             console.error('SMTP connection error:', error);
    //             socket.end();
    //             resolve(false);
    //         });

    //         socket.on('end', () => {
    //             // SMTP connection closed
    //         });

    //         socket.setTimeout(5000, () => {
    //             // Timeout after 5 seconds
    //             socket.end();
    //             resolve(false); // Assume email doesn't exist if no response
    //         });
    //     });
    // }

    public async rejectUpdate({ request, response, auth }: HttpContext) {
        const authUser = auth.user!;

@@ -353,7 +311,7 @@ export default class DatasetsController {
            return response
                .flash(
                    `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
                    'warning',
                )
                .redirect()
                .toRoute('editor.dataset.list');

@@ -388,7 +346,9 @@ export default class DatasetsController {
                emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`;
            } catch (error) {
                logger.error(error);
                return response
                    .flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
                    .toRoute('editor.dataset.list');
            }
        } else {
            emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`;

@@ -536,10 +496,375 @@ export default class DatasetsController {

    public async show({}: HttpContext) {}

    public async edit({ request, inertia, response }: HttpContext) {
        const id = request.param('id');
        const datasetQuery = Dataset.query().where('id', id);
        datasetQuery
            .preload('titles', (query) => query.orderBy('id', 'asc'))
            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
            .preload('coverage')
            .preload('licenses')
            .preload('authors')
            .preload('contributors')
            // .preload('subjects')
            .preload('subjects', (builder) => {
                builder.orderBy('id', 'asc').withCount('datasets');
            })
            .preload('references')
            .preload('files', (query) => {
                query.orderBy('sort_order', 'asc'); // Sort by sort_order column
            });

        const dataset = await datasetQuery.firstOrFail();
        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                    'warning',
                )
                .toRoute('editor.dataset.list');
        }

        const titleTypes = Object.entries(TitleTypes)
            .filter(([value]) => value !== 'Main')
            .map(([key, value]) => ({ value: key, label: value }));

        const descriptionTypes = Object.entries(DescriptionTypes)
            .filter(([value]) => value !== 'Abstract')
            .map(([key, value]) => ({ value: key, label: value }));

        const languages = await Language.query().where('active', true).pluck('part1', 'part1');

        // const contributorTypes = Config.get('enums.contributor_types');
        const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));

        // const nameTypes = Config.get('enums.name_types');
        const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));

        // const messages = await Database.table('messages')
        //     .pluck('help_text', 'metadata_element');

        const projects = await Project.query().pluck('label', 'id');

        const currentDate = new Date();
        const currentYear = currentDate.getFullYear();
        const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
        // const userHasRoles = user.roles;
        // const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
        // const checkeds = dataset.licenses.first().id;

        const doctypes = {
            analysisdata: { label: 'Analysis', value: 'analysisdata' },
            measurementdata: { label: 'Measurements', value: 'measurementdata' },
            monitoring: 'Monitoring',
            remotesensing: 'Remote Sensing',
            gis: 'GIS',
            models: 'Models',
            mixedtype: 'Mixed Type',
        };

        return inertia.render('Editor/Dataset/Edit', {
            dataset,
            titletypes: titleTypes,
            descriptiontypes: descriptionTypes,
            contributorTypes,
            nameTypes,
            languages,
            // messages,
            projects,
            licenses,
            // datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
            // checkeds,
            years,
            subjectTypes: SubjectTypes,
            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
            doctypes,
        });
    }

    public async update({ request, response, session }: HttpContext) {
        // Get the dataset id from the route parameter
        const datasetId = request.param('id');
        // Retrieve the dataset and load its existing files
        const dataset = await Dataset.findOrFail(datasetId);
        await dataset.load('files');

        let trx: TransactionClientContract | null = null;
        try {
            await request.validateUsing(updateEditorDatasetValidator);
            trx = await db.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);
            const dataset = await Dataset.findOrFail(datasetId);

            // save the licenses
            const licenses: number[] = request.input('licenses', []);
            await dataset.useTransaction(trx).related('licenses').sync(licenses);

            // save authors and contributors
            await dataset.useTransaction(trx).related('authors').sync([]);
            await dataset.useTransaction(trx).related('contributors').sync([]);
            await savePersons(dataset, request.input('authors', []), 'author', trx);
            await savePersons(dataset, request.input('contributors', []), 'contributor', trx);

            // save the titles
            const titles = request.input('titles', []);
            for (const titleData of titles) {
                if (titleData.id) {
                    const title = await Title.findOrFail(titleData.id);
                    title.value = titleData.value;
                    title.language = titleData.language;
                    title.type = titleData.type;
                    if (title.$isDirty) {
                        await title.useTransaction(trx).save();
                    }
                } else {
                    const title = new Title();
                    title.fill(titleData);
                    await dataset.useTransaction(trx).related('titles').save(title);
                }
            }

            // save the abstracts
            const descriptions = request.input('descriptions', []);
            for (const descriptionData of descriptions) {
                if (descriptionData.id) {
                    const description = await Description.findOrFail(descriptionData.id);
                    description.value = descriptionData.value;
                    description.language = descriptionData.language;
                    description.type = descriptionData.type;
                    if (description.$isDirty) {
                        await description.useTransaction(trx).save();
                    }
                } else {
                    const description = new Description();
                    description.fill(descriptionData);
                    await dataset.useTransaction(trx).related('descriptions').save(description);
                }
            }

            // Process all subjects/keywords from the request
            const subjects = request.input('subjects');
            for (const subjectData of subjects) {
                // Case 1: Subject already exists in the database (has an ID)
                if (subjectData.id) {
                    // Retrieve the existing subject
                    const existingSubject = await Subject.findOrFail(subjectData.id);

                    // Update subject properties from the request data
                    existingSubject.value = subjectData.value;
                    existingSubject.type = subjectData.type;
                    existingSubject.external_key = subjectData.external_key;

                    // Only save if there are actual changes
                    if (existingSubject.$isDirty) {
                        await existingSubject.save();
                    }

                    // Note: The relationship between dataset and subject is already established,
                    // so we don't need to attach it again
                }
                // Case 2: New subject being added (no ID)
                else {
                    // Check if a subject with the same value and type already exists in the database
                    const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);

                    if (subject.$isNew === true) {
                        // If it's a completely new subject, create and associate it with the dataset
                        await dataset.useTransaction(trx).related('subjects').save(subject);
                    } else {
                        // If the subject already exists, just create the relationship
                        await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
                    }
                }
            }

            const subjectsToDelete = request.input('subjectsToDelete', []);
            for (const subjectData of subjectsToDelete) {
                if (subjectData.id) {
                    // const subject = await Subject.findOrFail(subjectData.id);
                    const subject = await Subject.query()
                        .where('id', subjectData.id)
                        .preload('datasets', (builder) => {
                            builder.orderBy('id', 'asc');
                        })
                        .withCount('datasets')
                        .firstOrFail();

                    // Check if the subject is used by multiple datasets
                    if (subject.$extras.datasets_count > 1) {
                        // If used by multiple datasets, just detach it from the current dataset
                        await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                    } else {
                        // If only used by this dataset, detach it and delete the subject completely
                        await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                        await subject.useTransaction(trx).delete();
                    }
                }
            }

            // Process references
            const references = request.input('references', []);
            // First, get existing references to determine which ones to update vs. create
            const existingReferences = await dataset.related('references').query();
            const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));

            for (const referenceData of references) {
                if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
                    // Update existing reference
                    const reference = existingReferencesMap.get(referenceData.id);
                    if (reference) {
                        reference.merge(referenceData);
                        if (reference.$isDirty) {
                            await reference.useTransaction(trx).save();
                        }
                    }
                } else {
                    // Create new reference
                    const dataReference = new DatasetReference();
                    dataReference.fill(referenceData);
                    await dataset.useTransaction(trx).related('references').save(dataReference);
                }
            }

            // Handle references to delete if provided
            const referencesToDelete = request.input('referencesToDelete', []);
            for (const referenceData of referencesToDelete) {
                if (referenceData.id) {
                    const reference = await DatasetReference.findOrFail(referenceData.id);
                    await reference.useTransaction(trx).delete();
                }
            }

            // save coverage
            const coverageData = request.input('coverage');
            if (coverageData) {
                if (coverageData.id) {
                    const coverage = await Coverage.findOrFail(coverageData.id);
                    coverage.merge(coverageData);
                    if (coverage.$isDirty) {
                        await coverage.useTransaction(trx).save();
                    }
                }
            }

            const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
            // dataset.type = request.input('type');
            dataset.merge(input);
            // let test: boolean = dataset.$isDirty;
            await dataset.useTransaction(trx).save();

            await trx.commit();
            // console.log('Dataset has been updated successfully');

            session.flash('message', 'Dataset has been updated successfully');
            // return response.redirect().toRoute('user.index');
            return response.redirect().toRoute('editor.dataset.edit', [dataset.id]);
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to update dataset and related models:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }
    }

    public async categorize({ inertia, request, response }: HttpContext) {
        const id = request.param('id');
        // Preload dataset and its "collections" relation
        const dataset = await Dataset.query().where('id', id).preload('collections').firstOrFail();
        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        const collectionRoles = await CollectionRole.query()
            .preload('collections', (coll: Collection) => {
                // preload only top-level collections with no parent_id
                coll.whereNull('parent_id').orderBy('number', 'asc');
            })
            .exec();

        return inertia.render('Editor/Dataset/Category', {
            collectionRoles: collectionRoles,
            dataset: dataset,
            relatedCollections: dataset.collections,
        });
    }

    public async categorizeUpdate({ request, response, session }: HttpContext) {
        // Get the dataset id from the route parameter
        const id = request.param('id');
        const dataset = await Dataset.query().preload('files').where('id', id).firstOrFail();

        const validStates = ['editor_accepted', 'rejected_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('editor.dataset.list');
        }

        let trx: TransactionClientContract | null = null;
        try {
            trx = await db.transaction();
            // const user = (await User.find(auth.user?.id)) as User;
            // await this.createDatasetAndAssociations(user, request, trx);

            // Retrieve the selected collections from the request.
            // This should be an array of collection ids.
            const collections: number[] = request.input('collections', []);

            // Synchronize the dataset collections using the transaction.
            await dataset.useTransaction(trx).related('collections').sync(collections);

            // Commit the transaction.
            await trx.commit();

            // Redirect with a success flash message.
            // return response.flash('success', 'Dataset collections updated successfully!').redirect().toRoute('dataset.list');
            session.flash('message', 'Dataset collections updated successfully!');
            return response.redirect().toRoute('editor.dataset.list');
        } catch (error) {
            if (trx !== null) {
                await trx.rollback();
            }
            console.error('Failed to categorize dataset collections:', error);
            // throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
            throw error;
        }
    }

    // public async update({}: HttpContextContract) {}
    public async updateOpensearch({ response }: HttpContext) {
        const id = 273; // request.param('id');
        const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();
        // add xml elements

@@ -655,6 +980,19 @@ export default class DatasetsController {
        }
    }

    public async download({ params, response }: HttpContext) {
        const id = params.id;
        // Find the file by ID
        const file = await File.findOrFail(id);
        // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
        const filePath = file.filePath;
        const fileExt = file.filePath.split('.').pop() || '';
        // Set the response headers and download the file
        response.header('Content-Type', file.mime_type || 'application/octet-stream');
        response.attachment(`${file.label}.${fileExt}`);
        return response.download(filePath);
    }

    public async destroy({}: HttpContext) {}

    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {

@@ -29,12 +29,8 @@ import {
} from '#contracts/enums';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import DatasetReference from '#models/dataset_reference';
import { cuid } from '@adonisjs/core/helpers';
import File from '#models/file';
import ClamScan from 'clamscan';
// import { ValidationException } from '@adonisjs/validator';
// import Drive from '@ioc:Adonis/Core/Drive';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import path from 'path';
import { Exception } from '@adonisjs/core/exceptions';

@@ -945,10 +941,9 @@ export default class DatasetController {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('dataset.list');
        }

@@ -1020,7 +1015,11 @@ export default class DatasetController {
        const dataset = await Dataset.findOrFail(datasetId);
        await dataset.load('files');
        // Accumulate the size of the already related files
        // (file.fileSize may arrive as a string, so coerce each value with Number())
        // const preExistingFileSize = dataset.files.reduce((acc, file) => acc + file.fileSize, 0);
        let preExistingFileSize = 0;
        for (const file of dataset.files) {
            preExistingFileSize += Number(file.fileSize);
        }

        const uploadedTmpFiles: string[] = [];
        // Only process multipart if the request has a multipart content type

@@ -1150,22 +1149,97 @@
            }
        }

        // Process all subjects/keywords from the request
        const subjects = request.input('subjects');
        for (const subjectData of subjects) {
            // Case 1: Subject already exists in the database (has an ID)
            if (subjectData.id) {
                // Retrieve the existing subject
                const existingSubject = await Subject.findOrFail(subjectData.id);

                // Update subject properties from the request data
                existingSubject.value = subjectData.value;
                existingSubject.type = subjectData.type;
                existingSubject.external_key = subjectData.external_key;

                // Only save if there are actual changes
                if (existingSubject.$isDirty) {
                    await existingSubject.save();
                }

                // Note: The relationship between dataset and subject is already established,
                // so we don't need to attach it again
            }
            // Case 2: New subject being added (no ID)
            else {
                // Check if a subject with the same value and type already exists in the database
                const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);

                if (subject.$isNew === true) {
                    // If it's a completely new subject, create and associate it with the dataset
                    await dataset.useTransaction(trx).related('subjects').save(subject);
                } else {
                    // If the subject already exists, just create the relationship
                    await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
                }
            }
        }

        const subjectsToDelete = request.input('subjectsToDelete', []);
        for (const subjectData of subjectsToDelete) {
            if (subjectData.id) {
                // const subject = await Subject.findOrFail(subjectData.id);
                const subject = await Subject.query()
                    .where('id', subjectData.id)
                    .preload('datasets', (builder) => {
                        builder.orderBy('id', 'asc');
                    })
                    .withCount('datasets')
                    .firstOrFail();

                // Check if the subject is used by multiple datasets
                if (subject.$extras.datasets_count > 1) {
                    // If used by multiple datasets, just detach it from the current dataset
                    await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                } else {
                    // If only used by this dataset, detach it and delete the subject completely
                    await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
                    await subject.useTransaction(trx).delete();
                }
            }
        }

        // Process references
        const references = request.input('references', []);
        // First, get existing references to determine which ones to update vs. create
        const existingReferences = await dataset.related('references').query();
        const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));

        for (const referenceData of references) {
            if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
                // Update existing reference
                const reference = existingReferencesMap.get(referenceData.id);
                if (reference) {
                    reference.merge(referenceData);
                    if (reference.$isDirty) {
                        await reference.useTransaction(trx).save();
                    }
                }
            } else {
                // Create new reference
                const dataReference = new DatasetReference();
                dataReference.fill(referenceData);
                await dataset.useTransaction(trx).related('references').save(dataReference);
            }
        }

        // Handle references to delete if provided
        const referencesToDelete = request.input('referencesToDelete', []);
        for (const referenceData of referencesToDelete) {
            if (referenceData.id) {
                const reference = await DatasetReference.findOrFail(referenceData.id);
                await reference.useTransaction(trx).delete();
            }
        }

@@ -1269,7 +1343,7 @@ export default class DatasetController {
        await dataset.useTransaction(trx).save();

        await trx.commit();
        console.log('Dataset has been updated successfully');

        session.flash('message', 'Dataset has been updated successfully');
        // return response.redirect().toRoute('user.index');

@@ -3,6 +3,13 @@ import type { BodyParserConfig } from '#models/types';
import { createId } from '@paralleldrive/cuid2';
import { tmpdir } from 'node:os';
import config from '@adonisjs/core/services/config';
import Dataset from '#models/dataset';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import Person from '#models/person';

interface Dictionary {
    [index: string]: string;
}

export function sum(a: number, b: number): number {
    return a + b;
@@ -78,3 +85,40 @@ export function formatBytes(bytes: number): string {
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}

export async function savePersons(dataset: Dataset, persons: any[], role: string, trx: TransactionClientContract) {
    for (const [key, person] of persons.entries()) {
        const pivotData = {
            role: role,
            sort_order: key + 1,
            allow_email_contact: false,
            ...extractPivotAttributes(person), // Merge pivot attributes here
        };

        if (person.id !== undefined) {
            await dataset
                .useTransaction(trx)
                .related('persons')
                .attach({
                    [person.id]: pivotData,
                });
        } else {
            const dataPerson = new Person();
            dataPerson.fill(person);
            await dataset.useTransaction(trx).related('persons').save(dataPerson, false, pivotData);
        }
    }
}

// Helper function to extract pivot attributes from a person object
function extractPivotAttributes(person: any) {
    const pivotAttributes: Dictionary = {};
    for (const key in person) {
        if (key.startsWith('pivot_')) {
            // pivotAttributes[key] = person[key];
            const cleanKey = key.replace('pivot_', ''); // Remove 'pivot_' prefix
            pivotAttributes[cleanKey] = person[key];
        }
    }
    return pivotAttributes;
}

@@ -314,12 +314,137 @@ export const updateDatasetValidator = vine.compile(
    }),
);

// files: schema.array([rules.minLength(1)]).members(
//     schema.file({
//         size: '512mb',
//         extnames: ['jpg', 'gif', 'png', 'tif', 'pdf', 'zip', 'fgb', 'nc', 'qml', 'ovr', 'gpkg', 'gml', 'gpx', 'kml', 'kmz', 'json'],
//     }),
// ),
export const updateEditorDatasetValidator = vine.compile(
    vine.object({
        // first step
        language: vine
            .string()
            .trim()
            .regex(/^[a-zA-Z0-9]+$/),
        licenses: vine.array(vine.number()).minLength(1), // require at least one license for the dataset
        rights: vine.string().in(['true']),
        // second step
        type: vine.string().trim().minLength(3).maxLength(255),
        creating_corporation: vine.string().trim().minLength(3).maxLength(255),
        titles: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255),
                    type: vine.enum(Object.values(TitleTypes)),
                    language: vine
                        .string()
                        .trim()
                        .minLength(2)
                        .maxLength(255)
                        .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
                }),
            )
            // .minLength(2)
            .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
        descriptions: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(2500),
                    type: vine.enum(Object.values(DescriptionTypes)),
                    language: vine
                        .string()
                        .trim()
                        .minLength(2)
                        .maxLength(255)
                        .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
                }),
            )
            .arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
        authors: vine
            .array(
                vine.object({
                    email: vine
                        .string()
                        .trim()
                        .maxLength(255)
                        .email()
                        .normalizeEmail()
                        .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
                    first_name: vine.string().trim().minLength(3).maxLength(255),
                    last_name: vine.string().trim().minLength(3).maxLength(255),
                }),
            )
            .minLength(1)
            .distinct('email'),
        contributors: vine
            .array(
                vine.object({
                    email: vine
                        .string()
                        .trim()
                        .maxLength(255)
                        .email()
                        .normalizeEmail()
                        .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
                    first_name: vine.string().trim().minLength(3).maxLength(255),
                    last_name: vine.string().trim().minLength(3).maxLength(255),
                    pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
                }),
            )
            .distinct('email')
            .optional(),
        // third step
        project_id: vine.number().optional(),
        // embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
        embargo_date: vine
            .date({
                formats: ['YYYY-MM-DD'],
            })
            .afterOrEqual((_field) => {
                return dayjs().add(10, 'day').format('YYYY-MM-DD');
            })
            .optional(),
        coverage: vine.object({
            x_min: vine.number(),
            x_max: vine.number(),
            y_min: vine.number(),
            y_max: vine.number(),
            elevation_absolut: vine.number().positive().optional(),
            elevation_min: vine.number().positive().optional().requiredIfExists('elevation_max'),
            elevation_max: vine.number().positive().optional().requiredIfExists('elevation_min'),
            depth_absolut: vine.number().negative().optional(),
            depth_min: vine.number().negative().optional().requiredIfExists('depth_max'),
            depth_max: vine.number().negative().optional().requiredIfExists('depth_min'),
            time_abolute: vine.date({ formats: { utc: true } }).optional(),
            time_min: vine
                .date({ formats: { utc: true } })
                .beforeField('time_max')
                .optional()
                .requiredIfExists('time_max'),
            time_max: vine
                .date({ formats: { utc: true } })
                .afterField('time_min')
                .optional()
                .requiredIfExists('time_min'),
        }),
        references: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255).validateReference({ typeField: 'type' }),
                    type: vine.enum(Object.values(ReferenceIdentifierTypes)),
                    relation: vine.enum(Object.values(RelationTypes)),
                    label: vine.string().trim().minLength(2).maxLength(255),
                }),
            )
            .optional(),
        subjects: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255),
                    language: vine.string().trim().minLength(2).maxLength(255),
                }),
            )
            .minLength(3)
            .distinct('value'),
    }),
);

let messagesProvider = new SimpleMessagesProvider({
    'minLength': '{{ field }} must be at least {{ min }} characters long',