hotfix: enhance editor dataset management and improve UI
- Implemented dataset editing functionality for editor roles, including fetching, updating, and categorizing datasets.
- Added routes and controller actions for editing, updating, and categorizing datasets within the editor interface.
- Integrated UI components for managing dataset metadata, subjects, references, and files.
- Enhanced keyword management with features for adding, editing, and deleting keywords, including handling keywords used by multiple datasets (see the sketch below the commit details).
- Improved reference management with features for adding, editing, and deleting dataset references.
- Added validation for dataset updates using the `updateEditorDatasetValidator`.
- Updated the dataset edit form to include components for managing titles, descriptions, authors, contributors, licenses, coverage, subjects, references, and files.
- Implemented transaction management for dataset updates to ensure data consistency.
- Added a download route for files associated with datasets.
- Improved the UI for displaying and interacting with datasets in the editor index view, including edit and categorize buttons.
- Fixed an issue where the file size was not calculated correctly.
- Added a tooltip to the keyword value column in the TableKeywords component to explain when keywords are editable.
- Added a section to display keywords that are marked for deletion.
- Added a section to display references that are marked for deletion, with a restore button to undo the deletion.
- Updated the SearchCategoryAutocomplete component to support read-only mode.
- Updated the FormControl component to support read-only mode.
- Added icons and styling improvements to various components.
- Added default values for subjectsToDelete and referencesToDelete in the dataset model.
- Updated the FooterBar component to use the JustboilLogo component.
- Updated app.ts to fetch chart data without a year parameter.
- Updated Login.vue to invert the logo in dark mode.
- Updated AccountInfo.vue to add a Head component.
parent 10d159a57a
commit f04c1f6327
30 changed files with 2284 additions and 539 deletions
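The keyword handling in the controller diff below distinguishes keywords still used by other datasets from ones that would be orphaned. A minimal sketch of that rule, assuming a `Subject` model under the `#models/subject` alias and a `Dataset` model under `#models/dataset` (these import paths are not shown in this excerpt and are assumptions), with Lucid's `withCount` counter exposed via `$extras`:

```ts
// Hedged sketch, not project code: '#models/*' aliases follow the pattern seen
// in the imports hunk below but are assumptions for Subject and Dataset.
import Subject from '#models/subject';
import Dataset from '#models/dataset';
import type { TransactionClientContract } from '@adonisjs/lucid/types/database';

// Remove a keyword (subject) from one dataset without breaking other datasets
// that still reference it: always detach the pivot row, only hard-delete orphans.
async function removeKeyword(dataset: Dataset, subjectId: number, trx: TransactionClientContract) {
    const subject = await Subject.query()
        .where('id', subjectId)
        .withCount('datasets')
        .firstOrFail();

    // The pivot row for this dataset is removed in every case.
    await dataset.useTransaction(trx).related('subjects').detach([subject.id]);

    // Only delete the subject itself when no other dataset still uses it.
    if (Number(subject.$extras.datasets_count) <= 1) {
        await subject.useTransaction(trx).delete();
    }
}
```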
@@ -29,12 +29,8 @@ import {
} from '#contracts/enums';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import DatasetReference from '#models/dataset_reference';
import { cuid } from '@adonisjs/core/helpers';
import File from '#models/file';
import ClamScan from 'clamscan';
// import { ValidationException } from '@adonisjs/validator';
// import Drive from '@ioc:Adonis/Core/Drive';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import path from 'path';
import { Exception } from '@adonisjs/core/exceptions';
@@ -945,10 +941,9 @@ export default class DatasetController {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be edited. Datset has server state ${dataset.server_state}.`,
'warning',
)
.redirect()
.toRoute('dataset.list');
}
@@ -1020,7 +1015,11 @@ export default class DatasetController {
const dataset = await Dataset.findOrFail(datasetId);
await dataset.load('files');
// Accumulate the size of the already related files
const preExistingFileSize = dataset.files.reduce((acc, file) => acc + file.fileSize, 0);
// const preExistingFileSize = dataset.files.reduce((acc, file) => acc + file.fileSize, 0);
let preExistingFileSize = 0;
for (const file of dataset.files) {
preExistingFileSize += Number(file.fileSize);
}

const uploadedTmpFiles: string[] = [];
// Only process multipart if the request has a multipart content type
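The hunk above replaces a `reduce` over `file.fileSize` with an explicit loop that converts via `Number()`. A standalone sketch of the pitfall this guards against, assuming `fileSize` is delivered as a string by the database driver (an assumption, not confirmed by the diff):

```ts
// Hypothetical values, not project data: if fileSize arrives as a string,
// `acc + file.fileSize` concatenates instead of adding, which would explain
// the miscalculated total mentioned in the commit message.
const files = [{ fileSize: '1024' }, { fileSize: '2048' }];

const concatenated = files.reduce((acc: any, f) => acc + f.fileSize, 0);
console.log(concatenated); // "010242048" – string concatenation, not a sum

let total = 0;
for (const f of files) {
    total += Number(f.fileSize); // explicit conversion, as in the hunk above
}
console.log(total); // 3072
```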
@@ -1150,22 +1149,97 @@ export default class DatasetController {
}
}

// await dataset.useTransaction(trx).related('subjects').sync([]);
const keywords = request.input('subjects');
for (const keywordData of keywords) {
if (keywordData.id) {
const subject = await Subject.findOrFail(keywordData.id);
// await dataset.useTransaction(trx).related('subjects').attach([keywordData.id]);
subject.value = keywordData.value;
subject.type = keywordData.type;
subject.external_key = keywordData.external_key;
if (subject.$isDirty) {
await subject.save();
// Process all subjects/keywords from the request
const subjects = request.input('subjects');
for (const subjectData of subjects) {
// Case 1: Subject already exists in the database (has an ID)
if (subjectData.id) {
// Retrieve the existing subject
const existingSubject = await Subject.findOrFail(subjectData.id);

// Update subject properties from the request data
existingSubject.value = subjectData.value;
existingSubject.type = subjectData.type;
existingSubject.external_key = subjectData.external_key;

// Only save if there are actual changes
if (existingSubject.$isDirty) {
await existingSubject.save();
}

// Note: The relationship between dataset and subject is already established,
// so we don't need to attach it again
}
// Case 2: New subject being added (no ID)
else {
// Check if a subject with the same value and type already exists in the database
const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);

if (subject.$isNew === true) {
// If it's a completely new subject, create and associate it with the dataset
await dataset.useTransaction(trx).related('subjects').save(subject);
} else {
// If the subject already exists, just create the relationship
await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
}
}
}

const subjectsToDelete = request.input('subjectsToDelete', []);
for (const subjectData of subjectsToDelete) {
if (subjectData.id) {
// const subject = await Subject.findOrFail(subjectData.id);
const subject = await Subject.query()
.where('id', subjectData.id)
.preload('datasets', (builder) => {
builder.orderBy('id', 'asc');
})
.withCount('datasets')
.firstOrFail();

// Check if the subject is used by multiple datasets
if (subject.$extras.datasets_count > 1) {
// If used by multiple datasets, just detach it from the current dataset
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
} else {
// If only used by this dataset, delete the subject completely

await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
await subject.useTransaction(trx).delete();
}
}
}

// Process references
const references = request.input('references', []);
// First, get existing references to determine which ones to update vs. create
const existingReferences = await dataset.related('references').query();
const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));

for (const referenceData of references) {
if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
// Update existing reference
const reference = existingReferencesMap.get(referenceData.id);
if (reference) {
reference.merge(referenceData);
if (reference.$isDirty) {
await reference.useTransaction(trx).save();
}
}
} else {
const keyword = new Subject();
keyword.fill(keywordData);
await dataset.useTransaction(trx).related('subjects').save(keyword, false);
// Create new reference
const dataReference = new DatasetReference();
dataReference.fill(referenceData);
await dataset.useTransaction(trx).related('references').save(dataReference);
}
}

// Handle references to delete if provided
const referencesToDelete = request.input('referencesToDelete', []);
for (const referenceData of referencesToDelete) {
if (referenceData.id) {
const reference = await DatasetReference.findOrFail(referenceData.id);
await reference.useTransaction(trx).delete();
}
}
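All of the subject and reference handling above runs against a shared transaction client (`useTransaction(trx)`), and the hunk below commits it once everything has been applied. A minimal sketch of that wrapper, assuming Lucid's `db.transaction()` from `@adonisjs/lucid/services/db` (the import is not shown in this excerpt) and a hypothetical `applyUpdates` callback standing in for the per-relation logic above:

```ts
import db from '@adonisjs/lucid/services/db';
import Dataset from '#models/dataset'; // '#models/*' alias assumed, as above
import type { TransactionClientContract } from '@adonisjs/lucid/types/database';

// Hedged sketch of the transaction pattern implied by the useTransaction(trx)
// calls in the hunks above; `applyUpdates` is a hypothetical stand-in for the
// subject, reference, and file handling shown in this diff.
async function updateDatasetInTransaction(
    datasetId: number,
    applyUpdates: (dataset: Dataset, trx: TransactionClientContract) => Promise<void>,
) {
    const trx = await db.transaction();
    try {
        const dataset = await Dataset.findOrFail(datasetId);
        await applyUpdates(dataset, trx);
        await dataset.useTransaction(trx).save();
        await trx.commit(); // all related changes persist together, as in the hunk below
    } catch (error) {
        await trx.rollback(); // nothing persists if any step fails
        throw error;
    }
}
```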
@@ -1269,7 +1343,7 @@ export default class DatasetController {
await dataset.useTransaction(trx).save();

await trx.commit();
console.log('Dataset and related models created successfully');
console.log('Dataset has been updated successfully');

session.flash('message', 'Dataset has been updated successfully');
// return response.redirect().toRoute('user.index');