hot-fix: Add ORCID validation and improve dataset editing UX
### Major Features

- Add comprehensive ORCID validation with checksum verification
- Implement unsaved changes detection and auto-save functionality
- Enhanced form component reactivity and state management

### ORCID Implementation

- Create custom VineJS ORCID validation rule with MOD-11-2 algorithm
- Add ORCID fields to Person model and TablePersons component
- Update dataset validators to include ORCID validation
- Add descriptive placeholder text for ORCID input fields

### UI/UX Improvements

- Add UnsavedChangesWarning component with detailed change tracking
- Improve FormCheckRadio and FormCheckRadioGroup reactivity
- Enhanced BaseButton with proper disabled state handling
- Better error handling and user feedback in file validation

### Data Management

- Implement sophisticated change detection for all dataset fields
- Add proper handling of array ordering for authors/contributors
- Improve license selection with better state management
- Enhanced subject/keyword processing with duplicate detection

### Technical Improvements

- Optimize search indexing with conditional updates based on modification dates
- Update person model column mapping for ORCID
- Improve validation error messages and user guidance
- Better handling of file uploads and deletion tracking

### Dependencies

- Update various npm packages (AWS SDK, Babel, Vite, etc.)
- Add baseline-browser-mapping for better browser compatibility

### Bug Fixes

- Fix form reactivity issues with checkbox/radio groups
- Improve error handling in file validation rules
- Better handling of edge cases in change detection
parent 06ed2f3625
commit 8f67839f93

16 changed files with 2657 additions and 1168 deletions
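The commit message mentions a custom VineJS rule that verifies the ORCID check digit with the MOD-11-2 algorithm. The ORCID implementation itself is not shown in the excerpt below, so the following is only a minimal sketch of ISO 7064 MOD 11-2 check-digit verification; the function name and structure are illustrative and may differ from the rule added in this commit.

```ts
// Minimal sketch of ISO 7064 MOD 11-2 check-digit verification for an ORCID iD.
// Illustrative only; the actual VineJS rule in this commit may be structured differently.
function isValidOrcidChecksum(orcid: string): boolean {
    // Accept the canonical form 0000-0002-1825-0097 or a bare 16-character string.
    const digits = orcid.replace(/-/g, '').toUpperCase();
    if (!/^\d{15}[\dX]$/.test(digits)) {
        return false;
    }

    // Fold the first 15 digits into a running total: total = (total + digit) * 2.
    let total = 0;
    for (let i = 0; i < 15; i++) {
        total = (total + Number(digits[i])) * 2;
    }

    // Derive the expected check digit; a remainder that maps to 10 is written as 'X'.
    const remainder = total % 11;
    const result = (12 - remainder) % 11;
    const expected = result === 10 ? 'X' : String(result);

    return digits[15] === expected;
}

// Example: the well-known test ORCID iD of Josiah Carberry.
console.log(isValidOrcidChecksum('0000-0002-1825-0097')); // true
console.log(isValidOrcidChecksum('0000-0002-1825-0098')); // false (wrong check digit)
```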
```diff
@@ -12,10 +12,8 @@ import { getDomain } from '#app/utils/utility-functions';
 import { BaseCommand, flags } from '@adonisjs/core/ace';
 import { CommandOptions } from '@adonisjs/core/types/ace';
 import env from '#start/env';
-// import db from '@adonisjs/lucid/services/db';
-// import { default as Dataset } from '#models/dataset';
 import logger from '@adonisjs/core/services/logger';
-
+import { DateTime } from 'luxon';
 
 const opensearchNode = env.get('OPENSEARCH_HOST', 'localhost');
 const client = new Client({ node: `${opensearchNode}` }); // replace with your OpenSearch endpoint
@@ -30,11 +28,10 @@ export default class IndexDatasets extends BaseCommand {
     public publish_id: number;
 
     public static options: CommandOptions = {
-        startApp: true,
-        staysAlive: false,
+        startApp: true, // Ensures the IoC container is ready to use
+        staysAlive: false, // Command exits after running
     };
 
-
     async run() {
         logger.debug('Hello world!');
         // const { default: Dataset } = await import('#models/dataset');
@@ -44,10 +41,12 @@ export default class IndexDatasets extends BaseCommand {
         const index_name = 'tethys-records';
 
         for (var dataset of datasets) {
-            // Logger.info(`File publish_id ${dataset.publish_id}`);
-            // const jsonString = await this.getJsonString(dataset, proc);
-            // console.log(jsonString);
-            await this.indexDocument(dataset, index_name, proc);
+            const shouldUpdate = await this.shouldUpdateDataset(dataset, index_name);
+            if (shouldUpdate) {
+                await this.indexDocument(dataset, index_name, proc);
+            } else {
+                logger.info(`Dataset with publish_id ${dataset.publish_id} is up to date, skipping indexing`);
+            }
         }
     }
 
@@ -65,6 +64,46 @@ export default class IndexDatasets extends BaseCommand {
         return await query.exec();
     }
 
+    private async shouldUpdateDataset(dataset: Dataset, index_name: string): Promise<boolean> {
+        try {
+            // Check if publish_id exists before proceeding
+            if (!dataset.publish_id) {
+                // Return true to update since document doesn't exist in OpenSearch yet
+                return true;
+            }
+            // Get the existing document from OpenSearch
+            const response = await client.get({
+                index: index_name,
+                id: dataset.publish_id?.toString(),
+            });
+
+            const existingDoc = response.body._source;
+
+            // Compare server_date_modified
+            if (existingDoc && existingDoc.server_date_modified) {
+                // Convert Unix timestamp (seconds) to milliseconds for DateTime.fromMillis()
+                const existingModified = DateTime.fromMillis(Number(existingDoc.server_date_modified) * 1000);
+                const currentModified = dataset.server_date_modified;
+
+                // Only update if the dataset has been modified more recently
+                if (currentModified <= existingModified) {
+                    return false;
+                }
+            }
+
+            return true;
+        } catch (error) {
+            // If document doesn't exist or other error, we should index it
+            if (error.statusCode === 404) {
+                logger.info(`Dataset with publish_id ${dataset.publish_id} not found in index, will create new document`);
+                return true;
+            }
+
+            logger.warn(`Error checking existing document for publish_id ${dataset.publish_id}: ${error.message}`);
+            return true; // Index anyway if we can't determine the status
+        }
+    }
+
     private async indexDocument(dataset: Dataset, index_name: string, proc: Buffer): Promise<void> {
         try {
             const doc = await this.getJsonString(dataset, proc);
@@ -78,7 +117,8 @@ export default class IndexDatasets extends BaseCommand {
             });
             logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
         } catch (error) {
-            logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`);
+            logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.
+            Error: ${error.message}`);
         }
     }
 
```
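The new `shouldUpdateDataset` method converts the indexed `server_date_modified` value (stored as epoch seconds) into a Luxon `DateTime` before comparing it with the model's timestamp. A minimal standalone sketch of that comparison, using hypothetical values rather than data from the actual index:

```ts
import { DateTime } from 'luxon';

// Hypothetical values: the OpenSearch document stores epoch seconds,
// while the model exposes server_date_modified as a Luxon DateTime.
const indexedEpochSeconds = 1714060800; // what _source.server_date_modified might hold
const modelModified = DateTime.fromISO('2024-05-01T12:00:00Z');

// Same conversion as in shouldUpdateDataset: seconds -> milliseconds -> DateTime.
const indexedModified = DateTime.fromMillis(indexedEpochSeconds * 1000);
// DateTime.fromSeconds(indexedEpochSeconds) would be an equivalent shortcut.

// Re-index only when the model copy is newer than the indexed copy. The command
// compares DateTimes directly with <=, which works because Luxon's valueOf()
// yields the millisecond timestamp; toMillis() makes that explicit here.
const shouldUpdate = modelModified.toMillis() > indexedModified.toMillis();
console.log(shouldUpdate); // true here: the model timestamp is later than the indexed one
```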