Compare commits


No commits in common. "master" and "master" have entirely different histories.

216 changed files with 14399 additions and 25382 deletions


@@ -17,6 +17,4 @@ REDIS_PORT=6379
 REDIS_PASSWORD=
 SMTP_HOST=
 SMTP_PORT=
 RESEND_API_KEY=
-OPENSEARCH_HOST=http://localhost
-OPENSEARCH_CORE=tethys-records


@@ -13,7 +13,7 @@ jobs:
         uses: actions/checkout@v3
       - run: echo "The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "The workflow is now ready to test your code on the runner."
-      - name: List files in the repository
+      - name: List files in the repository:
         run: |
           ls ${{ github.workspace }}
       - run: echo "This job's status is ${{ job.status }}."


@@ -1,78 +0,0 @@
# This is a Gitea Actions workflow configuration file for running CI tests on the `feat/checkReferenceType` branch.
# The workflow is named "CI" and runs on the latest Ubuntu environment using a Node.js 20 Docker container.
# It sets up a PostgreSQL service with specified environment variables and health checks.
# The workflow includes the following steps:
# 1. Checkout the repository using the actions/checkout@v3 action.
# 2. Install Node.js dependencies using `npm ci`.
# 3. Create a `.env.test` file by copying from `.env.example`.
# 4. Set up environment variables in the `.env.test` file, including database connection details and other app-specific settings.
# 5. Run functional tests using the `node ace test functional --groups "ReferenceValidation"` command.
name: CI
run-name: Running tests for checkReferenceType branch
on:
  push:
    branches:
      - feat/checkReferenceType
jobs:
  container-job:
    runs-on: ubuntu-latest
    # Docker Hub image that `container-job` executes in
    container: node:20-bullseye
    services:
      # Label used to access the service container
      postgres:
        image: postgres:latest
        env:
          POSTGRES_USER: alice
          POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
          POSTGRES_DB: tethys_dev
        # ports:
        #   - 5432:5432
        options: |
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      # - name: Set up Node.js
      #   uses: actions/setup-node@v2
      #   with:
      #     node-version: '20'
      - name: Install dependencies
        run: npm ci
      - name: Create .env.test file
        run: cp .env.example .env.test
      - name: Set up environment variables
        run: |
          echo "DB_CONNECTION=pg" >> .env.test
          echo "PG_HOST=postgres" >> .env.test
          echo "PG_PORT=5432" >> .env.test
          echo "PG_USER=alice" >> .env.test
          echo "PG_PASSWORD=${{ secrets.POSTGRES_PASSWORD }}" >> .env.test
          echo "PG_DB_NAME=tethys_dev" >> .env.test
          echo "NODE_ENV=test" >> .env.test
          echo "ASSETS_DRIVER=fake" >> .env.test
          echo "SESSION_DRIVER=memory" >> .env.test
          echo "HASH_DRIVER=bcrypt" >> .env.test
          echo "HOST=127.0.0.1" >> .env.test
          echo "PORT=3333" >> .env.test
          echo "APP_NAME=TethysCloud" >> .env.test
          echo "APP_URL=http://${HOST}:${PORT}" >> .env.test
          echo "CACHE_VIEWS=false" >> .env.test
          echo "APP_KEY=pfi5N2ACN4tMJ5d8d8BPHfh3FEuvleej" >> .env.test
          echo "DRIVE_DISK=local" >> .env.test
          echo "OAI_LIST_SIZE=200" >> .env.test
          echo "OPENSEARCH_HOST=${{ secrets.OPENSEARCH_HOST }}" >> .env.test
          echo "OPENSEARCH_CORE=tethys-records" >> .env.test
      - name: Run tests
        run: node ace test functional --groups "ReferenceValidation"


@@ -4,13 +4,7 @@
 name: CI Pipeline
 run-name: ${{ github.actor }} is running CI pipeline
 # trigger build when pushing, or when creating a pull request
-on:
-  push:
-    branches:
-      - master
-  pull_request:
-    branches:
-      - master
+on: [push, pull_request]
 jobs:
   # Label of the container job
@@ -18,7 +12,7 @@ jobs:
     # run build on latest ubuntu
     runs-on: ubuntu-latest
-    container: node:20-bullseye
+    container: node:18-bullseye
     services:
       mydb:
@@ -76,7 +70,6 @@ jobs:
           && echo "CACHE_VIEWS=false" >> .env.test
           && echo "APP_KEY=pfi5N2ACN4tMJ5d8d8BPHfh3FEuvleej" >> .env.test
           && echo "DRIVE_DISK=local" >> .env.test
-          && echo "OAI_LIST_SIZE=200" >> .env.test
       # finally run the tests
       # - run: npm test
@@ -102,4 +95,3 @@ jobs:
       #   uses: coverallsapp/github-action@master
       #   with:
       #     github-token: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@@ -7,4 +7,3 @@ coverage
 tmp
 docker-compose.yml
 .env.test
-public/assets


@ -1,63 +1,57 @@
################## First Stage - Creating base ######################### ################## First Stage - Creating base #########################
# Created a variable to hold our node base image # Created a variable to hold our node base image
ARG NODE_IMAGE=node:22-trixie-slim ARG NODE_IMAGE=node:20-bookworm-slim
FROM $NODE_IMAGE AS base FROM $NODE_IMAGE AS base
# Install dumb-init and ClamAV, and perform ClamAV database update # Install dumb-init and ClamAV, and perform ClamAV database update
RUN apt-get update \ RUN apt update \
&& apt-get install -y --no-install-recommends \ && apt-get install -y dumb-init clamav clamav-daemon nano \
dumb-init \
clamav \
clamav-daemon \
clamdscan \
ca-certificates \
&& rm -rf /var/lib/apt/lists/* \ && rm -rf /var/lib/apt/lists/* \
# Creating folders and changing ownerships # Creating folders and changing ownerships
&& mkdir -p /home/node/app \ && mkdir -p /home/node/app && chown node:node /home/node/app \
&& mkdir -p /var/lib/clamav \ && mkdir -p /var/lib/clamav \
&& mkdir /usr/local/share/clamav \ && mkdir /usr/local/share/clamav \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav \
# permissions
&& mkdir /var/run/clamav \ && mkdir /var/run/clamav \
&& mkdir -p /var/log/clamav \ && chown node:clamav /var/run/clamav \
&& mkdir -p /tmp/clamav-logs \ && chmod 750 /var/run/clamav
# -----------------------------------------------
# Set ownership and permissions # --- ClamAV & FeshClam -------------------------
&& chown node:node /home/node/app \ # -----------------------------------------------
# && chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav \ # RUN \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav /var/log/clamav \ # chmod 644 /etc/clamav/freshclam.conf && \
&& chown -R node:clamav /etc/clamav \ # freshclam && \
&& chmod 755 /tmp/clamav-logs \ # mkdir /var/run/clamav && \
&& chmod 750 /var/run/clamav \ # chown -R clamav:root /var/run/clamav
&& chmod 755 /var/lib/clamav \
&& chmod 755 /var/log/clamav \
# Add node user to clamav group and allow sudo for clamav commands
&& usermod -a -G clamav node
# && chmod 666 /var/run/clamav/clamd.socket
# Make directories group-writable so node (as member of clamav group) can access them
# && chmod 750 /var/run/clamav /var/lib/clamav /var/log/clamav /tmp/clamav-logs
# # initial update of av databases
# RUN freshclam
# Configure ClamAV - copy config files before switching user # Configure Clam AV...
# COPY --chown=node:clamav ./*.conf /etc/clamav/
COPY --chown=node:clamav ./*.conf /etc/clamav/ COPY --chown=node:clamav ./*.conf /etc/clamav/
# # permissions
# RUN mkdir /var/run/clamav && \
# chown node:clamav /var/run/clamav && \
# chmod 750 /var/run/clamav
# Setting the working directory # Setting the working directory
WORKDIR /home/node/app WORKDIR /home/node/app
# Changing the current active user to "node" # Changing the current active user to "node"
# Download initial ClamAV database as root before switching users
USER node USER node
RUN freshclam --quiet || echo "Initial database download failed - will retry at runtime"
# Copy entrypoint script # initial update of av databases
RUN freshclam
# VOLUME /var/lib/clamav
COPY --chown=node:clamav docker-entrypoint.sh /home/node/app/docker-entrypoint.sh COPY --chown=node:clamav docker-entrypoint.sh /home/node/app/docker-entrypoint.sh
RUN chmod +x /home/node/app/docker-entrypoint.sh RUN chmod +x /home/node/app/docker-entrypoint.sh
ENV TZ="Europe/Vienna" ENV TZ="Europe/Vienna"
################## Second Stage - Installing dependencies ########## ################## Second Stage - Installing dependencies ##########
# In this stage, we will start installing dependencies # In this stage, we will start installing dependencies
FROM base AS dependencies FROM base AS dependencies
@ -76,6 +70,7 @@ ENV NODE_ENV=production
# We run "node ace build" to build the app (dist folder) for production # We run "node ace build" to build the app (dist folder) for production
RUN node ace build --ignore-ts-errors RUN node ace build --ignore-ts-errors
# RUN node ace build --production # RUN node ace build --production
# RUN node ace build --ignore-ts-errors
################## Final Stage - Production ######################### ################## Final Stage - Production #########################
@ -93,7 +88,6 @@ RUN npm ci --omit=dev
# Copy files to the working directory from the build folder the user # Copy files to the working directory from the build folder the user
COPY --chown=node:node --from=build /home/node/app/build . COPY --chown=node:node --from=build /home/node/app/build .
# Expose port # Expose port
# EXPOSE 3310
EXPOSE 3333 EXPOSE 3333
ENTRYPOINT ["/home/node/app/docker-entrypoint.sh"] ENTRYPOINT ["/home/node/app/docker-entrypoint.sh"]
# Run the command to start the server using "dumb-init" # Run the command to start the server using "dumb-init"

LICENSE

@@ -1,22 +0,0 @@
MIT License
Copyright (c) 2025 Tethys Research Repository
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

ace.js

@@ -15,11 +15,10 @@
 /**
  * Register hook to process TypeScript files using ts-node
  */
-// import { register } from 'node:module';
-// register('ts-node/esm', import.meta.url);
-import 'ts-node-maintained/register/esm';
+import { register } from 'node:module'
+register('ts-node/esm', import.meta.url)
 /**
  * Import ace console entrypoint
  */
-await import('./bin/console.js');
+await import('./bin/console.js')


@@ -1,7 +1,7 @@
-import { defineConfig } from '@adonisjs/core/app';
+import { defineConfig } from '@adonisjs/core/app'
 export default defineConfig({
   /*
   |--------------------------------------------------------------------------
   | Commands
   |--------------------------------------------------------------------------
@@ -10,12 +10,12 @@ export default defineConfig({
   | will be scanned automatically from the "./commands" directory.
   */
   commands: [
     () => import('@adonisjs/core/commands'),
     () => import('@adonisjs/lucid/commands'),
     () => import('@adonisjs/mail/commands')
   ],
   /*
   |--------------------------------------------------------------------------
   | Preloads
   |--------------------------------------------------------------------------
@@ -23,23 +23,19 @@ export default defineConfig({
   | List of modules to import before starting the application.
   |
   */
   preloads: [
     () => import('./start/routes.js'),
     () => import('./start/kernel.js'),
     () => import('#start/validator'),
-    // () => import('#start/rules/unique'),
-    // () => import('#start/rules/translated_language'),
-    // () => import('#start/rules/unique_person'),
-    // // () => import('#start/rules/file_length'),
-    // // () => import('#start/rules/file_scan'),
-    // // () => import('#start/rules/allowed_extensions_mimetypes'),
-    // () => import('#start/rules/dependent_array_min_length'),
-    // () => import('#start/rules/referenceValidation'),
-    // () => import('#start/rules/valid_mimetype'),
-    // () => import('#start/rules/array_contains_types'),
-    // () => import('#start/rules/orcid'),
+    () => import('#start/rules/unique'),
+    () => import('#start/rules/translated_language'),
+    () => import('#start/rules/unique_person'),
+    () => import('#start/rules/file_length'),
+    () => import('#start/rules/file_scan'),
+    () => import('#start/rules/allowed_extensions_mimetypes'),
+    () => import('#start/rules/dependent_array_min_length')
   ],
   /*
   |--------------------------------------------------------------------------
   | Service providers
   |--------------------------------------------------------------------------
@@ -48,49 +44,48 @@ export default defineConfig({
   | application
   |
   */
   providers: [
     // () => import('./providers/AppProvider.js'),
     () => import('@adonisjs/core/providers/app_provider'),
     () => import('@adonisjs/core/providers/hash_provider'),
     {
       file: () => import('@adonisjs/core/providers/repl_provider'),
       environment: ['repl', 'test'],
     },
     () => import('@adonisjs/session/session_provider'),
     () => import('@adonisjs/core/providers/edge_provider'),
     () => import('@adonisjs/shield/shield_provider'),
     // () => import('@eidellev/inertia-adonisjs'),
     // () => import('@adonisjs/inertia/inertia_provider'),
     () => import('#providers/app_provider'),
     () => import('#providers/inertia_provider'),
     () => import('@adonisjs/lucid/database_provider'),
     () => import('@adonisjs/auth/auth_provider'),
     // () => import('@eidellev/adonis-stardust'),
     () => import('@adonisjs/redis/redis_provider'),
-    // () => import('@adonisjs/encore/encore_provider'),
+    () => import('@adonisjs/encore/encore_provider'),
     () => import('@adonisjs/static/static_provider'),
     () => import('#providers/stardust_provider'),
     () => import('#providers/query_builder_provider'),
     () => import('#providers/token_worker_provider'),
-    () => import('#providers/rule_provider'),
-    // () => import('#providers/drive/provider/drive_provider'),
-    () => import('@adonisjs/drive/drive_provider'),
-    // () => import('@adonisjs/core/providers/vinejs_provider'),
-    () => import('#providers/vinejs_provider'),
-    () => import('@adonisjs/mail/mail_provider'),
-    () => import('@adonisjs/vite/vite_provider'),
+    // () => import('#providers/validator_provider'),
+    () => import('#providers/drive/provider/drive_provider'),
+    // () => import('@adonisjs/core/providers/vinejs_provider'),
+    () => import('#providers/vinejs_provider'),
+    () => import('@adonisjs/mail/mail_provider')
+    // () => import('#providers/mail_provider'),
   ],
   metaFiles: [
     {
       pattern: 'public/**',
       reloadServer: false,
     },
     {
       pattern: 'resources/views/**/*.edge',
       reloadServer: false,
     },
   ],
   /*
   |--------------------------------------------------------------------------
   | Tests
   |--------------------------------------------------------------------------
@@ -99,24 +94,22 @@ export default defineConfig({
   | and add additional suites.
   |
   */
   tests: {
     suites: [
       {
         files: ['tests/unit/**/*.spec(.ts|.js)'],
         name: 'unit',
         timeout: 2000,
       },
       {
         files: ['tests/functional/**/*.spec(.ts|.js)'],
         name: 'functional',
         timeout: 30000,
       },
     ],
     forceExit: false,
   },
-  assetsBundler: false,
-  hooks: {
-    onBuildStarting: [() => import('@adonisjs/vite/build_hook')],
-  },
-});
+  // assetsBundler: false
+})


@@ -85,9 +85,7 @@ export default class AdminuserController {
       // return response.badRequest(error.messages);
       throw error;
     }
-    const input: Record<string, any> = request.only(['login', 'email', 'first_name', 'last_name']);
-    input.password = request.input('new_password');
+    const input = request.only(['login', 'email', 'password', 'first_name', 'last_name']);
     const user = await User.create(input);
     if (request.input('roles')) {
       const roles: Array<number> = request.input('roles');
@@ -97,6 +95,7 @@ export default class AdminuserController {
     session.flash('message', 'User has been created successfully');
     return response.redirect().toRoute('settings.user.index');
   }
   public async show({ request, inertia }: HttpContext) {
     const id = request.param('id');
     const user = await User.query().where('id', id).firstOrFail();
@@ -140,11 +139,9 @@ export default class AdminuserController {
     });
     // password is optional
-    let input: Record<string, any>;
-    if (request.input('new_password')) {
-      input = request.only(['login', 'email', 'first_name', 'last_name']);
-      input.password = request.input('new_password');
+    let input;
+    if (request.input('password')) {
+      input = request.only(['login', 'email', 'password', 'first_name', 'last_name']);
     } else {
       input = request.only(['login', 'email', 'first_name', 'last_name']);
     }
@@ -159,6 +156,7 @@ export default class AdminuserController {
     session.flash('message', 'User has been updated successfully');
     return response.redirect().toRoute('settings.user.index');
   }
   public async destroy({ request, response, session }: HttpContext) {
     const id = request.param('id');
     const user = await User.findOrFail(id);
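Note on the left-hand handlers above: the form submits a new_password field, which is remapped onto the password attribute the model expects. A minimal TypeScript sketch of that remapping (field names are from the diff; the values are hypothetical):

// Sketch: form sends `new_password`, User.create() expects `password`.
const body: Record<string, any> = { login: 'jdoe', email: 'jdoe@example.org', new_password: 's3cret' }; // hypothetical input
const input: Record<string, any> = { login: body.login, email: body.email };
input.password = body.new_password;
console.log(input); // { login: 'jdoe', email: 'jdoe@example.org', password: 's3cret' }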


@@ -25,7 +25,6 @@ export default class MimetypeController {
       const newDatasetSchema = vine.object({
         name: vine.string().trim().isUnique({ table: 'mime_types', column: 'name' }),
         file_extension: vine.array(vine.string()).minLength(1), // define at least one extension for the new mimetype
-        alternate_mimetype: vine.array(vine.string().isValidMimetype()).distinct().optional(), // define alias mimetypes
         enabled: vine.boolean(),
       });
       // await request.validate({ schema: newDatasetSchema, messages: this.messages });
@@ -33,22 +32,18 @@ export default class MimetypeController {
       // Step 2 - Validate request body against the schema
       // await request.validate({ schema: newDatasetSchema, messages: this.messages });
       const validator = vine.compile(newDatasetSchema);
       validator.messagesProvider = new SimpleMessagesProvider(this.messages);
-      await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });
+      await request.validateUsing(validator);
     } catch (error) {
       // Step 3 - Handle errors
       // return response.badRequest(error.messages);
       throw error;
     }
-    const input = request.only(['name', 'enabled', 'file_extension', 'alternate_mimetype']);
+    const input = request.only(['name', 'enabled', 'file_extension']);
     // Concatenate the file_extensions array into a string with '|' as the separator
     if (Array.isArray(input.file_extension)) {
       input.file_extension = input.file_extension.join('|');
     }
-    // Concatenate the alias_mimetype array into a string with '|' as the separator
-    if (Array.isArray(input.alternate_mimetype)) {
-      input.alternate_mimetype = input.alternate_mimetype.join('|');
-    }
     await MimeType.create(input);
     // if (request.input('roles')) {
     //     const roles: Array<number> = request.input('roles');
@@ -64,7 +59,7 @@ export default class MimetypeController {
     'maxLength': '{{ field }} must be less then {{ max }} characters long',
     'isUnique': '{{ field }} must be unique, and this value is already taken',
     'required': '{{ field }} is required',
-    'file_extension.array.minLength': 'at least {{ min }} mimetypes must be defined',
+    'file_extension.minLength': 'at least {{ min }} mimetypes must be defined',
     'file_extension.*.string': 'Each file extension must be a valid string', // Adjusted to match the type
   };
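The store handler above flattens the validated file_extension array (and, on the left-hand side, alternate_mimetype) into a pipe-separated string before persisting. A minimal TypeScript sketch of that transform, with illustrative values:

// Illustrative: mirrors the '|' join applied before MimeType.create().
const input: Record<string, any> = { name: 'image/tiff', enabled: true, file_extension: ['tif', 'tiff'] };
if (Array.isArray(input.file_extension)) {
    input.file_extension = input.file_extension.join('|');
}
console.log(input.file_extension); // 'tif|tiff'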


@@ -76,24 +76,23 @@ export default class MailSettingsController {
   public async sendTestMail({ response, auth }: HttpContext) {
     const user = auth.user!;
     const userEmail = user.email;
     // let mailManager = await app.container.make('mail.manager');
     // let iwas = mailManager.use();
     // let test = mail.config.mailers.smtp();
     if (!userEmail) {
       return response.badRequest({ message: 'User email is not set. Please update your profile.' });
     }
     try {
-      await mail.send(
-        (message) => {
-          message
-            // .from(Config.get('mail.from.address'))
-            .from('tethys@geosphere.at')
-            .to(userEmail)
-            .subject('Test Email')
-            .html('<p>If you received this email, the email configuration seems to be correct.</p>');
-        });
+      await mail.send((message) => {
+        message
+          // .from(Config.get('mail.from.address'))
+          .from('tethys@geosphere.at')
+          .to(userEmail)
+          .subject('Test Email')
+          .html('<p>If you received this email, the email configuration seems to be correct.</p>');
+      });
       return response.json({ success: true, message: 'Test email sent successfully' });
       // return response.flash('Test email sent successfully!', 'message').redirect().back();


@@ -4,29 +4,17 @@ import Person from '#models/person';
 // node ace make:controller Author
 export default class AuthorsController {
   public async index({}: HttpContext) {
+    // select * from gba.persons
+    // where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
+    // where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
     const authors = await Person.query()
-      .select([
-        'id',
-        'academic_title',
-        'first_name',
-        'last_name',
-        'identifier_orcid',
-        'status',
-        'name_type',
-        'created_at'
-        // Note: 'email' is omitted
-      ])
-      .preload('datasets')
-      .where('name_type', 'Personal')
       .whereHas('datasets', (dQuery) => {
         dQuery.wherePivot('role', 'author');
       })
       .withCount('datasets', (query) => {
         query.as('datasets_count');
-      })
-      .orderBy('datasets_count', 'desc');
+      });
     return authors;
   }
@@ -37,10 +25,7 @@ export default class AuthorsController {
     if (request.input('filter')) {
       // users = users.whereRaw('name like %?%', [request.input('search')])
       const searchTerm = request.input('filter');
-      authors.andWhere((query) => {
-        query.whereILike('first_name', `%${searchTerm}%`)
-          .orWhereILike('last_name', `%${searchTerm}%`);
-      });
+      authors.whereILike('first_name', `%${searchTerm}%`).orWhereILike('last_name', `%${searchTerm}%`);
       // .orWhere('email', 'like', `%${searchTerm}%`);
     }
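The grouping on the left-hand side matters for operator precedence. Roughly, the two variants produce WHERE clauses of these shapes (a sketch, not the exact SQL Lucid emits):

// Grouped (left):    ... AND (first_name ILIKE :term OR last_name ILIKE :term)
// Ungrouped (right): ... AND first_name ILIKE :term OR last_name ILIKE :term
// In the ungrouped form the trailing OR is not constrained by the earlier
// predicates, which is why the left-hand code wraps both ILIKEs in andWhere().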


@@ -1,212 +1,65 @@
 import type { HttpContext } from '@adonisjs/core/http';
 import { StatusCodes } from 'http-status-codes';
-import redis from '@adonisjs/redis/services/main';
-const PREFIXES = ['von', 'van', 'de', 'del', 'della', 'di', 'da', 'dos', 'du', 'le', 'la'];
-const DEFAULT_SIZE = 50;
-const MIN_SIZE = 16;
-const MAX_SIZE = 512;
-const FONT_SIZE_RATIO = 0.4;
-const COLOR_LIGHTENING_PERCENT = 60;
-const COLOR_DARKENING_FACTOR = 0.6;
-const CACHE_TTL = 24 * 60 * 60; // 24 hours instead of 1 hour
+// import * as fs from 'fs';
+// import * as path from 'path';
+const prefixes = ['von', 'van'];
+// node ace make:controller Author
 export default class AvatarController {
   public async generateAvatar({ request, response }: HttpContext) {
     try {
-      const { name, size = DEFAULT_SIZE } = request.only(['name', 'size']);
-      // Enhanced validation
-      if (!name || typeof name !== 'string' || name.trim().length === 0) {
-        return response.status(StatusCodes.BAD_REQUEST).json({
-          error: 'Name is required and must be a non-empty string',
-        });
-      }
-      const parsedSize = this.validateSize(size);
-      if (!parsedSize.isValid) {
-        return response.status(StatusCodes.BAD_REQUEST).json({
-          error: parsedSize.error,
-        });
-      }
-      // Build a unique cache key for the given name and size
-      const cacheKey = `avatar:${this.sanitizeName(name)}-${parsedSize.value}`;
-      // const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
-      try {
-        const cachedSvg = await redis.get(cacheKey);
-        if (cachedSvg) {
-          this.setResponseHeaders(response);
-          return response.send(cachedSvg);
-        }
-      } catch (redisError) {
-        // Log redis error but continue without cache
-        console.warn('Redis cache read failed:', redisError);
-      }
-      // Generate initials
-      // const initials = name
-      //     .split(' ')
-      //     .map((part) => part.charAt(0).toUpperCase())
-      //     .join('');
+      const { name, background, textColor, size } = request.only(['name', 'background', 'textColor', 'size']);
       const initials = this.getInitials(name);
-      const colors = this.generateColors(name);
-      const svgContent = this.createSvg(size, colors, initials);
-      // // Cache the generated avatar for future use, e.g. 1 hour expiry
-      try {
-        await redis.setex(cacheKey, CACHE_TTL, svgContent);
-      } catch (redisError) {
-        // Log but don't fail the request
-        console.warn('Redis cache write failed:', redisError);
-      }
-      this.setResponseHeaders(response);
+      // Define SVG content with dynamic values for initials, background color, text color, and size
+      const svgContent = `
+        <svg width="${size || 50}" height="${size || 50}" xmlns="http://www.w3.org/2000/svg">
+          <rect width="100%" height="100%" fill="#${background || '7F9CF5'}"/>
+          <text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-weight="bold" font-family="Arial, sans-serif" font-size="${(size / 100) * 40 || 25}" fill="#${textColor || 'ffffff'}">${initials}</text>
+        </svg>
+      `;
+      // Set response headers for SVG content
+      response.header('Content-type', 'image/svg+xml');
+      response.header('Cache-Control', 'no-cache');
+      response.header('Pragma', 'no-cache');
+      response.header('Expires', '0');
       return response.send(svgContent);
     } catch (error) {
-      console.error('Avatar generation error:', error);
-      return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
-        error: 'Failed to generate avatar',
-      });
+      return response.status(StatusCodes.OK).json({ error: error.message });
     }
   }
-  private validateSize(size: any): { isValid: boolean; value?: number; error?: string } {
-    const numSize = Number(size);
-    if (isNaN(numSize)) {
-      return { isValid: false, error: 'Size must be a valid number' };
-    }
-    if (numSize < MIN_SIZE || numSize > MAX_SIZE) {
-      return {
-        isValid: false,
-        error: `Size must be between ${MIN_SIZE} and ${MAX_SIZE}`,
-      };
-    }
-    return { isValid: true, value: Math.floor(numSize) };
-  }
-  private sanitizeName(name: string): string {
-    return name
-      .trim()
-      .toLowerCase()
-      .replace(/[^a-z0-9\s-]/gi, '');
-  }
-  private getInitials(name: string): string {
-    const sanitized = name.trim().replace(/\s+/g, ' '); // normalize whitespace
-    const parts = sanitized
-      .split(' ')
-      .filter((part) => part.length > 0)
-      .map((part) => part.trim());
-    if (parts.length === 0) {
-      return 'NA';
-    }
-    if (parts.length === 1) {
-      // For single word, take first 2 characters or first char if only 1 char
-      return parts[0].substring(0, Math.min(2, parts[0].length)).toUpperCase();
-    }
-    return this.getMultiWordInitials(parts);
-  }
-  private getMultiWordInitials(parts: string[]): string {
-    // Filter out prefixes and short words
-    const significantParts = parts.filter((part) => !PREFIXES.includes(part.toLowerCase()) && part.length > 1);
-    if (significantParts.length === 0) {
-      // Fallback to first and last regardless of prefixes
-      const firstName = parts[0];
-      const lastName = parts[parts.length - 1];
-      return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
-    }
-    if (significantParts.length === 1) {
-      return significantParts[0].substring(0, 2).toUpperCase();
-    }
-    // Take first and last significant parts
-    const firstName = significantParts[0];
-    const lastName = significantParts[significantParts.length - 1];
-    return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
-  }
-  private generateColors(name: string): { background: string; text: string } {
-    const baseColor = this.getColorFromName(name);
-    return {
-      background: this.lightenColor(baseColor, COLOR_LIGHTENING_PERCENT),
-      text: this.darkenColor(baseColor),
-    };
-  }
-  private createSvg(size: number, colors: { background: string; text: string }, initials: string): string {
-    const fontSize = Math.max(12, Math.floor(size * FONT_SIZE_RATIO)); // Ensure readable font size
-    // Escape any potential HTML/XML characters in initials
-    const escapedInitials = this.escapeXml(initials);
-    return `<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${size} ${size}">
-        <rect width="100%" height="100%" fill="#${colors.background}" rx="${size * 0.1}"/>
-        <text x="50%" y="50%" dominant-baseline="central" text-anchor="middle"
-            font-weight="600" font-family="-apple-system, BlinkMacSystemFont, 'Segoe UI', system-ui, sans-serif"
-            font-size="${fontSize}" fill="#${colors.text}">${escapedInitials}</text>
-    </svg>`;
-  }
-  private escapeXml(text: string): string {
-    return text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&apos;');
-  }
-  private setResponseHeaders(response: HttpContext['response']): void {
-    response.header('Content-Type', 'image/svg+xml');
-    response.header('Cache-Control', 'public, max-age=86400'); // Cache for 1 day
-    response.header('ETag', `"${Date.now()}"`); // Simple ETag
-  }
-  private getColorFromName(name: string): string {
-    let hash = 0;
-    const normalizedName = name.toLowerCase().trim();
-    for (let i = 0; i < normalizedName.length; i++) {
-      hash = normalizedName.charCodeAt(i) + ((hash << 5) - hash);
-      hash = hash & hash; // Convert to 32-bit integer
-    }
-    // Ensure we get vibrant colors by constraining the color space
-    const colorParts = [];
-    for (let i = 0; i < 3; i++) {
-      let value = (hash >> (i * 8)) & 0xff;
-      // Ensure minimum color intensity for better contrast
-      value = Math.max(50, value);
-      colorParts.push(value.toString(16).padStart(2, '0'));
-    }
-    return colorParts.join('');
-  }
-  private lightenColor(hexColor: string, percent: number): string {
-    const r = parseInt(hexColor.substring(0, 2), 16);
-    const g = parseInt(hexColor.substring(2, 4), 16);
-    const b = parseInt(hexColor.substring(4, 6), 16);
-    const lightenValue = (value: number) => Math.min(255, Math.floor(value + (255 - value) * (percent / 100)));
-    const newR = lightenValue(r);
-    const newG = lightenValue(g);
-    const newB = lightenValue(b);
-    return ((newR << 16) | (newG << 8) | newB).toString(16).padStart(6, '0');
-  }
-  private darkenColor(hexColor: string): string {
-    const r = parseInt(hexColor.slice(0, 2), 16);
-    const g = parseInt(hexColor.slice(2, 4), 16);
-    const b = parseInt(hexColor.slice(4, 6), 16);
-    const darkenValue = (value: number) => Math.max(0, Math.floor(value * COLOR_DARKENING_FACTOR));
-    const darkerR = darkenValue(r);
-    const darkerG = darkenValue(g);
-    const darkerB = darkenValue(b);
-    return ((darkerR << 16) + (darkerG << 8) + darkerB).toString(16).padStart(6, '0');
-  }
+  private getInitials(name: string) {
+    const parts = name.split(' ');
+    let initials = '';
+    if (parts.length >= 2) {
+      const firstName = parts[0];
+      const lastName = parts[parts.length - 1];
+      const firstInitial = firstName.charAt(0).toUpperCase();
+      const lastInitial = lastName.charAt(0).toUpperCase();
+      if (prefixes.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
+        initials = firstInitial + lastName.charAt(1).toUpperCase();
+      } else {
+        initials = firstInitial + lastInitial;
+      }
+    } else if (parts.length === 1) {
+      initials = parts[0].substring(0, 2).toUpperCase();
+    }
+    return initials;
+  }
 }
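For reference, the right-hand getInitials has a special branch for all-caps prefix surnames. A standalone, runnable copy for tracing its behavior (the inputs are illustrative, not from the repository):

// Sketch: standalone copy of the right-hand getInitials above.
const prefixes = ['von', 'van'];
function getInitials(name: string): string {
    const parts = name.split(' ');
    let initials = '';
    if (parts.length >= 2) {
        const firstName = parts[0];
        const lastName = parts[parts.length - 1];
        const firstInitial = firstName.charAt(0).toUpperCase();
        const lastInitial = lastName.charAt(0).toUpperCase();
        if (prefixes.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
            // All-caps prefix surname: use its second character instead
            initials = firstInitial + lastName.charAt(1).toUpperCase();
        } else {
            initials = firstInitial + lastInitial;
        }
    } else if (parts.length === 1) {
        initials = parts[0].substring(0, 2).toUpperCase();
    }
    return initials;
}
console.log(getInitials('Ada Lovelace')); // 'AL' - first + last initial
console.log(getInitials('Ludwig VON'));   // 'LO' - all-caps prefix branch
console.log(getInitials('Plato'));        // 'PL' - single word: first two characters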


@@ -1,36 +1,19 @@
 import type { HttpContext } from '@adonisjs/core/http';
+// import Person from 'App/Models/Person';
 import Dataset from '#models/dataset';
 import { StatusCodes } from 'http-status-codes';
-import DatasetReference from '#models/dataset_reference';
 // node ace make:controller Author
 export default class DatasetController {
-  /**
-   * GET /api/datasets
-   * Find all published datasets
-   */
-  public async index({ response }: HttpContext) {
-    try {
-      const datasets = await Dataset.query()
-        .where(function (query) {
-          query.where('server_state', 'published').orWhere('server_state', 'deleted');
-        })
-        .preload('titles')
-        .preload('identifier')
-        .orderBy('server_date_published', 'desc');
-      return response.status(StatusCodes.OK).json(datasets);
-    } catch (error) {
-      return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
-        message: error.message || 'Some error occurred while retrieving datasets.',
-      });
-    }
+  public async index({}: HttpContext) {
+    // select * from gba.persons
+    // where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
+    // where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
+    const datasets = await Dataset.query().where('server_state', 'published').orWhere('server_state', 'deleted');
+    return datasets;
   }
-  /**
-   * GET /api/dataset
-   * Find all published datasets
-   */
   public async findAll({ response }: HttpContext) {
     try {
       const datasets = await Dataset.query()
@@ -46,279 +29,34 @@ export default class DatasetController {
     }
   }
-  /**
-   * GET /api/dataset/:publish_id
-   * Find one dataset by publish_id
-   */
-  public async findOne({ response, params }: HttpContext) {
-    try {
-      const dataset = await Dataset.query()
-        .where('publish_id', params.publish_id)
-        .preload('titles')
-        .preload('descriptions') // Using 'descriptions' instead of 'abstracts'
-        .preload('user', (builder) => {
-          builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
-        })
-        .preload('authors', (builder) => {
-          builder
-            .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-            .withCount('datasets', (query) => {
-              query.as('datasets_count');
-            })
-            .pivotColumns(['role', 'sort_order'])
-            .orderBy('pivot_sort_order', 'asc');
-        })
-        .preload('contributors', (builder) => {
-          builder
-            .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-            .withCount('datasets', (query) => {
-              query.as('datasets_count');
-            })
-            .pivotColumns(['role', 'sort_order', 'contributor_type'])
-            .orderBy('pivot_sort_order', 'asc');
-        })
-        .preload('subjects')
-        .preload('coverage')
-        .preload('licenses')
-        .preload('references')
-        .preload('project')
-        // .preload('referenced_by', (builder) => {
-        //     builder.preload('dataset', (builder) => {
-        //         builder.preload('identifier');
-        //     });
-        // })
-        .preload('files', (builder) => {
-          builder.preload('hashvalues');
-        })
-        .preload('identifier')
-        .first(); // Use first() instead of firstOrFail() to handle not found gracefully
-      if (!dataset) {
-        return response.status(StatusCodes.NOT_FOUND).json({
-          message: `Cannot find Dataset with publish_id=${params.publish_id}.`,
-        });
-      }
-      // Build the version chain
-      const versionChain = await this.buildVersionChain(dataset);
-      // Add version chain to response
-      const responseData = {
-        ...dataset.toJSON(),
-        versionChain: versionChain,
-      };
-      // return response.status(StatusCodes.OK).json(dataset);
-      return response.status(StatusCodes.OK).json(responseData);
-    } catch (error) {
-      return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
-        message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
-      });
-    }
-  }
-  /**
-   * GET /:prefix/:value
-   * Find dataset by identifier (e.g., https://doi.tethys.at/10.24341/tethys.99.2)
-   */
-  public async findByIdentifier({ response, params }: HttpContext) {
-    const identifierValue = `${params.prefix}/${params.value}`;
-    // Optional: Validate DOI format
-    if (!identifierValue.match(/^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/)) {
-      return response.status(StatusCodes.BAD_REQUEST).json({
-        message: `Invalid DOI format: ${identifierValue}`,
-      });
-    }
-    try {
-      // Method 1: Using subquery with whereIn (most similar to your original)
-      const dataset = await Dataset.query()
-        // .whereIn('id', (subQuery) => {
-        //     subQuery.select('dataset_id').from('dataset_identifiers').where('value', identifierValue);
-        // })
-        .whereHas('identifier', (builder) => {
-          builder.where('value', identifierValue);
-        })
-        .preload('titles')
-        .preload('descriptions') // Using 'descriptions' instead of 'abstracts'
-        .preload('user', (builder) => {
-          builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
-        })
-        .preload('authors', (builder) => {
-          builder
-            .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-            .withCount('datasets', (query) => {
-              query.as('datasets_count');
-            })
-            .pivotColumns(['role', 'sort_order'])
-            .wherePivot('role', 'author')
-            .orderBy('pivot_sort_order', 'asc');
-        })
-        .preload('contributors', (builder) => {
-          builder
-            .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-            .withCount('datasets', (query) => {
-              query.as('datasets_count');
-            })
-            .pivotColumns(['role', 'sort_order', 'contributor_type'])
-            .wherePivot('role', 'contributor')
-            .orderBy('pivot_sort_order', 'asc');
-        })
-        .preload('subjects')
-        .preload('coverage')
-        .preload('licenses')
-        .preload('references')
-        .preload('project')
-        // .preload('referenced_by', (builder) => {
-        //     builder.preload('dataset', (builder) => {
-        //         builder.preload('identifier');
-        //     });
-        // })
-        .preload('files', (builder) => {
-          builder.preload('hashvalues');
-        })
-        .preload('identifier')
-        .first();
-      if (!dataset) {
-        return response.status(StatusCodes.NOT_FOUND).json({
-          message: `Cannot find Dataset with identifier=${identifierValue}.`,
-        });
-      }
-      // Build the version chain
-      const versionChain = await this.buildVersionChain(dataset);
-      // Add version chain to response
-      const responseData = {
-        ...dataset.toJSON(),
-        versionChain: versionChain,
-      };
-      // return response.status(StatusCodes.OK).json(dataset);
-      return response.status(StatusCodes.OK).json(responseData);
-    } catch (error) {
-      return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
-        message: error.message || `Error retrieving Dataset with identifier=${identifierValue}.`,
-      });
-    }
-  }
-  /**
-   * Build the complete version chain for a dataset
-   * Traverses both backwards (previous versions) and forwards (newer versions)
-   */
-  private async buildVersionChain(dataset: Dataset) {
-    const versionChain = {
-      current: {
-        id: dataset.id,
-        publish_id: dataset.publish_id,
-        doi: dataset.identifier?.value || null,
-        main_title: dataset.mainTitle || null,
-        server_date_published: dataset.server_date_published,
-      },
-      previousVersions: [] as any[],
-      newerVersions: [] as any[],
-    };
-    // Get all previous versions (going backwards in time)
-    versionChain.previousVersions = await this.getPreviousVersions(dataset.id);
-    // Get all newer versions (going forwards in time)
-    versionChain.newerVersions = await this.getNewerVersions(dataset.id);
-    return versionChain;
-  }
-  /**
-   * Recursively get all previous versions
-   */
-  private async getPreviousVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
-    // Prevent infinite loops
-    if (visited.has(datasetId)) {
-      return [];
-    }
-    visited.add(datasetId);
-    const previousVersions: any[] = [];
-    // Find references where this dataset "IsNewVersionOf" another dataset
-    const previousRefs = await DatasetReference.query()
-      .where('document_id', datasetId)
-      .where('relation', 'IsNewVersionOf')
-      .whereNotNull('related_document_id');
-    for (const ref of previousRefs) {
-      if (!ref.related_document_id) continue;
-      const previousDataset = await Dataset.query()
-        .where('id', ref.related_document_id)
-        .preload('identifier')
-        .preload('titles')
-        .first();
-      if (previousDataset) {
-        const versionInfo = {
-          id: previousDataset.id,
-          publish_id: previousDataset.publish_id,
-          doi: previousDataset.identifier?.value || null,
-          main_title: previousDataset.mainTitle || null,
-          server_date_published: previousDataset.server_date_published,
-          relation: 'IsPreviousVersionOf', // From perspective of current dataset
-        };
-        previousVersions.push(versionInfo);
-        // Recursively get even older versions
-        const olderVersions = await this.getPreviousVersions(previousDataset.id, visited);
-        previousVersions.push(...olderVersions);
-      }
-    }
-    return previousVersions;
-  }
-  /**
-   * Recursively get all newer versions
-   */
-  private async getNewerVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
-    // Prevent infinite loops
-    if (visited.has(datasetId)) {
-      return [];
-    }
-    visited.add(datasetId);
-    const newerVersions: any[] = [];
-    // Find references where this dataset "IsPreviousVersionOf" another dataset
-    const newerRefs = await DatasetReference.query()
-      .where('document_id', datasetId)
-      .where('relation', 'IsPreviousVersionOf')
-      .whereNotNull('related_document_id');
-    for (const ref of newerRefs) {
-      if (!ref.related_document_id) continue;
-      const newerDataset = await Dataset.query().where('id', ref.related_document_id).preload('identifier').preload('titles').first();
-      if (newerDataset) {
-        const versionInfo = {
-          id: newerDataset.id,
-          publish_id: newerDataset.publish_id,
-          doi: newerDataset.identifier?.value || null,
-          main_title: newerDataset.mainTitle || null,
-          server_date_published: newerDataset.server_date_published,
-          relation: 'IsNewVersionOf', // From perspective of current dataset
-        };
-        newerVersions.push(versionInfo);
-        // Recursively get even newer versions
-        const evenNewerVersions = await this.getNewerVersions(newerDataset.id, visited);
-        newerVersions.push(...evenNewerVersions);
-      }
-    }
-    return newerVersions;
+  public async findOne({ params }: HttpContext) {
+    const datasets = await Dataset.query()
+      .where('publish_id', params.publish_id)
+      .preload('titles')
+      .preload('descriptions')
+      .preload('user')
+      .preload('authors', (builder) => {
+        builder.orderBy('pivot_sort_order', 'asc');
+      })
+      .preload('contributors', (builder) => {
+        builder.orderBy('pivot_sort_order', 'asc');
+      })
+      .preload('subjects')
+      .preload('coverage')
+      .preload('licenses')
+      .preload('references')
+      .preload('project')
+      .preload('referenced_by', (builder) => {
+        builder.preload('dataset', (builder) => {
+          builder.preload('identifier');
+        });
+      })
+      .preload('files', (builder) => {
+        builder.preload('hashvalues');
+      })
+      .preload('identifier')
+      .firstOrFail();
+    return datasets;
   }
 }
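As a note on the DOI check in findByIdentifier above: the pattern requires a bare 10.<registrant>/<suffix> value ending in dot-separated digits. A quick runnable sketch of what it accepts (the first value comes from the doc comment above; the others are illustrative):

const doiPattern = /^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/;
console.log(doiPattern.test('10.24341/tethys.99.2'));   // true
console.log(doiPattern.test('10.24341/tethys.99'));     // true
console.log(doiPattern.test('doi:10.24341/tethys.99')); // false - a scheme prefix is rejected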


@@ -2,103 +2,53 @@ import type { HttpContext } from '@adonisjs/core/http';
 import File from '#models/file';
 import { StatusCodes } from 'http-status-codes';
 import * as fs from 'fs';
-import { DateTime } from 'luxon';
+import * as path from 'path';
 // node ace make:controller Author
 export default class FileController {
   // @Get("download/:id")
   public async findOne({ response, params }: HttpContext) {
     const id = params.id;
-    // const file = await File.findOrFail(id);
-    // Load file with its related dataset to check embargo
-    const file = await File.query()
-      .where('id', id)
-      .preload('dataset') // or 'dataset' - whatever your relationship is named
-      .firstOrFail();
-    if (!file) {
-      return response.status(StatusCodes.NOT_FOUND).send({
-        message: `Cannot find File with id=${id}.`,
-      });
-    }
-    const dataset = file.dataset;
-    // Files from unpublished datasets are now blocked
-    if (dataset.server_state !== 'published') {
-      return response.status(StatusCodes.FORBIDDEN).send({
-        message: `File access denied: Dataset is not published.`,
-      });
-    }
-    if (dataset && this.isUnderEmbargo(dataset.embargo_date)) {
-      return response.status(StatusCodes.FORBIDDEN).send({
-        message: `File is under embargo until ${dataset.embargo_date?.toFormat('yyyy-MM-dd')}`,
-      });
-    }
-    // Proceed with file download
-    const filePath = '/storage/app/data/' + file.pathName;
-    const fileExt = file.filePath.split('.').pop() || '';
-    // const fileName = file.label + fileExt;
-    const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
-    // Determine if file can be previewed inline in browser
-    const canPreviewInline = (mimeType: string): boolean => {
-      const type = mimeType.toLowerCase();
-      return (
-        type === 'application/pdf' ||
-        type.startsWith('image/') ||
-        type.startsWith('text/') ||
-        type === 'application/json' ||
-        type === 'application/xml' ||
-        // Uncomment if you want video/audio inline
-        type.startsWith('video/') ||
-        type.startsWith('audio/')
-      );
-    };
-    const disposition = canPreviewInline(file.mimeType) ? 'inline' : 'attachment';
-    try {
-      fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
-      // console.log("can read/write:", filePath);
-      response
-        .header('Cache-Control', 'no-cache private')
-        .header('Content-Description', 'File Transfer')
-        .header('Content-Type', file.mimeType)
-        .header('Content-Disposition', `${disposition}; filename="${fileName}"`)
-        .header('Content-Transfer-Encoding', 'binary')
-        .header('Access-Control-Allow-Origin', '*')
-        .header('Access-Control-Allow-Methods', 'GET');
-      response.status(StatusCodes.OK).download(filePath);
-    } catch (err) {
-      // console.log("no access:", path);
-      response.status(StatusCodes.NOT_FOUND).send({
-        message: `File with id ${id} doesn't exist on file server`,
-      });
-    }
-  }
-  /**
-   * Check if the dataset is under embargo
-   * Compares only dates (ignoring time) for embargo check
-   * @param embargoDate - The embargo date from dataset
-   * @returns true if under embargo, false if embargo has passed or no embargo set
-   */
-  private isUnderEmbargo(embargoDate: DateTime | null): boolean {
-    // No embargo date set - allow download
-    if (!embargoDate) {
-      return false;
-    }
-    // Get current date at start of day (00:00:00)
-    const today = DateTime.now().startOf('day');
-    // Get embargo date at start of day (00:00:00)
-    const embargoDateOnly = embargoDate.startOf('day');
-    // File is under embargo if the embargo date is today or later,
-    // i.e. the embargo lifts the day after the embargo date
-    return embargoDateOnly >= today;
+    const file = await File.findOrFail(id);
+    // const file = await File.findOne({
+    //     where: { id: id },
+    // });
+    if (file) {
+      const filePath = '/storage/app/public/' + file.pathName;
+      const ext = path.extname(filePath);
+      const fileName = file.label + ext;
+      try {
+        fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
+        // console.log("can read/write:", path);
+        response
+          .header('Cache-Control', 'no-cache private')
+          .header('Content-Description', 'File Transfer')
+          .header('Content-Type', file.mimeType)
+          .header('Content-Disposition', 'inline; filename=' + fileName)
+          .header('Content-Transfer-Encoding', 'binary')
+          .header('Access-Control-Allow-Origin', '*')
+          .header('Access-Control-Allow-Methods', 'GET,POST');
+        response.status(StatusCodes.OK).download(filePath);
+      } catch (err) {
+        // console.log("no access:", path);
+        response.status(StatusCodes.NOT_FOUND).send({
+          message: `File with id ${id} doesn't exist on file server`,
+        });
+      }
+      // res.status(StatusCodes.OK).sendFile(filePath, (err) => {
+      //     // res.setHeader("Content-Type", "application/json");
+      //     // res.removeHeader("Content-Disposition");
+      //     res.status(StatusCodes.NOT_FOUND).send({
+      //         message: `File with id ${id} doesn't exist on file server`,
+      //     });
+      // });
+    } else {
+      response.status(StatusCodes.NOT_FOUND).send({
+        message: `Cannot find File with id=${id}.`,
+      });
+    }
   }
 }
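The date-only embargo comparison above uses >=, so a file stays blocked on the embargo date itself and becomes available the day after. A small runnable sketch with hypothetical dates:

import { DateTime } from 'luxon';
// Sketch of `embargoDateOnly >= today` with both sides truncated to start of day.
const embargo = DateTime.fromISO('2025-06-01').startOf('day');
const onTheDay = DateTime.fromISO('2025-06-01T15:30:00').startOf('day');
const dayAfter = DateTime.fromISO('2025-06-02T08:00:00').startOf('day');
console.log(embargo >= onTheDay); // true  -> still under embargo on the embargo date
console.log(embargo >= dayAfter); // false -> downloadable from the following day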


@@ -17,8 +17,7 @@ export default class HomeController {
     // .preload('authors')
     // .orderBy('server_date_published');
-    const datasets = await db
-      .from('documents as doc')
+    const datasets = await db.from('documents as doc')
       .select(['publish_id', 'server_date_published', db.raw(`date_part('year', server_date_published) as pub_year`)])
       .where('server_state', serverState)
       .innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
@@ -60,6 +59,7 @@ export default class HomeController {
     // const year = params.year;
     // const from = parseInt(year);
     try {
       // const datasets = await Database.from('documents as doc')
       //     .select([Database.raw(`date_part('month', server_date_published) as pub_month`), Database.raw('COUNT(*) as count')])
       //     .where('server_state', serverState)
@@ -68,12 +68,9 @@ export default class HomeController {
       //     .groupBy('pub_month');
       // // .orderBy('server_date_published');
-      // Calculate the last 4 years preceding the current year
-      const currentYear = new Date().getFullYear();
-      const years = Array.from({ length: 4 }, (_, i) => currentYear - (i + 1)).reverse();
-      const result = await db
-        .from('documents as doc')
+      const years = [2021, 2022, 2023]; // Add the second year
+      const result = await db.from('documents as doc')
         .select([
           db.raw(`date_part('year', server_date_published) as pub_year`),
           db.raw(`date_part('month', server_date_published) as pub_month`),
@@ -86,7 +83,7 @@ export default class HomeController {
         .groupBy('pub_year', 'pub_month')
         .orderBy('pub_year', 'asc')
         .orderBy('pub_month', 'asc');
       const labels = Array.from({ length: 12 }, (_, i) => i + 1); // Assuming 12 months
       const inputDatasets: Map<string, ChartDataset> = result.reduce((acc, item) => {
@@ -103,15 +100,15 @@ export default class HomeController {
         acc[pub_year].data[pub_month - 1] = parseInt(count);
-        return acc;
+        return acc ;
       }, {});
       const outputDatasets = Object.entries(inputDatasets).map(([year, data]) => ({
         data: data.data,
         label: year,
         borderColor: data.borderColor,
-        fill: data.fill,
+        fill: data.fill
       }));
       const data = {
         labels: labels,
@@ -129,11 +126,11 @@ export default class HomeController {
   private getRandomHexColor() {
     const letters = '0123456789ABCDEF';
     let color = '#';
     for (let i = 0; i < 6; i++) {
       color += letters[Math.floor(Math.random() * 16)];
     }
     return color;
   }
 }
@@ -142,4 +139,5 @@ interface ChartDataset {
   label: string;
   borderColor: string;
   fill: boolean;
 }
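On the left-hand side, the hard-coded year list is replaced by a computed window; note that it yields the four years before the current one, not including it. A runnable sketch:

// Sketch of the left-hand years calculation.
const currentYear = new Date().getFullYear(); // e.g. 2025
const years = Array.from({ length: 4 }, (_, i) => currentYear - (i + 1)).reverse();
console.log(years); // e.g. [2021, 2022, 2023, 2024] - the current year itself is excluded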


@ -9,24 +9,6 @@ import BackupCode from '#models/backup_code';
// Here we are generating secret and recovery codes for the user thats enabling 2FA and storing them to our database. // Here we are generating secret and recovery codes for the user thats enabling 2FA and storing them to our database.
export default class UserController { export default class UserController {
public async getSubmitters({ response }: HttpContext) {
try {
const submitters = await User.query()
.preload('roles', (query) => {
query.where('name', 'submitter')
})
.whereHas('roles', (query) => {
query.where('name', 'submitter')
})
.exec();
return submitters;
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: 'Failed to fetch submitters.',
});
}
}
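The new getSubmitters action only returns data; it still needs a route. A sketch of how it might be wired up in AdonisJS v6; the '/api/submitters' path and the controller import alias are assumptions, not taken from this diff:

import router from '@adonisjs/core/services/router';

// lazy controller import, standard AdonisJS v6 routing pattern
const UserController = () => import('#controllers/UserController');
router.get('/api/submitters', [UserController, 'getSubmitters']);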
public async enable({ auth, response, request }: HttpContext) { public async enable({ auth, response, request }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User; const user = (await User.find(auth.user?.id)) as User;
// await user.load('totp_secret'); // await user.load('totp_secret');


@ -1,36 +0,0 @@
import type { HttpContext } from '@adonisjs/core/http';
import Collection from '#models/collection';
export default class CollectionsController {
public async show({ params, response }: HttpContext) {
// Get the collection id from route parameters
const collectionId = params.id;
// Find the selected collection by id
const collection = await Collection.find(collectionId);
if (!collection) {
return response.status(404).json({ message: 'Collection not found' });
}
// Query for narrower concepts: collections whose parent_id equals the selected collection's id
const narrowerCollections = await Collection.query().where('parent_id', collection.id); // an awaited query always yields an array, so no fallback is needed
// For broader concept, if the selected collection has a parent_id fetch that record (otherwise null)
const broaderCollection: Collection[] | null = await (async () => {
if (collection.parent_id) {
// Try to fetch the parent...
const parent = await Collection.find(collection.parent_id);
// If found, return it wrapped in an array; if not found, return null
return parent ? [parent] : null;
}
return [];
})();
// Return the selected collection along with its narrower and broader concepts in JSON format
return response.json({
selectedCollection: collection,
narrowerCollections,
broaderCollection,
});
}
}
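For orientation, a sketch of what a client receives from the new show action, assuming it is bound to a route such as GET /collections/:id (the path is an assumption):

async function showCollectionExample() {
    const res = await fetch('/collections/42');
    const body = await res.json();
    // body.selectedCollection  -> the collection with id 42
    // body.narrowerCollections -> all collections whose parent_id is 42
    // body.broaderCollection   -> [parent] when parent_id resolves, [] for root collections, null when the parent row is missing
    return body;
}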


@ -5,7 +5,7 @@ import BackupCode from '#models/backup_code';
// import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException'; // import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException';
import { authValidator } from '#validators/auth'; import { authValidator } from '#validators/auth';
import hash from '@adonisjs/core/services/hash'; import hash from '@adonisjs/core/services/hash';
import db from '@adonisjs/lucid/services/db';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider'; import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
// import { Authenticator } from '@adonisjs/auth'; // import { Authenticator } from '@adonisjs/auth';
// import { LoginState } from 'Contracts/enums'; // import { LoginState } from 'Contracts/enums';
@ -29,10 +29,6 @@ export default class AuthController {
const { email, password } = request.only(['email', 'password']); const { email, password } = request.only(['email', 'password']);
try { try {
await db.connection().rawQuery('SELECT 1');
// // attempt to verify credential and login user // // attempt to verify credential and login user
// await auth.use('web').attempt(email, plainPassword); // await auth.use('web').attempt(email, plainPassword);
@ -55,9 +51,6 @@ export default class AuthController {
await auth.use('web').login(user); await auth.use('web').login(user);
} catch (error) { } catch (error) {
if (error.code === 'ECONNREFUSED') {
throw error;
}
// if login fails, return vague form message and redirect back // if login fails, return vague form message and redirect back
session.flash('message', 'Your username, email, or password is incorrect'); session.flash('message', 'Your username, email, or password is incorrect');
return response.redirect().back(); return response.redirect().back();
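The added SELECT 1 probe distinguishes an unreachable database from bad credentials: connection errors are rethrown instead of being flashed as a login failure. The same idea factored into a reusable helper, as a sketch; the helper name is illustrative:

import db from '@adonisjs/lucid/services/db';

// Throws (e.g. with code ECONNREFUSED) before any credential check runs,
// so callers can surface infrastructure errors separately from bad logins.
async function assertDatabaseReachable(): Promise<void> {
    await db.connection().rawQuery('SELECT 1');
}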


@ -6,11 +6,6 @@ import hash from '@adonisjs/core/services/hash';
// import { schema, rules } from '@adonisjs/validator'; // import { schema, rules } from '@adonisjs/validator';
import vine from '@vinejs/vine'; import vine from '@vinejs/vine';
import BackupCodeStorage, { SecureRandom } from '#services/backup_code_storage'; import BackupCodeStorage, { SecureRandom } from '#services/backup_code_storage';
import path from 'path';
import crypto from 'crypto';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import logger from '@adonisjs/core/services/logger';
// Here we are generating secret and recovery codes for the user that's enabling 2FA and storing them to our database. // Here we are generating secret and recovery codes for the user that's enabling 2FA and storing them to our database.
export default class UserController { export default class UserController {
@ -33,7 +28,7 @@ export default class UserController {
user: user, user: user,
twoFactorEnabled: user.isTwoFactorEnabled, twoFactorEnabled: user.isTwoFactorEnabled,
// code: await TwoFactorAuthProvider.generateQrCode(user), // code: await TwoFactorAuthProvider.generateQrCode(user),
backupState: backupState, backupState: backupState,
}); });
} }
@ -45,8 +40,10 @@ export default class UserController {
// }); // });
const passwordSchema = vine.object({ const passwordSchema = vine.object({
// first step // first step
old_password: vine.string().trim(), old_password: vine
// .regex(/^[a-zA-Z0-9]+$/), .string()
.trim()
.regex(/^[a-zA-Z0-9]+$/),
new_password: vine.string().confirmed({ confirmationField: 'confirm_password' }).trim().minLength(8).maxLength(255), new_password: vine.string().confirmed({ confirmationField: 'confirm_password' }).trim().minLength(8).maxLength(255),
}); });
try { try {
@ -57,9 +54,9 @@ export default class UserController {
// return response.badRequest(error.messages); // return response.badRequest(error.messages);
throw error; throw error;
} }
try { try {
const user = (await auth.user) as User; const user = await auth.user as User;
const { old_password, new_password } = request.only(['old_password', 'new_password']); const { old_password, new_password } = request.only(['old_password', 'new_password']);
// if (!(old_password && new_password && confirm_password)) { // if (!(old_password && new_password && confirm_password)) {
@ -85,171 +82,6 @@ export default class UserController {
} }
} }
public async profile({ inertia, auth }: HttpContext) {
const user = await User.find(auth.user?.id);
// let test = await drive.use().getUrl(user?.avatar);
// user?.preload('roles');
const avatarFullPathUrl = user?.avatar ? await drive.use('public').getUrl(user.avatar) : null;
return inertia.render('profile/show', {
user: user,
defaultUrl: avatarFullPathUrl,
});
}
/**
* Update the user's profile information.
*
* @param {HttpContext} ctx - The HTTP context object.
* @returns {Promise<void>}
*/
public async profileUpdate({ auth, request, response, session }: HttpContext) {
if (!auth.user) {
session.flash('error', 'You must be logged in to update your profile.');
return response.redirect().toRoute('login');
}
const updateProfileValidator = vine.withMetaData<{ userId: number }>().compile(
vine.object({
first_name: vine.string().trim().minLength(4).maxLength(255),
last_name: vine.string().trim().minLength(4).maxLength(255),
login: vine.string().trim().minLength(4).maxLength(255),
email: vine
.string()
.trim()
.maxLength(255)
.email()
.normalizeEmail()
.isUnique({ table: 'accounts', column: 'email', whereNot: (field) => field.meta.userId }),
avatar: vine
.myfile({
size: '2mb',
extnames: ['jpg', 'jpeg', 'png', 'gif', 'webp', 'svg'],
})
// .allowedMimetypeExtensions({
// allowedExtensions: ['jpg', 'jpeg', 'png', 'gif', 'webp', 'svg'],
// })
.optional(),
}),
);
const user = await User.find(auth.user.id);
if (!user) {
session.flash('error', 'User not found.');
return response.redirect().toRoute('login');
}
try {
// validate update form
await request.validateUsing(updateProfileValidator, {
meta: {
userId: user.id,
},
});
const { login, email, first_name, last_name } = request.only(['login', 'email', 'first_name', 'last_name']);
const sanitizedData: { [key: string]: any } = {
login: login?.trim(),
email: email?.toLowerCase().trim(),
first_name: first_name?.trim(),
last_name: last_name?.trim(),
// avatar: "",
};
const toCamelCase = (str: string) => str.replace(/_([a-z])/g, (g) => g[1].toUpperCase());
const hasInputChanges = Object.keys(sanitizedData).some((key) => {
const camelKey = toCamelCase(key);
return sanitizedData[key] !== (user.$attributes as { [key: string]: any })[camelKey];
});
let hasAvatarChanged = false;
const avatar = request.file('avatar');
if (avatar) {
const fileHash = crypto
.createHash('sha256')
.update(avatar.clientName + avatar.size)
.digest('hex');
const fileName = `avatar-${fileHash}.${avatar.extname}`;
const avatarFullPath = path.join('/uploads', `${user.login}`, fileName);
if (user.avatar != avatarFullPath) {
if (user.avatar) {
await drive.use('public').delete(user.avatar);
}
hasAvatarChanged = true; // the enclosing inequality check already guarantees the path differs
await avatar.moveToDisk(avatarFullPath, 'public', {
name: fileName,
overwrite: true, // overwrite in case of conflict
disk: 'public',
});
sanitizedData.avatar = avatarFullPath;
}
}
if (!hasInputChanges && !hasAvatarChanged) {
session.flash('message', 'No changes were made.');
return response.redirect().back();
}
await user.merge(sanitizedData).save();
session.flash('message', 'User has been updated successfully');
return response.redirect().toRoute('settings.profile.edit');
} catch (error) {
logger.error('Profile update failed:', error);
// session.flash('errors', 'Profile update failed. Please try again.');
// return response.redirect().back();
throw error;
}
}
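The avatar handling above derives a deterministic file name from the upload, so re-uploading an identical file yields the same path and is treated as "no change". The same logic as a standalone sketch; the function name and parameter types are illustrative:

import path from 'path';
import crypto from 'crypto';

function avatarPathFor(login: string, clientName: string, size: number, extname: string): string {
    // same client name + size => same hash => same storage path
    const fileHash = crypto.createHash('sha256').update(clientName + size).digest('hex');
    return path.join('/uploads', login, `avatar-${fileHash}.${extname}`);
}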
public async passwordUpdate({ auth, request, response, session }: HttpContext) {
// const passwordSchema = schema.create({
// old_password: schema.string({ trim: true }, [rules.required()]),
// new_password: schema.string({ trim: true }, [rules.minLength(8), rules.maxLength(255), rules.confirmed('confirm_password')]),
// confirm_password: schema.string({ trim: true }, [rules.required()]),
// });
const passwordSchema = vine.object({
// first step
old_password: vine.string().trim(),
// .regex(/^[a-zA-Z0-9]+$/),
new_password: vine.string().confirmed({ confirmationField: 'confirm_password' }).trim().minLength(8).maxLength(255),
});
try {
// await request.validate({ schema: passwordSchema });
const validator = vine.compile(passwordSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
try {
const user = (await auth.user) as User;
const { old_password, new_password } = request.only(['old_password', 'new_password']);
// if (!(old_password && new_password && confirm_password)) {
// return response.status(400).send({ warning: 'Old password and new password are required.' });
// }
// Verify if the provided old password matches the user's current password
const isSame = await hash.verify(user.password, old_password);
if (!isSame) {
session.flash('warning', 'Old password is incorrect.');
return response.redirect().back();
// return response.flash('warning', 'Old password is incorrect.').redirect().back();
}
// Hash the new password before updating the user's password
user.password = new_password;
await user.save();
// return response.status(200).send({ message: 'Password updated successfully.' });
session.flash({ message: 'Password updated successfully.' });
return response.redirect().toRoute('settings.profile.edit');
} catch (error) {
// return response.status(500).send({ message: 'Internal server error.' });
return response.flash('warning', 'Internal server error.').redirect().back();
}
}
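passwordUpdate verifies the old password against the stored hash and then assigns the new plaintext, relying on the model to hash it on save. The core flow as a sketch under that assumption; the structural user type is illustrative:

import hash from '@adonisjs/core/services/hash';

async function changePassword(user: { password: string; save(): Promise<unknown> }, oldPassword: string, newPassword: string): Promise<boolean> {
    if (!(await hash.verify(user.password, oldPassword))) {
        return false; // old password does not match the stored hash
    }
    user.password = newPassword; // assumed to be hashed by a model hook on save
    await user.save();
    return true;
}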
public async enableTwoFactorAuthentication({ auth, response, session }: HttpContext): Promise<void> { public async enableTwoFactorAuthentication({ auth, response, session }: HttpContext): Promise<void> {
// const user: User | undefined = auth?.user; // const user: User | undefined = auth?.user;
const user = (await User.find(auth.user?.id)) as User; const user = (await User.find(auth.user?.id)) as User;
@ -283,7 +115,7 @@ export default class UserController {
} else { } else {
session.flash('error', 'User not found.'); session.flash('error', 'User not found.');
} }
return response.redirect().back(); return response.redirect().back();
// return inertia.render('Auth/AccountInfo', { // return inertia.render('Auth/AccountInfo', {
// // status: { // // status: {


@ -3,7 +3,7 @@ import { Client } from '@opensearch-project/opensearch';
import User from '#models/user'; import User from '#models/user';
import Dataset from '#models/dataset'; import Dataset from '#models/dataset';
import DatasetIdentifier from '#models/dataset_identifier'; import DatasetIdentifier from '#models/dataset_identifier';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer'; import XmlModel from '#app/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js'; import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2'; import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs'; import { readFileSync } from 'fs';
@ -18,33 +18,9 @@ import { HttpException } from 'node-exceptions';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model'; import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import vine, { SimpleMessagesProvider } from '@vinejs/vine'; import vine, { SimpleMessagesProvider } from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main'; import mail from '@adonisjs/mail/services/main';
// import { resolveMx } from 'dns/promises';
// import * as net from 'net';
import { validate } from 'deep-email-validator'; import { validate } from 'deep-email-validator';
import {
TitleTypes,
DescriptionTypes,
ContributorTypes,
PersonNameTypes,
ReferenceIdentifierTypes,
RelationTypes,
SubjectTypes,
DatasetTypes,
} from '#contracts/enums';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import db from '@adonisjs/lucid/services/db';
import Project from '#models/project';
import License from '#models/license';
import Language from '#models/language';
import File from '#models/file';
import Coverage from '#models/coverage';
import Title from '#models/title';
import Description from '#models/description';
import Subject from '#models/subject';
import DatasetReference from '#models/dataset_reference';
import Collection from '#models/collection';
import CollectionRole from '#models/collection_role';
import { updateEditorDatasetValidator } from '#validators/dataset';
import { savePersons } from '#app/utils/utility-functions';
// Create a new instance of the client // Create a new instance of the client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
@ -87,15 +63,8 @@ export default class DatasetsController {
} }
datasets.orderBy(attribute, sortOrder); datasets.orderBy(attribute, sortOrder);
} else { } else {
// datasets.orderBy('id', 'asc'); // users.orderBy('created_at', 'desc');
// Custom ordering to prioritize rejected_reviewer state datasets.orderBy('id', 'asc');
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_reviewer' THEN 0
ELSE 1
END ASC,
id ASC
`);
} }
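The raw CASE expression replaces the old plain id ordering so that datasets rejected by a reviewer surface first in the editor queue. The same ordering as a standalone query, sketched against the Dataset model:

import Dataset from '#models/dataset';

// 'rejected_reviewer' rows sort into bucket 0, everything else into bucket 1;
// ties fall back to ascending id.
function editorQueueQuery() {
    return Dataset.query().orderByRaw(`
        CASE WHEN server_state = 'rejected_reviewer' THEN 0 ELSE 1 END ASC,
        id ASC
    `);
}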
// const users = await User.query().orderBy('login').paginate(page, limit); // const users = await User.query().orderBy('login').paginate(page, limit);
@ -188,16 +157,10 @@ export default class DatasetsController {
} }
} }
public async approve({ request, inertia, response, auth }: HttpContext) { public async approve({ request, inertia, response }: HttpContext) {
const id = request.param('id'); const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// $dataset = Dataset::with('user:id,login')->findOrFail($id); // $dataset = Dataset::with('user:id,login')->findOrFail($id);
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail(); const dataset = await Dataset.findOrFail(id);
const validStates = ['editor_accepted', 'rejected_reviewer']; const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) { if (!validStates.includes(dataset.server_state)) {
@ -223,7 +186,7 @@ export default class DatasetsController {
}); });
} }
public async approveUpdate({ request, response, auth }: HttpContext) { public async approveUpdate({ request, response }: HttpContext) {
const approveDatasetSchema = vine.object({ const approveDatasetSchema = vine.object({
reviewer_id: vine.number(), reviewer_id: vine.number(),
}); });
@ -236,11 +199,7 @@ export default class DatasetsController {
throw error; throw error;
} }
const id = request.param('id'); const id = request.param('id');
const user = auth.user; const dataset = await Dataset.findOrFail(id);
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer']; const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) { if (!validStates.includes(dataset.server_state)) {
@ -258,9 +217,6 @@ export default class DatasetsController {
if (dataset.reject_reviewer_note != null) { if (dataset.reject_reviewer_note != null) {
dataset.reject_reviewer_note = null; dataset.reject_reviewer_note = null;
} }
if (dataset.reject_editor_note != null) {
dataset.reject_editor_note = null;
}
//save main and additional titles //save main and additional titles
const reviewer_id = request.input('reviewer_id', null); const reviewer_id = request.input('reviewer_id', null);
@ -271,15 +227,10 @@ export default class DatasetsController {
} }
} }
public async reject({ request, inertia, response, auth }: HttpContext) { public async reject({ request, inertia, response }: HttpContext) {
const id = request.param('id'); const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('id', id) .where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
// .preload('titles') // .preload('titles')
// .preload('descriptions') // .preload('descriptions')
.preload('user', (builder) => { .preload('user', (builder) => {
@ -304,17 +255,77 @@ export default class DatasetsController {
}); });
} }
// private async checkEmailDomain(email: string): Promise<boolean> {
// const domain = email.split('@')[1];
// try {
// // Step 1: Check MX records for the domain
// const mxRecords = await resolveMx(domain);
// if (mxRecords.length === 0) {
// return false; // No MX records, can't send email
// }
// // Sort MX records by priority
// mxRecords.sort((a, b) => a.priority - b.priority);
// // Step 2: Attempt SMTP connection to the first available mail server
// const smtpServer = mxRecords[0].exchange;
// return await this.checkMailboxExists(smtpServer, email);
// } catch (error) {
// console.error('Error during MX lookup or SMTP validation:', error);
// return false;
// }
// }
//// Helper function to check if the mailbox exists using SMTP
// private async checkMailboxExists(smtpServer: string, email: string): Promise<boolean> {
// return new Promise((resolve, reject) => {
// const socket = net.createConnection(25, smtpServer);
// socket.on('connect', () => {
// socket.write(`HELO ${smtpServer}\r\n`);
// socket.write(`MAIL FROM: <test@example.com>\r\n`);
// socket.write(`RCPT TO: <${email}>\r\n`);
// });
// socket.on('data', (data) => {
// const response = data.toString();
// if (response.includes('250')) {
// // 250 is an SMTP success code
// socket.end();
// resolve(true); // Email exists
// } else if (response.includes('550')) {
// // 550 means the mailbox doesn't exist
// socket.end();
// resolve(false); // Email doesn't exist
// }
// });
// socket.on('error', (error) => {
// console.error('SMTP connection error:', error);
// socket.end();
// resolve(false);
// });
// socket.on('end', () => {
// // SMTP connection closed
// });
// socket.setTimeout(5000, () => {
// // Timeout after 5 seconds
// socket.end();
// resolve(false); // Assume email doesn't exist if no response
// });
// });
// }
public async rejectUpdate({ request, response, auth }: HttpContext) { public async rejectUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user; const authUser = auth.user!;
if (!authUser) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const id = request.param('id'); const id = request.param('id');
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('id', id) .where('id', id)
.where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
.preload('user', (builder) => { .preload('user', (builder) => {
builder.select('id', 'login', 'email'); builder.select('id', 'login', 'email');
}) })
@ -342,7 +353,7 @@ export default class DatasetsController {
return response return response
.flash( .flash(
`Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`, `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
'warning', 'warning'
) )
.redirect() .redirect()
.toRoute('editor.dataset.list'); .toRoute('editor.dataset.list');
@ -377,9 +388,7 @@ export default class DatasetsController {
emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`; emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`;
} catch (error) { } catch (error) {
logger.error(error); logger.error(error);
return response return response.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error').toRoute('editor.dataset.list');
.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
.toRoute('editor.dataset.list');
} }
} else { } else {
emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`; emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`;
@ -395,16 +404,11 @@ export default class DatasetsController {
.toRoute('editor.dataset.list'); .toRoute('editor.dataset.list');
} }
public async publish({ request, inertia, response, auth }: HttpContext) { public async publish({ request, inertia, response }: HttpContext) {
const id = request.param('id'); const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('id', id) .where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
.preload('titles') .preload('titles')
.preload('authors') .preload('authors')
// .preload('persons', (builder) => { // .preload('persons', (builder) => {
@ -426,14 +430,10 @@ export default class DatasetsController {
return inertia.render('Editor/Dataset/Publish', { return inertia.render('Editor/Dataset/Publish', {
dataset, dataset,
can: {
reject: await auth.user?.can(['dataset-editor-reject']),
publish: await auth.user?.can(['dataset-publish']),
},
}); });
} }
public async publishUpdate({ request, response, auth }: HttpContext) { public async publishUpdate({ request, response }: HttpContext) {
const publishDatasetSchema = vine.object({ const publishDatasetSchema = vine.object({
publisher_name: vine.string().trim(), publisher_name: vine.string().trim(),
}); });
@ -445,12 +445,7 @@ export default class DatasetsController {
throw error; throw error;
} }
const id = request.param('id'); const id = request.param('id');
const user = auth.user; const dataset = await Dataset.findOrFail(id);
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();
// let test = await Dataset.getMax('publish_id'); // let test = await Dataset.getMax('publish_id');
// const maxPublishId = await Database.from('documents').max('publish_id as max_publish_id').first(); // const maxPublishId = await Database.from('documents').max('publish_id as max_publish_id').first();
@ -476,139 +471,10 @@ export default class DatasetsController {
} }
} }
public async rejectToReviewer({ request, inertia, response, auth }: HttpContext) { public async doiCreate({ request, inertia }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
.preload('reviewer', (builder) => {
builder.select('id', 'login', 'email');
})
.firstOrFail();
const validStates = ['reviewed'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be rejected to the reviewer. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('editor.dataset.list');
}
return inertia.render('Editor/Dataset/RejectToReviewer', {
dataset,
});
}
public async rejectToReviewerUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user;
if (!authUser) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const id = request.param('id'); const id = request.param('id');
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('id', id) .where('id', id)
.where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
.preload('reviewer', (builder) => {
builder.select('id', 'login', 'email');
})
.firstOrFail();
const newSchema = vine.object({
server_state: vine.string().trim(),
reject_editor_note: vine.string().trim().minLength(10).maxLength(500),
send_mail: vine.boolean().optional(),
});
try {
// await request.validate({ schema: newSchema });
const validator = vine.compile(newSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
const validStates = ['reviewed'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be rejected to the reviewer. Dataset has server state ${dataset.server_state}.`,
'warning',
)
.redirect()
.toRoute('editor.dataset.list');
}
dataset.server_state = 'rejected_to_reviewer';
const rejectEditorNote = request.input('reject_editor_note', '');
dataset.reject_editor_note = rejectEditorNote;
// add logic for sending reject message
const sendMail = request.input('send_mail', false); // field name matches the validated schema above
// const validRecipientEmail = await this.checkEmailDomain('arno.kaimbacher@outlook.at');
const validationResult = await validate({
email: dataset.reviewer.email,
validateSMTP: false,
});
const validRecipientEmail: boolean = validationResult.valid;
await dataset.save();
let emailStatusMessage = '';
if (sendMail == true) {
if (dataset.reviewer.email && validRecipientEmail) {
try {
await mail.send((message) => {
message.to(dataset.reviewer.email).subject('Dataset Rejection Notification').html(`
<p>Dear ${dataset.reviewer.login},</p>
<p>The dataset with ID ${dataset.id} that you reviewed has been rejected back to you.</p>
<p>Reason for rejection: ${rejectEditorNote}</p>
<p>Best regards,<br>Your Tethys editor: ${authUser.login}</p>
`);
});
emailStatusMessage = ` A rejection email was successfully sent to ${dataset.reviewer.email}.`;
} catch (error) {
logger.error(error);
return response
.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
.toRoute('editor.dataset.list');
}
} else {
emailStatusMessage = ` However, the email could not be sent because the reviewer's email address (${dataset.reviewer.email}) is not valid.`;
}
}
return response
.flash(
`You have successfully rejected dataset ${dataset.id} reviewed by ${dataset.reviewer.login}.${emailStatusMessage}`,
'message',
)
.toRoute('editor.dataset.list');
}
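Recipient addresses are vetted with deep-email-validator before any mail goes out; with validateSMTP disabled the check stops at the syntax/DNS level, which is faster but can let dead mailboxes through. A sketch of that call in isolation:

import { validate } from 'deep-email-validator';

async function isLikelyDeliverable(email: string): Promise<boolean> {
    const result = await validate({ email, validateSMTP: false });
    return result.valid;
}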
public async doiCreate({ request, inertia, auth, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
.preload('titles') .preload('titles')
.preload('descriptions') .preload('descriptions')
// .preload('identifier') // .preload('identifier')
@ -619,494 +485,61 @@ export default class DatasetsController {
}); });
} }
public async doiStore({ request, response, auth }: HttpContext) { public async doiStore({ request, response }: HttpContext) {
const dataId = request.param('publish_id'); const dataId = request.param('publish_id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Load dataset with minimal required relationships
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('editor_id', user.id) // Ensure the user is the editor of the dataset // .preload('xmlCache')
.where('publish_id', dataId) .where('publish_id', dataId)
.firstOrFail(); .firstOrFail();
const prefix = process.env.DATACITE_PREFIX || '';
const base_domain = process.env.BASE_DOMAIN || '';
// Generate DOI metadata XML
const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string; const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;
// Prepare DOI registration data let prefix = '';
const doiValue = `${prefix}/tethys.${dataset.publish_id}`; //'10.21388/tethys.213' let base_domain = '';
const landingPageUrl = `https://doi.${getDomain(base_domain)}/${prefix}/tethys.${dataset.publish_id}`; //https://doi.dev.tethys.at/10.21388/tethys.213 // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
prefix = process.env.DATACITE_PREFIX || '';
base_domain = process.env.BASE_DOMAIN || '';
// Register DOI with DataCite // register DOI:
const doiValue = prefix + '/tethys.' + dataset.publish_id; //'10.21388/tethys.213'
const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; //https://doi.dev.tethys.at/10.21388/tethys.213
const doiClient = new DoiClient(); const doiClient = new DoiClient();
const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl); const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);
if (dataciteResponse?.status !== 201) { if (dataciteResponse?.status === 201) {
// if response OK 201; save the Identifier value into db
const doiIdentifier = new DatasetIdentifier();
doiIdentifier.value = doiValue;
doiIdentifier.dataset_id = dataset.id;
doiIdentifier.type = 'doi';
doiIdentifier.status = 'findable';
// save modified date of datset for re-caching model in db an update the search index
dataset.server_date_modified = DateTime.now();
// save updated dataset to db an index to OpenSearch
try {
await dataset.related('identifier').save(doiIdentifier);
const index_name = 'tethys-records';
await Index.indexDocument(dataset, index_name);
} catch (error) {
logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
// Log the error or handle it as needed
throw new HttpException(error.message);
}
return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
} else {
const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`; const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
// Log the error or handle it as needed
throw new DoiClientException(dataciteResponse?.status, message); throw new DoiClientException(dataciteResponse?.status, message);
} }
// DOI registration successful - persist and index
try {
// Save identifier
await this.persistDoiAndIndex(dataset, doiValue);
return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
} catch (error) {
logger.error(`${__filename}: Failed to persist DOI and index dataset ${dataset.id}: ${error.message}`);
throw new HttpException(error.message);
}
// return response.toRoute('editor.dataset.list').flash('message', xmlMeta); // return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
} }
/**
* Persist DOI identifier and update search index
* Handles cache invalidation to ensure fresh indexing
*/
private async persistDoiAndIndex(dataset: Dataset, doiValue: string): Promise<void> {
// Create DOI identifier
const doiIdentifier = new DatasetIdentifier();
doiIdentifier.value = doiValue;
doiIdentifier.dataset_id = dataset.id;
doiIdentifier.type = 'doi';
doiIdentifier.status = 'findable';
// Save identifier (this will trigger database insert)
await dataset.related('identifier').save(doiIdentifier);
// Update dataset modification timestamp to reflect the change
dataset.server_date_modified = DateTime.now();
await dataset.save();
// Invalidate stale XML cache
await this.invalidateDatasetCache(dataset);
// Reload dataset with fresh state for indexing
const freshDataset = await Dataset.query().where('id', dataset.id).preload('identifier').preload('xmlCache').firstOrFail();
// Index to OpenSearch with fresh data
const index_name = process.env.OPENSEARCH_INDEX || 'tethys-records';
await Index.indexDocument(freshDataset, index_name);
logger.info(`Successfully created DOI ${doiValue} and indexed dataset ${dataset.id}`);
}
/**
* Invalidate XML cache for dataset
* Ensures fresh cache generation on next access
*/
private async invalidateDatasetCache(dataset: Dataset): Promise<void> {
await dataset.load('xmlCache');
if (dataset.xmlCache) {
await dataset.xmlCache.delete();
logger.debug(`Invalidated XML cache for dataset ${dataset.id}`);
}
}
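The refactored doiStore keeps a strict order: register at DataCite first, and only on a 201 persist the identifier and reindex, so a remote failure never leaves a half-minted DOI in the database. The skeleton of that order as a sketch; 'deps' and its fields are illustrative:

async function mintDoi(deps: {
    register: () => Promise<{ status: number } | undefined>;
    persistAndIndex: () => Promise<void>;
}): Promise<void> {
    const res = await deps.register(); // remote DataCite call first
    if (res?.status !== 201) {
        throw new Error(`Unexpected DataCite MDS response code ${res?.status}`);
    }
    await deps.persistAndIndex(); // local DB write + OpenSearch index only on success
}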
public async show({}: HttpContext) {} public async show({}: HttpContext) {}
public async edit({ request, inertia, response, auth }: HttpContext) { public async edit({}: HttpContext) {}
const id = request.param('id');
// Check if user is authenticated
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Prefilter by both id AND editor_id to ensure user has permission to edit
const datasetQuery = Dataset.query().where('id', id).where('editor_id', user.id);
datasetQuery
.preload('titles', (query) => query.orderBy('id', 'asc'))
.preload('descriptions', (query) => query.orderBy('id', 'asc'))
.preload('coverage')
.preload('licenses')
.preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
// .preload('subjects')
.preload('subjects', (builder) => {
builder.orderBy('id', 'asc').withCount('datasets');
})
.preload('references')
.preload('files', (query) => {
query.orderBy('sort_order', 'asc'); // Sort by sort_order column
});
// This will throw 404 if editor_id does not match logged in user
const dataset = await datasetQuery.firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
'warning',
)
.toRoute('editor.dataset.list');
}
const titleTypes = Object.entries(TitleTypes)
.filter(([value]) => value !== 'Main')
.map(([key, value]) => ({ value: key, label: value }));
const descriptionTypes = Object.entries(DescriptionTypes)
.filter(([value]) => value !== 'Abstract')
.map(([key, value]) => ({ value: key, label: value }));
const languages = await Language.query().where('active', true).pluck('part1', 'part1');
// const contributorTypes = Config.get('enums.contributor_types');
const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));
// const nameTypes = Config.get('enums.name_types');
const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));
// const messages = await Database.table('messages')
// .pluck('help_text', 'metadata_element');
const projects = await Project.query().pluck('label', 'id');
const currentDate = new Date();
const currentYear = currentDate.getFullYear();
const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);
const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
// const userHasRoles = user.roles;
// const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
// const checkeds = dataset.licenses.first().id;
// const doctypes = {
// analysisdata: { label: 'Analysis', value: 'analysisdata' },
// measurementdata: { label: 'Measurements', value: 'measurementdata' },
// monitoring: 'Monitoring',
// remotesensing: 'Remote Sensing',
// gis: 'GIS',
// models: 'Models',
// mixedtype: 'Mixed Type',
// };
return inertia.render('Editor/Dataset/Edit', {
dataset,
titletypes: titleTypes,
descriptiontypes: descriptionTypes,
contributorTypes,
nameTypes,
languages,
// messages,
projects,
licenses,
// datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
// checkeds,
years,
// languages,
subjectTypes: SubjectTypes,
referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
doctypes: DatasetTypes,
});
}
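The edit action converts several enums into { value, label } pairs with the same Object.entries(...).map(...) expression. A sketch of that pattern as one helper; the generic signature assumes plain string-valued enum objects:

function toOptions(enumObject: Record<string, string>): { value: string; label: string }[] {
    return Object.entries(enumObject).map(([key, value]) => ({ value: key, label: value }));
}

// e.g. relationTypes: toOptions(RelationTypes), nameTypes: toOptions(PersonNameTypes)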
public async update({ request, response, session, auth }: HttpContext) {
// Get the dataset id from the route parameter
const datasetId = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Retrieve the dataset and load its existing files
const dataset = await Dataset.query().where('id', datasetId).where('editor_id', user.id).firstOrFail();
await dataset.load('files');
let trx: TransactionClientContract | null = null;
try {
await request.validateUsing(updateEditorDatasetValidator);
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
// const dataset = await Dataset.findOrFail(datasetId);
// save the licenses
const licenses: number[] = request.input('licenses', []);
// await dataset.useTransaction(trx).related('licenses').sync(licenses);
await dataset.useTransaction(trx).related('licenses').sync(licenses);
// save authors and contributors
await dataset.useTransaction(trx).related('authors').sync([]);
await dataset.useTransaction(trx).related('contributors').sync([]);
await savePersons(dataset, request.input('authors', []), 'author', trx);
await savePersons(dataset, request.input('contributors', []), 'contributor', trx);
//save the titles:
const titles = request.input('titles', []);
// const savedTitles:Array<Title> = [];
for (const titleData of titles) {
if (titleData.id) {
const title = await Title.findOrFail(titleData.id);
title.value = titleData.value;
title.language = titleData.language;
title.type = titleData.type;
if (title.$isDirty) {
await title.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const title = new Title();
title.fill(titleData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('titles').save(title);
}
}
// save the abstracts
const descriptions = request.input('descriptions', []);
// const savedTitles:Array<Title> = [];
for (const descriptionData of descriptions) {
if (descriptionData.id) {
const description = await Description.findOrFail(descriptionData.id);
description.value = descriptionData.value;
description.language = descriptionData.language;
description.type = descriptionData.type;
if (description.$isDirty) {
await description.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const description = new Description();
description.fill(descriptionData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('descriptions').save(description);
}
}
// Process all subjects/keywords from the request
const subjects = request.input('subjects');
for (const subjectData of subjects) {
// Case 1: Subject already exists in the database (has an ID)
if (subjectData.id) {
// Retrieve the existing subject
const existingSubject = await Subject.findOrFail(subjectData.id);
// Update subject properties from the request data
existingSubject.value = subjectData.value;
existingSubject.type = subjectData.type;
existingSubject.external_key = subjectData.external_key;
// Only save if there are actual changes
if (existingSubject.$isDirty) {
await existingSubject.save();
}
// Note: The relationship between dataset and subject is already established,
// so we don't need to attach it again
}
// Case 2: New subject being added (no ID)
else {
// Check if a subject with the same value and type already exists in the database
const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);
if (subject.$isNew === true) {
// If it's a completely new subject, create and associate it with the dataset
await dataset.useTransaction(trx).related('subjects').save(subject);
} else {
// If the subject already exists, just create the relationship
await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
}
}
}
const subjectsToDelete = request.input('subjectsToDelete', []);
for (const subjectData of subjectsToDelete) {
if (subjectData.id) {
// const subject = await Subject.findOrFail(subjectData.id);
const subject = await Subject.query()
.where('id', subjectData.id)
.preload('datasets', (builder) => {
builder.orderBy('id', 'asc');
})
.withCount('datasets')
.firstOrFail();
// Check if the subject is used by multiple datasets
if (subject.$extras.datasets_count > 1) {
// If used by multiple datasets, just detach it from the current dataset
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
} else {
// If only used by this dataset, delete the subject completely
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
await subject.useTransaction(trx).delete();
}
}
}
// Process references
const references = request.input('references', []);
// First, get existing references to determine which ones to update vs. create
const existingReferences = await dataset.related('references').query();
const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));
for (const referenceData of references) {
if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
// Update existing reference
const reference = existingReferencesMap.get(referenceData.id);
if (reference) {
reference.merge(referenceData);
if (reference.$isDirty) {
await reference.useTransaction(trx).save();
}
}
} else {
// Create new reference
const dataReference = new DatasetReference();
dataReference.fill(referenceData);
await dataset.useTransaction(trx).related('references').save(dataReference);
}
}
// Handle references to delete if provided
const referencesToDelete = request.input('referencesToDelete', []);
for (const referenceData of referencesToDelete) {
if (referenceData.id) {
const reference = await DatasetReference.findOrFail(referenceData.id);
await reference.useTransaction(trx).delete();
}
}
// save coverage
const coverageData = request.input('coverage');
if (coverageData) {
if (coverageData.id) {
const coverage = await Coverage.findOrFail(coverageData.id);
coverage.merge(coverageData);
if (coverage.$isDirty) {
await coverage.useTransaction(trx).save();
}
}
}
const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
// dataset.type = request.input('type');
dataset.merge(input);
dataset.server_date_modified = DateTime.now();
// let test: boolean = dataset.$isDirty;
await dataset.useTransaction(trx).save();
await trx.commit();
// console.log('Dataset has been updated successfully');
session.flash('message', 'Dataset has been updated successfully');
// return response.redirect().toRoute('user.index');
return response.redirect().toRoute('editor.dataset.edit', [dataset.id]);
} catch (error) {
if (trx !== null) {
await trx.rollback();
}
console.error('Failed to update dataset and related models:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
}
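update wraps every write in one transaction and rolls back on any failure before rethrowing. That discipline reduced to its skeleton, as a sketch:

import db from '@adonisjs/lucid/services/db';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';

async function withTransaction<T>(work: (trx: TransactionClientContract) => Promise<T>): Promise<T> {
    const trx = await db.transaction();
    try {
        const result = await work(trx);
        await trx.commit();
        return result;
    } catch (error) {
        await trx.rollback(); // undo all partial writes
        throw error; // let the global handler report it
    }
}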
public async categorize({ inertia, request, response, auth }: HttpContext) {
const id = request.param('id');
// Check if user is authenticated
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Preload dataset and its "collections" relation
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).preload('collections').firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('editor.dataset.list');
}
const collectionRoles = await CollectionRole.query()
.whereIn('name', ['ddc', 'ccs'])
.preload('collections', (coll: Collection) => {
// preload only top-level collections with no parent_id
coll.whereNull('parent_id').orderBy('number', 'asc');
})
.exec();
return inertia.render('Editor/Dataset/Category', {
collectionRoles: collectionRoles,
dataset: dataset,
relatedCollections: dataset.collections,
});
}
public async categorizeUpdate({ request, response, session, auth }: HttpContext) {
// Get the dataset id from the route parameter
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Retrieve the dataset and load its existing files
const dataset = await Dataset.query().preload('files').where('id', id).where('editor_id', user.id).firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('editor.dataset.list');
}
let trx: TransactionClientContract | null = null;
try {
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
// Retrieve the selected collections from the request.
// This should be an array of collection ids.
const collections: number[] = request.input('collections', []);
// Synchronize the dataset collections using the transaction.
await dataset.useTransaction(trx).related('collections').sync(collections);
// Commit the transaction.
await trx.commit();
// Redirect with a success flash message.
// return response.flash('success', 'Dataset collections updated successfully!').redirect().toRoute('dataset.list');
session.flash('message', 'Dataset collections updated successfully!');
return response.redirect().toRoute('editor.dataset.list');
} catch (error) {
if (trx !== null) {
await trx.rollback();
}
console.error('Failed to categorize dataset collections:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
}
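categorizeUpdate relies on Lucid's sync to make the pivot table match the submitted ids in one call: missing rows are attached, surplus rows detached, existing ones left untouched. A sketch with illustrative ids:

import Dataset from '#models/dataset';

async function recategorize(datasetId: number, collectionIds: number[]): Promise<void> {
    const dataset = await Dataset.findOrFail(datasetId);
    // e.g. collectionIds = [3, 17, 42]; afterwards exactly these collections are linked
    await dataset.related('collections').sync(collectionIds);
}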
// public async update({}: HttpContextContract) {} // public async update({}: HttpContextContract) {}
public async updateOpensearch({ response }: HttpContext) { public async update({ response }: HttpContext) {
const id = 273; //request.param('id'); const id = 273; //request.param('id');
const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail(); const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();
// add xml elements // add xml elements
@ -1222,30 +655,6 @@ export default class DatasetsController {
} }
} }
public async download({ params, response }: HttpContext) {
const id = params.id;
// Find the file by ID
const file = await File.findOrFail(id);
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}
public async destroy({}: HttpContext) {} public async destroy({}: HttpContext) {}
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) { private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
@ -1255,18 +664,19 @@ export default class DatasetsController {
} }
} }
private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> { private async getDatasetXmlDomNode(dataset: Dataset) {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields(); const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset); // xmlModel.setModel(dataset);
xmlModel.excludeEmptyFields();
// Load existing cache if available xmlModel.caching = true;
await dataset.load('xmlCache'); // const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
if (dataset.xmlCache) { if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache); xmlModel.xmlCache = dataset.xmlCache;
} }
// return cache.getDomDocument(); // return cache.getDomDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument(); const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return xmlDocument; return domDocument;
} }
} }


@ -15,17 +15,18 @@ import { OaiModelException, BadOaiModelException } from '#app/exceptions/OaiMode
import Dataset from '#models/dataset'; import Dataset from '#models/dataset';
import Collection from '#models/collection'; import Collection from '#models/collection';
import { getDomain, preg_match } from '#app/utils/utility-functions'; import { getDomain, preg_match } from '#app/utils/utility-functions';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer'; import XmlModel from '#app/Library/XmlModel';
import logger from '@adonisjs/core/services/logger'; import logger from '@adonisjs/core/services/logger';
import ResumptionToken from '#app/Library/Oai/ResumptionToken'; import ResumptionToken from '#app/Library/Oai/ResumptionToken';
// import Config from '@ioc:Adonis/Core/Config'; // import Config from '@ioc:Adonis/Core/Config';
import config from '@adonisjs/core/services/config'; import config from '@adonisjs/core/services/config'
// import { inject } from '@adonisjs/fold'; // import { inject } from '@adonisjs/fold';
import { inject } from '@adonisjs/core'; import { inject } from '@adonisjs/core'
// import { TokenWorkerContract } from "MyApp/Models/TokenWorker"; // import { TokenWorkerContract } from "MyApp/Models/TokenWorker";
import TokenWorkerContract from '#library/Oai/TokenWorkerContract'; import TokenWorkerContract from '#library/Oai/TokenWorkerContract';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model'; import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
interface XslTParameter { interface XslTParameter {
[key: string]: any; [key: string]: any;
} }
@ -34,14 +35,12 @@ interface Dictionary {
[index: string]: string; [index: string]: string;
} }
interface PagingParameter { interface ListParameter {
cursor: number; cursor: number;
totalLength: number; totalIds: number;
start: number; start: number;
nextDocIds: number[]; reldocIds: (number | null)[];
activeWorkIds: number[];
metadataPrefix: string; metadataPrefix: string;
queryParams: Object;
} }
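Purely illustrative: how the renamed PagingParameter fields might fit together for one ListRecords page. The values, and any field semantics beyond what the rename itself shows, are assumptions; the real state comes from the resumption-token worker:

const page: PagingParameter = {
    cursor: 100, // position reached in the full result set
    totalLength: 240, // total number of matching documents
    start: 100, // offset where this page begins
    nextDocIds: [101, 102], // ids still queued for later pages
    activeWorkIds: [99, 100], // ids being delivered in this response
    metadataPrefix: 'oai_dc', // requested metadata format
    queryParams: {}, // original request arguments, echoed into the token
};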
@inject() @inject()
@ -50,7 +49,6 @@ export default class OaiController {
private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/; private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
private xsltParameter: XslTParameter; private xsltParameter: XslTParameter;
private firstPublishedDataset: Dataset | null;
/** /**
* Holds xml representation of document information to be processed. * Holds xml representation of document information to be processed.
* *
@ -59,6 +57,7 @@ export default class OaiController {
private xml: XMLBuilder; private xml: XMLBuilder;
private proc; private proc;
constructor(public tokenWorker: TokenWorkerContract) { constructor(public tokenWorker: TokenWorkerContract) {
// Load the XSLT file // Load the XSLT file
this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json'); this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
@ -86,9 +85,9 @@ export default class OaiController {
let earliestDateFromDb; let earliestDateFromDb;
// const oaiRequest: OaiParameter = request.body; // const oaiRequest: OaiParameter = request.body;
try { try {
this.firstPublishedDataset = await Dataset.earliestPublicationDate(); const firstPublishedDataset: Dataset | null = await Dataset.earliestPublicationDate();
this.firstPublishedDataset != null && firstPublishedDataset != null &&
(earliestDateFromDb = this.firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")); (earliestDateFromDb = firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
this.xsltParameter['earliestDatestamp'] = earliestDateFromDb; this.xsltParameter['earliestDatestamp'] = earliestDateFromDb;
// start the request // start the request
await this.handleRequest(oaiRequest, request); await this.handleRequest(oaiRequest, request);
@ -163,19 +162,22 @@ export default class OaiController {
} else if (verb == 'GetRecord') { } else if (verb == 'GetRecord') {
await this.handleGetRecord(oaiRequest); await this.handleGetRecord(oaiRequest);
} else if (verb == 'ListRecords') { } else if (verb == 'ListRecords') {
// Get browser fingerprint from the request: await this.handleListRecords(oaiRequest);
const browserFingerprint = this.getBrowserFingerprint(request);
await this.handleListRecords(oaiRequest, browserFingerprint);
} else if (verb == 'ListIdentifiers') { } else if (verb == 'ListIdentifiers') {
// Get browser fingerprint from the request: await this.handleListIdentifiers(oaiRequest);
const browserFingerprint = this.getBrowserFingerprint(request);
await this.handleListIdentifiers(oaiRequest, browserFingerprint);
} else if (verb == 'ListSets') { } else if (verb == 'ListSets') {
await this.handleListSets(); await this.handleListSets();
} else { } else {
this.handleIllegalVerb(); this.handleIllegalVerb();
} }
} else { } else {
// // try {
// // console.log("Async code example.")
// const err = new PageNotFoundException("verb not found");
// throw err;
// // } catch (error) { // manually catching
// // next(error); // passing to default middleware error handler
// // }
throw new OaiModelException( throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR, StatusCodes.INTERNAL_SERVER_ERROR,
'The verb provided in the request is illegal.', 'The verb provided in the request is illegal.',
@ -185,11 +187,11 @@ export default class OaiController {
} }
protected handleIdentify() { protected handleIdentify() {
// Get configuration values from environment or a dedicated configuration service const email = process.env.OAI_EMAIL || 'repository@geosphere.at';
const email = process.env.OAI_EMAIL ?? 'repository@geosphere.at'; const repositoryName = 'Tethys RDR';
const repositoryName = process.env.OAI_REPOSITORY_NAME ?? 'Tethys RDR'; const repIdentifier = 'tethys.at';
const repIdentifier = process.env.OAI_REP_IDENTIFIER ?? 'tethys.at'; const sampleIdentifier = 'oai:' + repIdentifier + ':1'; //$this->_configuration->getSampleIdentifier();
const sampleIdentifier = `oai:${repIdentifier}:1`;
// Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null; // Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null;
// earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published); // earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published);
@ -214,7 +216,7 @@ export default class OaiController {
const sets: { [key: string]: string } = { const sets: { [key: string]: string } = {
'open_access': 'Set for open access licenses', 'open_access': 'Set for open access licenses',
'openaire_data': 'OpenAIRE', 'openaire_data': "OpenAIRE",
'doc-type:ResearchData': 'Set for document type ResearchData', 'doc-type:ResearchData': 'Set for document type ResearchData',
...(await this.getSetsForDatasetTypes()), ...(await this.getSetsForDatasetTypes()),
...(await this.getSetsForCollections()), ...(await this.getSetsForCollections()),
@ -232,15 +234,7 @@ export default class OaiController {
const repIdentifier = 'tethys.at'; const repIdentifier = 'tethys.at';
this.xsltParameter['repIdentifier'] = repIdentifier; this.xsltParameter['repIdentifier'] = repIdentifier;
// Validate that required parameter exists early
if (!('identifier' in oaiRequest)) {
throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
}
// Validate and extract the dataset identifier from the request
const dataId = this.validateAndGetIdentifier(oaiRequest); const dataId = this.validateAndGetIdentifier(oaiRequest);
// Retrieve dataset with associated XML cache and collection roles
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('publish_id', dataId) .where('publish_id', dataId)
.preload('xmlCache') .preload('xmlCache')
@ -257,61 +251,59 @@ export default class OaiController {
); );
} }
// Validate and set the metadata prefix parameter
const metadataPrefix = this.validateAndGetMetadataPrefix(oaiRequest); const metadataPrefix = this.validateAndGetMetadataPrefix(oaiRequest);
this.xsltParameter['oai_metadataPrefix'] = metadataPrefix; this.xsltParameter['oai_metadataPrefix'] = metadataPrefix;
// do not deliver datasets which are restricted by document state defined in deliveringStates
// Ensure that the dataset is in an exportable state
this.validateDatasetState(dataset); this.validateDatasetState(dataset);
// Build the XML for the dataset record and add it to the root node // add xml elements
const datasetNode = this.xml.root().ele('Datasets'); const datasetNode = this.xml.root().ele('Datasets');
await this.createXmlRecord(dataset, datasetNode); await this.createXmlRecord(dataset, datasetNode);
} }
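For orientation, a GetRecord request reaches this handler roughly as sketched below. The helper is a hypothetical stand-in for validateAndGetIdentifier(), built only from the oai:tethys.at identifier scheme and the error message visible above; it is not the controller's actual implementation.

// Hypothetical parser for identifiers of the form oai:tethys.at:<publish_id>.
function parseOaiIdentifier(identifier: string, repIdentifier = 'tethys.at'): number {
    const match = identifier.match(new RegExp(`^oai:${repIdentifier}:(\\d+)$`));
    if (!match) {
        // Mirrors the BadOaiModelException thrown by the controller.
        throw new Error('The prefix of the identifier argument is unknown.');
    }
    return Number(match[1]);
}

// parseOaiIdentifier('oai:tethys.at:1') === 1, matching the sample identifier format.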
protected async handleListIdentifiers(oaiRequest: Dictionary, browserFingerprint: string) { protected async handleListIdentifiers(oaiRequest: Dictionary) {
if (!this.tokenWorker.isConnected) { !this.tokenWorker.isConnected && (await this.tokenWorker.connect());
await this.tokenWorker.connect();
}
const maxIdentifier: number = config.get('oai.max.listidentifiers', 100); const maxIdentifier: number = config.get('oai.max.listidentifiers', 100);
await this.handleLists(oaiRequest, maxIdentifier, browserFingerprint); await this.handleLists(oaiRequest, maxIdentifier);
} }
protected async handleListRecords(oaiRequest: Dictionary, browserFingerprint: string) { protected async handleListRecords(oaiRequest: Dictionary) {
if (!this.tokenWorker.isConnected) { !this.tokenWorker.isConnected && (await this.tokenWorker.connect());
await this.tokenWorker.connect();
}
const maxRecords: number = config.get('oai.max.listrecords', 100); const maxRecords: number = config.get('oai.max.listrecords', 100);
await this.handleLists(oaiRequest, maxRecords, browserFingerprint); await this.handleLists(oaiRequest, maxRecords);
} }
private async handleLists(oaiRequest: Dictionary, maxRecords: number, browserFingerprint: string) { private async handleLists(oaiRequest: Dictionary, maxRecords: number) {
maxRecords = maxRecords || 100;
const repIdentifier = 'tethys.at'; const repIdentifier = 'tethys.at';
this.xsltParameter['repIdentifier'] = repIdentifier; this.xsltParameter['repIdentifier'] = repIdentifier;
const datasetNode = this.xml.root().ele('Datasets'); const datasetNode = this.xml.root().ele('Datasets');
const paginationParams: PagingParameter = { // list initialisation
const numWrapper: ListParameter = {
cursor: 0, cursor: 0,
totalLength: 0, totalIds: 0,
start: maxRecords + 1, start: maxRecords + 1,
nextDocIds: [], reldocIds: [],
activeWorkIds: [],
metadataPrefix: '', metadataPrefix: '',
queryParams: {},
}; };
// resumptionToken is defined
if ('resumptionToken' in oaiRequest) { if ('resumptionToken' in oaiRequest) {
await this.handleResumptionToken(oaiRequest, maxRecords, paginationParams); await this.handleResumptionToken(oaiRequest, maxRecords, numWrapper);
} else { } else {
await this.handleNoResumptionToken(oaiRequest, paginationParams, maxRecords); // no resumptionToken is given
await this.handleNoResumptionToken(oaiRequest, numWrapper);
} }
const nextIds: number[] = paginationParams.nextDocIds; // handling of document ids
const workIds: number[] = paginationParams.activeWorkIds; const restIds = numWrapper.reldocIds as number[];
const workIds = restIds.splice(0, maxRecords) as number[]; // array_splice(restIds, 0, maxRecords);
if (workIds.length === 0) { // no records returned
if (workIds.length == 0) {
throw new OaiModelException( throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR, StatusCodes.INTERNAL_SERVER_ERROR,
'The combination of the given values results in an empty list.', 'The combination of the given values results in an empty list.',
@ -319,222 +311,169 @@ export default class OaiController {
); );
} }
const datasets = await Dataset.query() const datasets: Dataset[] = await Dataset.query()
.whereIn('publish_id', workIds) .whereIn('publish_id', workIds)
.preload('xmlCache') .preload('xmlCache')
.preload('collections', (builder) => { .preload('collections', (builder) => {
builder.preload('collectionRole'); builder.preload('collectionRole');
}) })
.orderBy('publish_id'); .orderBy('publish_id');
for (const dataset of datasets) { for (const dataset of datasets) {
await this.createXmlRecord(dataset, datasetNode); await this.createXmlRecord(dataset, datasetNode);
} }
await this.setResumptionToken(nextIds, paginationParams, browserFingerprint);
// store the remaining ids in a resumption token
const countRestIds = restIds.length; //84
if (countRestIds > 0) {
const token = new ResumptionToken();
token.startPosition = numWrapper.start; //101
token.totalIds = numWrapper.totalIds; //184
token.documentIds = restIds; //101 -184
token.metadataPrefix = numWrapper.metadataPrefix;
// $tokenWorker->storeResumptionToken($token);
const res: string = await this.tokenWorker.set(token);
// set parameters for the resumptionToken-node
// const res = token.ResumptionId;
this.setParamResumption(res, numWrapper.cursor, numWrapper.totalIds);
}
} }
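The inline markers above (//84, //101, //184) trace one concrete harvest. As a sketch of that arithmetic, assuming maxRecords = 100 and 184 matching documents:

// Hypothetical walkthrough of the paging numbers, not controller code.
const maxRecords = 100;
const allIds = Array.from({ length: 184 }, (_, i) => i + 1); // 184 matching publish_ids
const workIds = allIds.splice(0, maxRecords); // ids 1..100, rendered in this response
const restIds = allIds;                       // ids 101..184, stored in the token
// token.startPosition = 101, token.totalIds = 184, token.documentIds = restIds;
// the follow-up request then resumes with cursor = 100 and start = 201.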
private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) { private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, numWrapper: ListParameter) {
this.validateMetadataPrefix(oaiRequest, paginationParams); const resParam = oaiRequest['resumptionToken']; //e.g. "158886496600000"
const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
'server_state',
this.deliveringDocumentStates,
);
this.applySetFilter(finder, oaiRequest);
this.applyDateFilters(finder, oaiRequest);
await this.fetchAndSetResults(finder, paginationParams, oaiRequest, maxRecords);
}
private async fetchAndSetResults(
finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
paginationParams: PagingParameter,
oaiRequest: Dictionary,
maxRecords: number,
) {
const totalResult = await finder
.clone()
.count('* as total')
.first()
.then((res) => res?.$extras.total);
paginationParams.totalLength = Number(totalResult);
const combinedRecords: Dataset[] = await finder
.select('publish_id')
.orderBy('publish_id')
.offset(0)
.limit(maxRecords * 2);
paginationParams.activeWorkIds = combinedRecords.slice(0, 100).map((dat) => Number(dat.publish_id));
paginationParams.nextDocIds = combinedRecords.slice(100).map((dat) => Number(dat.publish_id));
// No resumption token was used; set queryParams from the current oaiRequest
paginationParams.queryParams = {
...oaiRequest,
deliveringStates: this.deliveringDocumentStates,
};
// paginationParams.totalLength = 230;
}
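fetchAndSetResults over-fetches a second window (maxRecords * 2) so the resumption token can be filled without another query; a sketch of the slicing, assuming maxRecords = 100 and 160 matches:

// Hypothetical illustration of the two-window slice, not controller code.
const combined = Array.from({ length: 160 }, (_, i) => i + 1); // up to 200 ids fetched
const activeWorkIds = combined.slice(0, 100); // delivered in this response
const nextDocIds = combined.slice(100);       // 60 ids carried into the token
// Note the literal 100 in the slice boundary: it matches the default maxRecords of 100.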
private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, paginationParams: PagingParameter) {
const resParam = oaiRequest['resumptionToken'];
const token = await this.tokenWorker.get(resParam); const token = await this.tokenWorker.get(resParam);
if (!token) { if (!token) {
throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'Cache is outdated.', OaiErrorCodes.BADRESUMPTIONTOKEN); throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'Cache is outdated.', OaiErrorCodes.BADRESUMPTIONTOKEN);
} }
// this.setResumptionParameters(token, maxRecords, paginationParams); numWrapper.cursor = token.startPosition - 1; // then starts at index 10
paginationParams.cursor = token.startPosition - 1; numWrapper.start = token.startPosition + maxRecords;
paginationParams.start = token.startPosition + maxRecords; numWrapper.totalIds = token.totalIds;
paginationParams.totalLength = token.totalIds; numWrapper.reldocIds = token.documentIds;
paginationParams.activeWorkIds = token.documentIds; numWrapper.metadataPrefix = token.metadataPrefix;
paginationParams.metadataPrefix = token.metadataPrefix;
paginationParams.queryParams = token.queryParams;
this.xsltParameter['oai_metadataPrefix'] = token.metadataPrefix;
const finder = this.buildDatasetQueryViaToken(token); this.xsltParameter['oai_metadataPrefix'] = numWrapper.metadataPrefix;
const nextRecords: Dataset[] = await this.fetchNextRecords(finder, token, maxRecords);
paginationParams.nextDocIds = nextRecords.map((dat) => Number(dat.publish_id));
} }
private async setResumptionToken(nextIds: number[], paginationParams: PagingParameter, browserFingerprint: string) { private async handleNoResumptionToken(oaiRequest: Dictionary, numWrapper: ListParameter) {
const countRestIds = nextIds.length; // no resumptionToken is given
if (countRestIds > 0) { if ('metadataPrefix' in oaiRequest) {
// const token = this.createResumptionToken(paginationParams, nextIds); numWrapper.metadataPrefix = oaiRequest['metadataPrefix'];
const token = new ResumptionToken(); } else {
token.startPosition = paginationParams.start;
token.totalIds = paginationParams.totalLength;
token.documentIds = nextIds;
token.metadataPrefix = paginationParams.metadataPrefix;
token.queryParams = paginationParams.queryParams;
const res: string = await this.tokenWorker.set(token, browserFingerprint);
this.setParamResumption(res, paginationParams.cursor, paginationParams.totalLength);
}
}
private buildDatasetQueryViaToken(token: ResumptionToken) {
const finder = Dataset.query();
const originalQuery = token.queryParams || {};
const deliveringStates = originalQuery.deliveringStates || this.deliveringDocumentStates;
finder.whereIn('server_state', deliveringStates);
this.applySetFilter(finder, originalQuery);
this.applyDateFilters(finder, originalQuery);
return finder;
}
private async fetchNextRecords(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, token: ResumptionToken, maxRecords: number) {
return finder
.select('publish_id')
.orderBy('publish_id')
.offset(token.startPosition - 1 + maxRecords)
.limit(100);
}
private validateMetadataPrefix(oaiRequest: Dictionary, paginationParams: PagingParameter) {
if (!('metadataPrefix' in oaiRequest)) {
throw new OaiModelException( throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR, StatusCodes.INTERNAL_SERVER_ERROR,
'The prefix of the metadata argument is unknown.', 'The prefix of the metadata argument is unknown.',
OaiErrorCodes.BADARGUMENT, OaiErrorCodes.BADARGUMENT,
); );
} }
paginationParams.metadataPrefix = oaiRequest['metadataPrefix']; this.xsltParameter['oai_metadataPrefix'] = numWrapper.metadataPrefix;
this.xsltParameter['oai_metadataPrefix'] = paginationParams.metadataPrefix;
}
private applySetFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) { let finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();
if ('set' in queryParams) { // add server state restrictions
const [setType, setValue] = queryParams['set'].split(':'); finder.whereIn('server_state', this.deliveringDocumentStates);
if ('set' in oaiRequest) {
const set = oaiRequest['set'] as string;
const setArray = set.split(':');
switch (setType) { if (setArray[0] == 'data-type') {
case 'data-type': if (setArray.length == 2 && setArray[1]) {
setValue && finder.where('type', setValue); finder.where('type', setArray[1]);
break; }
case 'open_access': } else if (setArray[0] == 'open_access') {
finder.andWhereHas('licenses', (query) => { const openAccessLicences = ['CC-BY-4.0', 'CC-BY-SA-4.0'];
query.whereIn('name', ['CC-BY-4.0', 'CC-BY-SA-4.0']); finder.andWhereHas('licenses', (query) => {
query.whereIn('name', openAccessLicences);
});
} else if (setArray[0] == 'ddc') {
if (setArray.length == 2 && setArray[1] != '') {
finder.andWhereHas('collections', (query) => {
query.where('number', setArray[1]);
}); });
break; }
case 'ddc':
setValue &&
finder.andWhereHas('collections', (query) => {
query.where('number', setValue);
});
break;
} }
} }
}
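Set specs arrive as type:value pairs, so the filter above reduces to a split and a switch; a few illustrative inputs (the DDC number 551 is an assumption, not a value from this diff):

// Hypothetical examples of the set parsing above.
const [setType, setValue] = 'ddc:551'.split(':');  // setType = 'ddc', setValue = '551'
// 'data-type:<type>'.split(':')  -> filters on the dataset type column
// 'open_access'.split(':')       -> setValue is undefined; the licence filter needs no value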
private applyDateFilters(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) { // const timeZone = "Europe/Vienna"; // Canonical time zone name
const { from, until } = queryParams; // &from=2020-09-03&until=2020-09-03
// &from=2020-09-11&until=2021-05-11
if ('from' in oaiRequest && 'until' in oaiRequest) {
const from = oaiRequest['from'] as string;
let fromDate = dayjs(from); //.tz(timeZone);
const until = oaiRequest['until'] as string;
let untilDate = dayjs(until); //.tz(timeZone);
if (!fromDate.isValid() || !untilDate.isValid()) {
throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'Date Parameter is not valid.', OaiErrorCodes.BADARGUMENT);
}
fromDate = dayjs.tz(from, 'Europe/Vienna');
untilDate = dayjs.tz(until, 'Europe/Vienna');
if (from && until) { if (from.length != until.length) {
this.handleFromUntilFilter(finder, from, until); throw new OaiModelException(
} else if (from) { StatusCodes.INTERNAL_SERVER_ERROR,
this.handleFromFilter(finder, from); 'The request has different granularities for the from and until parameters.',
} else if (until) { OaiErrorCodes.BADARGUMENT,
this.handleUntilFilter(finder, until); );
} }
} fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
untilDate.hour() == 0 && (untilDate = untilDate.endOf('day'));
private handleFromUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string, until: string) { finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
const fromDate = this.parseDateWithValidation(from, 'From'); } else if ('from' in oaiRequest && !('until' in oaiRequest)) {
const untilDate = this.parseDateWithValidation(until, 'Until'); const from = oaiRequest['from'] as string;
let fromDate = dayjs(from);
if (!fromDate.isValid()) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'From date parameter is not valid.',
OaiErrorCodes.BADARGUMENT,
);
}
fromDate = dayjs.tz(from, 'Europe/Vienna');
fromDate.hour() == 0 && (fromDate = fromDate.startOf('day'));
if (from.length !== until.length) { const now = dayjs();
throw new OaiModelException( if (fromDate.isAfter(now)) {
StatusCodes.INTERNAL_SERVER_ERROR, throw new OaiModelException(
'The request has different granularities for the from and until parameters.', StatusCodes.INTERNAL_SERVER_ERROR,
OaiErrorCodes.BADARGUMENT, 'Given from date is greater than now. The given values result in an empty list.',
); OaiErrorCodes.NORECORDSMATCH,
);
} else {
finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
}
} else if (!('from' in oaiRequest) && 'until' in oaiRequest) {
const until = oaiRequest['until'] as string;
let untilDate = dayjs(until);
if (!untilDate.isValid()) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'Until date parameter is not valid.',
OaiErrorCodes.BADARGUMENT,
);
}
untilDate = dayjs.tz(until, 'Europe/Vienna');
untilDate.hour() == 0 && (untilDate = untilDate.endOf('day'));
const firstPublishedDataset: Dataset = (await Dataset.earliestPublicationDate()) as Dataset;
const earliestPublicationDate = dayjs(firstPublishedDataset.server_date_published.toISO()); //format("YYYY-MM-DDThh:mm:ss[Z]"));
if (earliestPublicationDate.isAfter(untilDate)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
`earliestDatestamp is greater than given until date.
The given values result in an empty list.`,
OaiErrorCodes.NORECORDSMATCH,
);
} else {
finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
}
} }
finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]); let reldocIdsDocs = await finder.select('publish_id').orderBy('publish_id');
} numWrapper.reldocIds = reldocIdsDocs.map((dat) => dat.publish_id);
numWrapper.totalIds = numWrapper.reldocIds.length; //212
private handleFromFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string) {
const fromDate = this.parseDateWithValidation(from, 'From');
const now = dayjs();
if (fromDate.isAfter(now)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'Given from date is greater than now. The given values result in an empty list.',
OaiErrorCodes.NORECORDSMATCH,
);
}
finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
}
private handleUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, until: string) {
const untilDate = this.parseDateWithValidation(until, 'Until');
const earliestPublicationDate = dayjs(this.firstPublishedDataset?.server_date_published.toISO());
if (earliestPublicationDate.isAfter(untilDate)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'earliestDatestamp is greater than given until date. The given values result in an empty list.',
OaiErrorCodes.NORECORDSMATCH,
);
}
finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
}
private parseDateWithValidation(dateStr: string, label: string) {
let date = dayjs(dateStr);
if (!date.isValid()) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
`${label} date parameter is not valid.`,
OaiErrorCodes.BADARGUMENT,
);
}
date = dayjs.tz(dateStr, 'Europe/Vienna');
return date.hour() === 0 ? (label === 'From' ? date.startOf('day') : date.endOf('day')) : date;
} }
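dayjs.tz() only exists after the utc and timezone plugins are registered, so a setup along these lines has to run at boot (a minimal sketch assuming the standard dayjs plugin mechanism, not code from this diff):

import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import timezone from 'dayjs/plugin/timezone.js';

dayjs.extend(utc);
dayjs.extend(timezone);

// 'from=2020-09-11' pinned to the repository's zone:
let fromDate = dayjs.tz('2020-09-11', 'Europe/Vienna'); // midnight local time
// hour() === 0, so the filter widens it with startOf('day') as above.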
private setParamResumption(res: string, cursor: number, totalIds: number) { private setParamResumption(res: string, cursor: number, totalIds: number) {
@ -606,17 +545,19 @@ export default class OaiController {
} }
private async getDatasetXmlDomNode(dataset: Dataset) { private async getDatasetXmlDomNode(dataset: Dataset) {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields(); const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
xmlModel.excludeEmptyFields();
xmlModel.caching = true;
// const cache = dataset.xmlCache ? dataset.xmlCache : null; // const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache'); // dataset.load('xmlCache');
if (dataset.xmlCache) { if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache); xmlModel.xmlCache = dataset.xmlCache;
} }
// return cache.toXmlDocument(); // return cache.getDomDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument(); const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return xmlDocument; return domDocument;
} }
private addSpecInformation(domNode: XMLBuilder, information: string) { private addSpecInformation(domNode: XMLBuilder, information: string) {
@ -700,30 +641,4 @@ export default class OaiController {
this.xsltParameter['oai_error_code'] = 'badVerb'; this.xsltParameter['oai_error_code'] = 'badVerb';
this.xsltParameter['oai_error_message'] = 'The verb provided in the request is illegal.'; this.xsltParameter['oai_error_message'] = 'The verb provided in the request is illegal.';
} }
/**
* Helper method to build a browser fingerprint by combining:
* - User-Agent header,
* - the IP address,
* - Accept-Language header,
* - current timestamp rounded to the hour.
*
* Every new hour, this will return a different fingerprint.
*/
private getBrowserFingerprint(request: Request): string {
const userAgent = request.header('user-agent') || 'unknown';
// Check for X-Forwarded-For header to use the client IP from the proxy if available.
const xForwardedFor = request.header('x-forwarded-for');
let ip = request.ip();
// console.log(ip);
if (xForwardedFor) {
// X-Forwarded-For may contain a comma-separated list of IPs; the first one is the client IP.
ip = xForwardedFor.split(',')[0].trim();
// console.log('xforwardedfor ip' + ip);
}
const locale = request.header('accept-language') || 'default';
// Round the current time to the start of the hour.
const timestampHour = dayjs().startOf('hour').format('YYYY-MM-DDTHH');
return `${userAgent}-${ip}-${locale}-${timestampHour}`;
}
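Because the timestamp is truncated to the hour, the same client reuses one fingerprint, and therefore one resumption key, for up to an hour. An illustrative value (all inputs hypothetical):

import dayjs from 'dayjs';

const userAgent = 'Mozilla/5.0';   // request.header('user-agent')
const ip = '203.0.113.7';          // first entry of x-forwarded-for
const locale = 'en-US';            // request.header('accept-language')
const timestampHour = dayjs('2024-05-13T14:37:00').startOf('hour').format('YYYY-MM-DDTHH');
const fingerprint = `${userAgent}-${ip}-${locale}-${timestampHour}`;
// -> 'Mozilla/5.0-203.0.113.7-en-US-2024-05-13T14', stable until 15:00.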
} }


@ -9,7 +9,6 @@ import vine from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main'; import mail from '@adonisjs/mail/services/main';
import logger from '@adonisjs/core/services/logger'; import logger from '@adonisjs/core/services/logger';
import { validate } from 'deep-email-validator'; import { validate } from 'deep-email-validator';
import File from '#models/file';
interface Dictionary { interface Dictionary {
[index: string]: string; [index: string]: string;
@ -39,21 +38,13 @@ export default class DatasetsController {
} }
datasets.orderBy(attribute, sortOrder); datasets.orderBy(attribute, sortOrder);
} else { } else {
// datasets.orderBy('id', 'asc'); // users.orderBy('created_at', 'desc');
// Custom ordering to prioritize the rejected_to_reviewer state datasets.orderBy('id', 'asc');
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_to_reviewer' THEN 0
ELSE 1
END ASC,
id ASC
`);
} }
// const users = await User.query().orderBy('login').paginate(page, limit); // const users = await User.query().orderBy('login').paginate(page, limit);
const myDatasets = await datasets const myDatasets = await datasets
// .where('server_state', 'approved') .where('server_state', 'approved')
.whereIn('server_state', ['approved', 'rejected_to_reviewer'])
.where('reviewer_id', user.id) .where('reviewer_id', user.id)
.preload('titles') .preload('titles')
@ -71,51 +62,7 @@ export default class DatasetsController {
}); });
} }
public async review({ request, inertia, response, auth }: HttpContext) { public async review({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const datasetQuery = Dataset.query().where('id', id);
datasetQuery
.preload('titles', (query) => query.orderBy('id', 'asc'))
.preload('descriptions', (query) => query.orderBy('id', 'asc'))
.preload('coverage')
.preload('licenses')
.preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
// .preload('subjects')
.preload('subjects', (builder) => {
builder.orderBy('id', 'asc').withCount('datasets');
})
.preload('references')
.preload('project')
.preload('files', (query) => {
query.orderBy('sort_order', 'asc'); // Sort by sort_order column
});
const dataset = await datasetQuery.firstOrFail();
const validStates = ['approved', 'rejected_to_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be reviewed. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('reviewer.dataset.list');
}
return inertia.render('Reviewer/Dataset/Review', {
dataset,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
public async review_old({ request, inertia, response, auth }: HttpContext) {
const id = request.param('id'); const id = request.param('id');
const dataset = await Dataset.query() const dataset = await Dataset.query()
.where('id', id) .where('id', id)
@ -211,10 +158,6 @@ export default class DatasetsController {
return inertia.render('Reviewer/Dataset/Review', { return inertia.render('Reviewer/Dataset/Review', {
dataset, dataset,
fields: fields, fields: fields,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
}); });
} }
@ -223,7 +166,7 @@ export default class DatasetsController {
// const { id } = params; // const { id } = params;
const dataset = await Dataset.findOrFail(id); const dataset = await Dataset.findOrFail(id);
const validStates = ['approved', 'rejected_to_reviewer']; const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) { if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!'); // throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back(); // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
@ -237,10 +180,6 @@ export default class DatasetsController {
} }
dataset.server_state = 'reviewed'; dataset.server_state = 'reviewed';
// if editor has rejected to reviewer:
if (dataset.reject_editor_note != null) {
dataset.reject_editor_note = null;
}
try { try {
// await dataset.related('editor').associate(user); // already persists the association // await dataset.related('editor').associate(user); // already persists the association
@ -264,7 +203,7 @@ export default class DatasetsController {
}) })
.firstOrFail(); .firstOrFail();
const validStates = ['approved', 'rejected_to_reviewer']; const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) { if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!'); // session.flash('errors', 'Invalid server state!');
return response return response
@ -311,12 +250,12 @@ export default class DatasetsController {
throw error; throw error;
} }
const validStates = ['approved', 'rejected_to_reviewer']; const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) { if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!'); // throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back(); // return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response return response
.flash( .flash(
`Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`, `Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
'warning', 'warning',
) )
@ -337,7 +276,7 @@ export default class DatasetsController {
validateSMTP: false, validateSMTP: false,
}); });
const validRecipientEmail: boolean = validationResult.valid; const validRecipientEmail: boolean = validationResult.valid;
// let emailStatusMessage = ''; let emailStatusMessage = '';
if (sendMail == true) { if (sendMail == true) {
if (dataset.editor.email && validRecipientEmail) { if (dataset.editor.email && validRecipientEmail) {
@ -350,7 +289,7 @@ export default class DatasetsController {
<p>Best regards,<br>Your Tethys reviewer: ${authUser.login}</p> <p>Best regards,<br>Your Tethys reviewer: ${authUser.login}</p>
`); `);
}); });
// emailStatusMessage = ` A rejection email was successfully sent to ${dataset.editor.email}.`; emailStatusMessage = ` A rejection email was successfully sent to ${dataset.editor.email}.`;
} catch (error) { } catch (error) {
logger.error(error); logger.error(error);
return response return response
@ -358,7 +297,7 @@ export default class DatasetsController {
.toRoute('reviewer.dataset.list'); .toRoute('reviewer.dataset.list');
} }
} else { } else {
// emailStatusMessage = ` However, the email could not be sent because the editor's email address (${dataset.editor.email}) is not valid.`; emailStatusMessage = ` However, the email could not be sent because the editor's email address (${dataset.editor.email}) is not valid.`;
} }
} }
@ -368,41 +307,4 @@ export default class DatasetsController {
.toRoute('reviewer.dataset.list') .toRoute('reviewer.dataset.list')
.flash(`You have rejected dataset ${dataset.id}! to editor ${dataset.editor.login}`, 'message'); .flash(`You have rejected dataset ${dataset.id}! to editor ${dataset.editor.login}`, 'message');
} }
// public async download({ params, response }: HttpContext) {
// const id = params.id;
// // Find the file by ID
// const file = await File.findOrFail(id);
// // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
// const filePath = file.filePath;
// const fileExt = file.filePath.split('.').pop() || '';
// // Set the response headers and download the file
// response.header('Content-Type', file.mime_type || 'application/octet-stream');
// response.attachment(`${file.label}.${fileExt}`);
// return response.download(filePath);
// }
public async download({ params, response }: HttpContext) {
const id = params.id;
// Find the file by ID
const file = await File.findOrFail(id);
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}
} }

File diff suppressed because it is too large

@ -1,231 +0,0 @@
import DocumentXmlCache from '#models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import Dataset from '#models/dataset';
import Strategy from './Strategy.js';
import { builder } from 'xmlbuilder2';
import logger from '@adonisjs/core/services/logger';
/**
* Configuration for XML serialization
*
* @interface XmlSerializationConfig
*/
export interface XmlSerializationConfig {
/** The dataset model to serialize */
model: Dataset;
/** DOM representation (if available) */
dom?: XMLBuilder;
/** Fields to exclude from serialization */
excludeFields: Array<string>;
/** Whether to exclude empty fields */
excludeEmpty: boolean;
/** Base URI for xlink:ref elements */
baseUri: string;
}
/**
* Options for controlling serialization behavior
*/
export interface SerializationOptions {
/** Enable XML caching */
enableCaching?: boolean;
/** Exclude empty fields from output */
excludeEmptyFields?: boolean;
/** Custom base URI */
baseUri?: string;
/** Fields to exclude */
excludeFields?: string[];
}
/**
* DatasetXmlSerializer
*
* Handles XML serialization of Dataset models with intelligent caching.
* Generates XML representations and manages cache lifecycle to optimize performance.
*
* @example
* ```typescript
* const serializer = new DatasetXmlSerializer(dataset);
* serializer.enableCaching();
* serializer.excludeEmptyFields();
*
* const xmlDocument = await serializer.toXmlDocument();
* ```
*/
export default class DatasetXmlSerializer {
private readonly config: XmlSerializationConfig;
private readonly strategy: Strategy;
private cache: DocumentXmlCache | null = null;
private cachingEnabled = false;
constructor(dataset: Dataset, options: SerializationOptions = {}) {
this.config = {
model: dataset,
excludeEmpty: options.excludeEmptyFields ?? false,
baseUri: options.baseUri ?? '',
excludeFields: options.excludeFields ?? [],
};
this.strategy = new Strategy({
excludeEmpty: options.excludeEmptyFields ?? false,
baseUri: options.baseUri ?? '',
excludeFields: options.excludeFields ?? [],
model: dataset,
});
if (options.enableCaching) {
this.cachingEnabled = true;
}
}
/**
* Enable caching for XML generation
* When enabled, generated XML is stored in database for faster retrieval
*/
public enableCaching(): this {
this.cachingEnabled = true;
return this;
}
/**
* Disable caching for XML generation
*/
public disableCaching(): this {
this.cachingEnabled = false;
return this;
}
set model(model: Dataset) {
this.config.model = model;
}
/**
* Configure to exclude empty fields from XML output
*/
public excludeEmptyFields(): this {
this.config.excludeEmpty = true;
return this;
}
/**
* Set the cache instance directly (useful when preloading)
* @param cache - The DocumentXmlCache instance
*/
public setCache(cache: DocumentXmlCache): this {
this.cache = cache;
return this;
}
/**
* Get the current cache instance
*/
public getCache(): DocumentXmlCache | null {
return this.cache;
}
/**
* Get DOM document with intelligent caching
* Returns cached version if valid, otherwise generates new document
*/
public async toXmlDocument(): Promise<XMLBuilder | null> {
const dataset = this.config.model;
// Try to get from cache first
let cachedDocument: XMLBuilder | null = await this.retrieveFromCache();
if (cachedDocument) {
logger.debug(`Using cached XML for dataset ${dataset.id}`);
return cachedDocument;
}
// Generate fresh document
logger.debug(`[DatasetXmlSerializer] Cache miss - generating fresh XML for dataset ${dataset.id}`);
const freshDocument = await this.strategy.createDomDocument();
if (!freshDocument) {
logger.error(`[DatasetXmlSerializer] Failed to generate XML for dataset ${dataset.id}`);
return null;
}
// Cache if caching is enabled
if (this.cachingEnabled) {
await this.persistToCache(freshDocument, dataset);
}
// Extract the dataset-specific node
return this.extractDatasetNode(freshDocument);
}
/**
* Generate XML string representation
* Convenience method that converts XMLBuilder to string
*/
public async toXmlString(): Promise<string | null> {
const document = await this.toXmlDocument();
return document ? document.end({ prettyPrint: false }) : null;
}
/**
* Persist generated XML document to cache
* Non-blocking - failures are logged but don't interrupt the flow
*/
private async persistToCache(domDocument: XMLBuilder, dataset: Dataset): Promise<void> {
try {
this.cache = this.cache || new DocumentXmlCache();
this.cache.document_id = dataset.id;
this.cache.xml_version = 1;
this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
this.cache.xml_data = domDocument.end();
await this.cache.save();
logger.debug(`Cached XML for dataset ${dataset.id}`);
} catch (error) {
logger.error(`Failed to cache XML for dataset ${dataset.id}: ${error.message}`);
// Don't throw - caching failure shouldn't break the flow
}
}
/**
* Extract the Rdr_Dataset node from full document
*/
private extractDatasetNode(domDocument: XMLBuilder): XMLBuilder | null {
const node = domDocument.find((n) => n.node.nodeName === 'Rdr_Dataset', false, true)?.node;
if (node) {
return builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
}
return domDocument;
}
/**
* Attempt to retrieve valid cached XML document
* Returns null if cache doesn't exist or is stale
*/
private async retrieveFromCache(): Promise<XMLBuilder | null> {
const dataset: Dataset = this.config.model;
if (!this.cache) {
return null;
}
// Check if cache is still valid
const actuallyCached = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
if (!actuallyCached) {
logger.debug(`Cache invalid for dataset ${dataset.id}`);
return null;
}
// Cache is current; return the cached document
try {
if (this.cache) {
return this.cache.getDomDocument();
} else {
return null;
}
} catch (error) {
logger.error(`Failed to retrieve cached document for dataset ${dataset.id}: ${error.message}`);
return null;
}
}
}


@ -1,22 +1,25 @@
// import { Client } from 'guzzle';
// import { Log } from '@adonisjs/core/build/standalone';
// import { DoiInterface } from './interfaces/DoiInterface';
import DoiClientContract from '#app/Library/Doi/DoiClientContract'; import DoiClientContract from '#app/Library/Doi/DoiClientContract';
import DoiClientException from '#app/exceptions/DoiClientException'; import DoiClientException from '#app/exceptions/DoiClientException';
import { StatusCodes } from 'http-status-codes'; import { StatusCodes } from 'http-status-codes';
import logger from '@adonisjs/core/services/logger'; import logger from '@adonisjs/core/services/logger';
import { AxiosResponse } from 'axios'; import { AxiosResponse } from 'axios';
import { default as axios } from 'axios'; import axios from 'axios';
export class DoiClient implements DoiClientContract { export class DoiClient implements DoiClientContract {
public username: string; public username: string;
public password: string; public password: string;
public serviceUrl: string; public serviceUrl: string;
public apiUrl: string;
constructor() { constructor() {
// const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug'; // const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
this.username = process.env.DATACITE_USERNAME || ''; this.username = process.env.DATACITE_USERNAME || '';
this.password = process.env.DATACITE_PASSWORD || ''; this.password = process.env.DATACITE_PASSWORD || '';
this.serviceUrl = process.env.DATACITE_SERVICE_URL || ''; this.serviceUrl = process.env.DATACITE_SERVICE_URL || '';
this.apiUrl = process.env.DATACITE_API_URL || 'https://api.datacite.org'; // this.prefix = process.env.DATACITE_PREFIX || '';
// this.base_domain = process.env.BASE_DOMAIN || '';
if (this.username === '' || this.password === '' || this.serviceUrl === '') { if (this.username === '' || this.password === '' || this.serviceUrl === '') {
const message = 'Missing configuration settings to properly initialize DOI client'; const message = 'Missing configuration settings to properly initialize DOI client';
@ -47,7 +50,7 @@ export class DoiClient implements DoiClientContract {
'Content-Type': 'application/xml;charset=UTF-8', 'Content-Type': 'application/xml;charset=UTF-8',
}; };
try { try {
const metadataResponse = await axios.put(`${this.serviceUrl}/metadata/${doiValue}`, xmlMeta, { auth, headers }); const metadataResponse = await axios.default.put(`${this.serviceUrl}/metadata/${doiValue}`, xmlMeta, { auth, headers });
// Response Codes // Response Codes
// 201 Created: operation successful // 201 Created: operation successful
@ -62,7 +65,7 @@ export class DoiClient implements DoiClientContract {
throw new DoiClientException(metadataResponse.status, message); throw new DoiClientException(metadataResponse.status, message);
} }
const doiResponse = await axios.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, { const doiResponse = await axios.default.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, {
auth, auth,
headers, headers,
}); });
@ -87,240 +90,4 @@ export class DoiClient implements DoiClientContract {
throw new DoiClientException(error.response.status, error.response.data); throw new DoiClientException(error.response.status, error.response.data);
} }
} }
/**
* Retrieves DOI information from DataCite REST API
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @returns Promise with DOI information or null if not found
*/
public async getDoiInfo(doiValue: string): Promise<any | null> {
try {
// Use configurable DataCite REST API URL
const dataciteApiUrl = `${this.apiUrl}/dois/${doiValue}`;
const response = await axios.get(dataciteApiUrl, {
headers: {
Accept: 'application/vnd.api+json',
},
});
if (response.status === 200 && response.data.data) {
return {
created: response.data.data.attributes.created,
registered: response.data.data.attributes.registered,
updated: response.data.data.attributes.updated,
published: response.data.data.attributes.published,
state: response.data.data.attributes.state,
url: response.data.data.attributes.url,
metadata: response.data.data.attributes,
};
}
} catch (error) {
if (error.response?.status === 404) {
logger.debug(`DOI ${doiValue} not found in DataCite`);
return null;
}
logger.debug(`DataCite REST API failed for ${doiValue}: ${error.message}`);
// Fallback to MDS API
return await this.getDoiInfoFromMds(doiValue);
}
return null;
}
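A typical lookup, reusing the DOI value from the docblock above (a usage sketch, not code from this diff):

// Hypothetical caller of getDoiInfo().
const client = new DoiClient();
const info = await client.getDoiInfo('10.5072/tethys.999');
if (info) {
    console.log(info.state, info.url); // e.g. 'findable' plus the registered landing page
} else {
    console.log('DOI is not registered');
}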
/**
* Fallback method to get DOI info from MDS API
*
* @param doiValue The DOI identifier
* @returns Promise with basic DOI information or null
*/
private async getDoiInfoFromMds(doiValue: string): Promise<any | null> {
try {
const auth = {
username: this.username,
password: this.password,
};
// Get DOI URL
const doiResponse = await axios.get(`${this.serviceUrl}/doi/${doiValue}`, { auth });
if (doiResponse.status === 200) {
// Get metadata if available
try {
const metadataResponse = await axios.get(`${this.serviceUrl}/metadata/${doiValue}`, {
auth,
headers: {
Accept: 'application/xml',
},
});
return {
url: doiResponse.data.trim(),
metadata: metadataResponse.data,
created: new Date().toISOString(), // MDS doesn't provide creation dates
registered: new Date().toISOString(), // Use current time as fallback
source: 'mds',
};
} catch (metadataError) {
// Return basic info even if metadata fetch fails
return {
url: doiResponse.data.trim(),
created: new Date().toISOString(),
registered: new Date().toISOString(),
source: 'mds',
};
}
}
} catch (error) {
if (error.response?.status === 404) {
logger.debug(`DOI ${doiValue} not found in DataCite MDS`);
return null;
}
logger.debug(`DataCite MDS API failed for ${doiValue}: ${error.message}`);
}
return null;
}
/**
* Checks if a DOI exists in DataCite
*
* @param doiValue The DOI identifier
* @returns Promise<boolean> True if DOI exists
*/
public async doiExists(doiValue: string): Promise<boolean> {
const doiInfo = await this.getDoiInfo(doiValue);
return doiInfo !== null;
}
/**
* Gets the last modification date of a DOI
*
* @param doiValue The DOI identifier
* @returns Promise<Date | null> Last modification date or creation date if never updated, null if not found
*/
public async getDoiLastModified(doiValue: string): Promise<Date | null> {
const doiInfo = await this.getDoiInfo(doiValue);
if (doiInfo) {
// Use updated date if available, otherwise fall back to created/registered date
const dateToUse = doiInfo.updated || doiInfo.registered || doiInfo.created;
if (dateToUse) {
logger.debug(
`DOI ${doiValue}: Using ${doiInfo.updated ? 'updated' : doiInfo.registered ? 'registered' : 'created'} date: ${dateToUse}`,
);
return new Date(dateToUse);
}
}
return null;
}
/**
* Makes a DOI unfindable (registered but not discoverable)
* Note: DOIs cannot be deleted, only made unfindable
* await doiClient.makeDoiUnfindable('10.21388/tethys.231');
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @returns Promise<AxiosResponse<any>> The http response
*/
public async makeDoiUnfindable(doiValue: string): Promise<AxiosResponse<any>> {
const auth = {
username: this.username,
password: this.password,
};
try {
// First, check if DOI exists
const exists = await this.doiExists(doiValue);
if (!exists) {
throw new DoiClientException(404, `DOI ${doiValue} not found`);
}
// Delete the DOI URL mapping to make it unfindable
// This removes the URL but keeps the metadata registered
const response = await axios.delete(`${this.serviceUrl}/doi/${doiValue}`, { auth });
// Response Codes for DELETE /doi/{doi}
// 200 OK: operation successful
// 401 Unauthorized: no login
// 403 Forbidden: login problem, quota exceeded
// 404 Not Found: DOI does not exist
if (response.status !== 200) {
const message = `Unexpected DataCite MDS response code ${response.status}`;
logger.error(message);
throw new DoiClientException(response.status, message);
}
logger.info(`DOI ${doiValue} successfully made unfindable`);
return response;
} catch (error) {
logger.error(`Failed to make DOI ${doiValue} unfindable: ${error.message}`);
if (error instanceof DoiClientException) {
throw error;
}
throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
}
}
/**
* Makes a DOI findable again by re-registering the URL
* await doiClient.makeDoiFindable(
* '10.21388/tethys.231',
* 'https://doi.dev.tethys.at/10.21388/tethys.231'
* );
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @param landingPageUrl The landing page URL
* @returns Promise<AxiosResponse<any>> The http response
*/
public async makeDoiFindable(doiValue: string, landingPageUrl: string): Promise<AxiosResponse<any>> {
const auth = {
username: this.username,
password: this.password,
};
try {
// Re-register the DOI with its URL to make it findable again
const response = await axios.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, { auth });
// Response Codes for PUT /doi/{doi}
// 201 Created: operation successful
// 400 Bad Request: request body must be exactly two lines: DOI and URL
// 401 Unauthorized: no login
// 403 Forbidden: login problem, quota exceeded
// 412 Precondition failed: metadata must be uploaded first
if (response.status !== 201) {
const message = `Unexpected DataCite MDS response code ${response.status}`;
logger.error(message);
throw new DoiClientException(response.status, message);
}
logger.info(`DOI ${doiValue} successfully made findable again`);
return response;
} catch (error) {
logger.error(`Failed to make DOI ${doiValue} findable: ${error.message}`);
if (error instanceof DoiClientException) {
throw error;
}
throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
}
}
/**
* Gets the current state of a DOI (draft, registered, findable)
* const state = await doiClient.getDoiState('10.21388/tethys.231');
* console.log(`Current state: ${state}`); // 'findable'
*
* @param doiValue The DOI identifier
* @returns Promise<string | null> The DOI state or null if not found
*/
public async getDoiState(doiValue: string): Promise<string | null> {
const doiInfo = await this.getDoiInfo(doiValue);
return doiInfo?.state || null;
}
} }


@ -4,7 +4,6 @@ export default class ResumptionToken {
private _resumptionId = ''; private _resumptionId = '';
private _startPosition = 0; private _startPosition = 0;
private _totalIds = 0; private _totalIds = 0;
private _queryParams: Record<string, any> = {};
get key(): string { get key(): string {
return this.metadataPrefix + this.startPosition + this.totalIds; return this.metadataPrefix + this.startPosition + this.totalIds;
@ -49,12 +48,4 @@ export default class ResumptionToken {
set totalIds(totalIds: number) { set totalIds(totalIds: number) {
this._totalIds = totalIds; this._totalIds = totalIds;
} }
get queryParams(): Record<string, any> {
return this._queryParams;
}
set queryParams(params: Record<string, any>) {
this._queryParams = params;
}
} }
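On the removed side, queryParams preserved the original request filters so a resumed harvest could rebuild the exact same query; a sketch of a stored token (field names from fetchAndSetResults above, all values illustrative):

// Hypothetical token as assembled before tokenWorker.set():
const token = new ResumptionToken();
token.startPosition = 101;
token.totalIds = 184;
token.documentIds = [101, 102, 103]; // truncated for brevity
token.metadataPrefix = 'oai_datacite'; // assumed prefix
token.queryParams = {
    metadataPrefix: 'oai_datacite',
    from: '2020-09-11',
    deliveringStates: ['published'], // assumed content of deliveringDocumentStates
};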


@ -6,6 +6,6 @@ export default abstract class TokenWorkerContract {
abstract connect(): void; abstract connect(): void;
abstract close(): void; abstract close(): void;
abstract get(key: string): Promise<ResumptionToken | null>; abstract get(key: string): Promise<ResumptionToken | null>;
abstract set(token: ResumptionToken, browserFingerprint: string): Promise<string>; abstract set(token: ResumptionToken): Promise<string>;
} }


@ -40,64 +40,14 @@ export default class TokenWorkerService implements TokenWorkerContract {
return result !== undefined && result !== null; return result !== undefined && result !== null;
} }
/** public async set(token: ResumptionToken): Promise<string> {
* Simplified set method that stores the token using a browser fingerprint key. const uniqueName = await this.generateUniqueName();
* If the token for that fingerprint already exists and its documentIds match the new token,
* then the fingerprint key is simply returned.
*/
public async set(token: ResumptionToken, browserFingerprint: string): Promise<string> {
// Generate a 15-digit unique number string based on the fingerprint
const uniqueNumberKey = this.createUniqueNumberFromFingerprint(browserFingerprint, token.documentIds, token.totalIds);
// Optionally, you could prefix it if desired, e.g. 'rs_' + uniqueNumberKey
const fingerprintKey = uniqueNumberKey;
// const fingerprintKey = `rs_fp_${browserFingerprint}`;
const existingTokenString = await this.cache.get(fingerprintKey);
if (existingTokenString) {
const existingToken = this.parseToken(existingTokenString);
if (this.arraysAreEqual(existingToken.documentIds, token.documentIds)) {
return fingerprintKey;
}
}
const serialToken = JSON.stringify(token); const serialToken = JSON.stringify(token);
await this.cache.setEx(fingerprintKey, this.ttl, serialToken); await this.cache.setEx(uniqueName, this.ttl, serialToken);
return fingerprintKey; return uniqueName;
} }
// Updated helper method to generate a unique key based on fingerprint and documentIds
private createUniqueNumberFromFingerprint(browserFingerprint: string, documentIds: number[], totalIds: number): string {
// Combine the fingerprint, document IDs and totalIds to produce the input string
const combined = browserFingerprint + ':' + documentIds.join('-') + ':' + totalIds;
// Simple hash algorithm
let hash = 0;
for (let i = 0; i < combined.length; i++) {
hash = (hash << 5) - hash + combined.charCodeAt(i);
hash |= 0; // Convert to 32-bit integer
}
// Ensure positive number and limit it to at most 15 digits
const positiveHash = Math.abs(hash) % 1000000000000000;
// Pad with trailing zeros to ensure a 15-digit string
return positiveHash.toString().padEnd(15, '0');
}
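The key derivation is a plain 31-multiplier string hash clamped to 32 bits and padded to 15 digits, so an identical fingerprint and id window always maps to the same cache key; a condensed stand-alone sketch:

// Hypothetical stand-alone version of the derivation above.
function hashToKey(combined: string): string {
    let hash = 0;
    for (let i = 0; i < combined.length; i++) {
        hash = (hash << 5) - hash + combined.charCodeAt(i); // hash * 31 + charCode
        hash |= 0; // clamp to a 32-bit integer
    }
    return (Math.abs(hash) % 1_000_000_000_000_000).toString().padEnd(15, '0');
}

// Same inputs, same key: a repeated harvest within the hour finds its stored token.
hashToKey('Mozilla/5.0-203.0.113.7-en-US-2024-05-13T14:101-102:184');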
// Add a helper function to compare two arrays of numbers with identical order
private arraysAreEqual(arr1: number[], arr2: number[]): boolean {
if (arr1.length !== arr2.length) {
return false;
}
return arr1.every((num, index) => num === arr2[index]);
}
// public async set(token: ResumptionToken): Promise<string> {
// const uniqueName = await this.generateUniqueName();
// const serialToken = JSON.stringify(token);
// await this.cache.setEx(uniqueName, this.ttl, serialToken);
// return uniqueName;
// }
private async generateUniqueName(): Promise<string> { private async generateUniqueName(): Promise<string> {
let fc = 0; let fc = 0;
const uniqueId = dayjs().unix().toString(); const uniqueId = dayjs().unix().toString();


@ -2,7 +2,7 @@ import Dataset from '#models/dataset';
import { Client } from '@opensearch-project/opensearch'; import { Client } from '@opensearch-project/opensearch';
import { create } from 'xmlbuilder2'; import { create } from 'xmlbuilder2';
import SaxonJS from 'saxon-js'; import SaxonJS from 'saxon-js';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer'; import XmlModel from '#app/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js'; import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import logger from '@adonisjs/core/services/logger'; import logger from '@adonisjs/core/services/logger';
import { readFileSync } from 'fs'; import { readFileSync } from 'fs';
@ -72,42 +72,31 @@ export default {
} }
}, },
/**
* Index a dataset document to OpenSearch/Elasticsearch
*/
async indexDocument(dataset: Dataset, index_name: string): Promise<void> { async indexDocument(dataset: Dataset, index_name: string): Promise<void> {
try { try {
// Load XSLT transformation file const proc = readFileSync('public/assets2/solr.sef.json');
const xsltProc = readFileSync('public/assets2/solr.sef.json'); const doc: string = await this.getTransformedString(dataset, proc);
// Transform dataset to JSON document let document = JSON.parse(doc);
const jsonDoc: string = await this.getTransformedString(dataset, xsltProc);
const document = JSON.parse(jsonDoc);
// Index the document into OpenSearch with the JSON document body
await this.client.index({ await this.client.index({
id: dataset.publish_id?.toString(), id: dataset.publish_id?.toString(),
index: index_name, index: index_name,
body: document, body: document,
refresh: true, // make immediately searchable refresh: true,
}); });
logger.info(`Dataset ${dataset.publish_id} successfully indexed to ${index_name}`); logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
} catch (error) { } catch (error) {
logger.error(`Failed to index dataset ${dataset.publish_id}: ${error.message}`); logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`);
throw error; // Re-throw to allow caller to handle
} }
}, },
/**
* Transform dataset XML to JSON using XSLT
*/
async getTransformedString(dataset: Dataset, proc: Buffer): Promise<string> { async getTransformedString(dataset: Dataset, proc: Buffer): Promise<string> {
// Generate XML string from dataset let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const xmlString = await this.generateDatasetXml(dataset); const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
const xmlString = xml.end({ prettyPrint: false });
try { try {
// Apply XSLT transformation
const result = await SaxonJS.transform({ const result = await SaxonJS.transform({
stylesheetText: proc, stylesheetText: proc,
destination: 'serialized', destination: 'serialized',
@ -119,18 +108,6 @@ export default {
return ''; return '';
} }
}, },
/**
* Generate XML string from dataset model
*/
async generateDatasetXml(dataset: Dataset): Promise<string> {
const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
return xml.end({ prettyPrint: false });
},
}; };
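Indexing a freshly published dataset is then a single call (the import path and index name are placeholders, not values from this diff):

// Hypothetical caller; adjust the import path to this service module's real location.
import openSearchService from '#services/opensearch';
import Dataset from '#models/dataset';

const dataset = await Dataset.query().preload('xmlCache').firstOrFail();
await openSearchService.indexDocument(dataset, 'datasets-index'); // index name assumed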
/** /**
* Return the default global focus trap stack * Return the default global focus trap stack
@ -138,49 +115,74 @@ export default {
* @return {import('focus-trap').FocusTrap[]} * @return {import('focus-trap').FocusTrap[]}
*/ */
/** // export const indexDocument = async (dataset: Dataset, index_name: string, proc: Buffer): Promise<void> => {
* Create complete XML record for dataset // try {
* Handles caching and metadata enrichment // const doc = await getJsonString(dataset, proc);
*/
// let document = JSON.parse(doc);
// await client.index({
// id: dataset.publish_id?.toString(),
// index: index_name,
// body: document,
// refresh: true,
// });
// Logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
// } catch (error) {
// Logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`);
// }
// };
// const getJsonString = async (dataset, proc): Promise<string> => {
// let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
// const datasetNode = xml.root().ele('Dataset');
// await createXmlRecord(dataset, datasetNode);
// const xmlString = xml.end({ prettyPrint: false });
// try {
// const result = await transform({
// stylesheetText: proc,
// destination: 'serialized',
// sourceText: xmlString,
// });
// return result.principalResult;
// } catch (error) {
// Logger.error(`An error occurred while creating the user, error: ${error.message},`);
// return '';
// }
// };
const createXmlRecord = async (dataset: Dataset, datasetNode: XMLBuilder): Promise<void> => {
    const domNode = await getDatasetXmlDomNode(dataset);
    if (!domNode) {
        throw new Error(`Failed to generate XML DOM node for dataset ${dataset.id}`);
    }

    // Enrich with landing page URL
    if (dataset.publish_id) {
        addLandingPageAttribute(domNode, dataset.publish_id.toString());
    }

    // Add data type specification
    addSpecInformation(domNode, `data-type:${dataset.type}`);

    // Add collection information
    if (dataset.collections) {
        for (const coll of dataset.collections) {
            const collRole = coll.collectionRole;
            addSpecInformation(domNode, `${collRole.oai_name}:${coll.number}`);
        }
    }

    datasetNode.import(domNode);
};
const getDatasetXmlDomNode = async (dataset: Dataset): Promise<XMLBuilder | null> => {
    const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();

    // Load cache relationship if not already loaded
    await dataset.load('xmlCache');
    if (dataset.xmlCache) {
        serializer.setCache(dataset.xmlCache);
    }

    // Generate or retrieve cached DOM document
    const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
    return xmlDocument;
};
const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
@ -190,6 +192,6 @@ const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
    domNode.att('landingpage', url);
};

const addSpecInformation = (domNode: XMLBuilder, information: string) => {
    domNode.ele('SetSpec').att('Value', information);
};
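Taken together, a minimal caller for the indexing flow above might look like the sketch below. The module name `searchIndexer` and the method name `indexDocument` are assumptions taken from the commented-out legacy code, not confirmed elsewhere in this diff; the index name mirrors the OPENSEARCH_CORE setting.

// Hypothetical ace-command body: index every published dataset into OpenSearch.
import Dataset from '#models/dataset';
import searchIndexer from '#services/search_indexer'; // illustrative import path

const datasets = await Dataset.query().where('server_state', 'published');
for (const dataset of datasets) {
    // Transforms the dataset XML via solr.sef.json and writes the JSON document.
    await searchIndexer.indexDocument(dataset, 'tethys-records');
}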
app/Library/XmlModel.ts Normal file
@ -0,0 +1,129 @@
import DocumentXmlCache from '#models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import Dataset from '#models/dataset';
import Strategy from './Strategy.js';
import { DateTime } from 'luxon';
import { builder } from 'xmlbuilder2';
/**
 * Configuration for serializing a dataset model to XML
 *
 * @interface Conf
 * @member {Dataset} model holds the current dataset model
 * @member {XMLBuilder} dom holds the current DOM representation
 * @member {Array<string>} excludeFields List of fields to skip on serialization.
 * @member {boolean} excludeEmpty True, if empty fields get excluded from serialization.
 * @member {string} baseUri Base URI for xlink:ref elements
 */
export interface Conf {
model: Dataset;
dom?: XMLBuilder;
excludeFields: Array<string>;
excludeEmpty: boolean;
baseUri: string;
}
export default class XmlModel {
private config: Conf;
// private strategy = null;
private cache: DocumentXmlCache | null = null;
private _caching = false;
private strategy: Strategy;
constructor(dataset: Dataset) {
// $this->strategy = new Strategy();// Opus_Model_Xml_Version1;
// $this->config = new Conf();
// $this->strategy->setup($this->config);
this.config = {
excludeEmpty: false,
baseUri: '',
excludeFields: [],
model: dataset,
};
this.strategy = new Strategy({
excludeEmpty: true,
baseUri: '',
excludeFields: [],
model: dataset,
});
}
set model(model: Dataset) {
this.config.model = model;
}
public excludeEmptyFields(): void {
this.config.excludeEmpty = true;
}
get xmlCache(): DocumentXmlCache | null {
return this.cache;
}
set xmlCache(cache: DocumentXmlCache) {
this.cache = cache;
}
get caching(): boolean {
return this._caching;
}
set caching(caching: boolean) {
this._caching = caching;
}
public async getDomDocument(): Promise<XMLBuilder | null> {
const dataset = this.config.model;
let domDocument: XMLBuilder | null = await this.getDomDocumentFromXmlCache();
if (domDocument == null) {
domDocument = await this.strategy.createDomDocument();
// domDocument = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
if (this._caching) {
// caching is desired:
this.cache = this.cache || new DocumentXmlCache();
this.cache.document_id = dataset.id;
this.cache.xml_version = 1; // (int)$this->strategy->getVersion();
this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
this.cache.xml_data = domDocument.end();
await this.cache.save();
}
const node = domDocument.find(
(n) => {
const test = n.node.nodeName == 'Rdr_Dataset';
return test;
},
false,
true,
)?.node;
if (node != undefined) {
domDocument = builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
}
}
return domDocument;
}
private async getDomDocumentFromXmlCache(): Promise<XMLBuilder | null> {
const dataset: Dataset = this.config.model;
if (!this.cache) {
return null;
}
//.toFormat('YYYY-MM-DD HH:mm:ss');
let date: DateTime = dataset.server_date_modified;
const actuallyCached: boolean = await DocumentXmlCache.hasValidEntry(dataset.id, date);
if (!actuallyCached) {
return null;
}
//cache is actual return it for oai:
try {
if (this.cache) {
return this.cache.getDomDocument();
} else {
return null;
}
} catch (error) {
return null;
}
}
}
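As a usage sketch, XmlModel is driven the same way the helper above drives it; nothing beyond the API shown in this file is assumed:

// Build the dataset DOM, serving it from document_xml_cache when fresh.
const xmlModel = new XmlModel(dataset);
xmlModel.excludeEmptyFields();
xmlModel.caching = true; // persist newly generated XML back into the cache table

await dataset.load('xmlCache');
if (dataset.xmlCache) {
    xmlModel.xmlCache = dataset.xmlCache; // reuse the existing cache row
}

const dom = await xmlModel.getDomDocument(); // XMLBuilder | null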
@ -1,54 +0,0 @@
// app/controllers/projects_controller.ts
import Project from '#models/project';
import type { HttpContext } from '@adonisjs/core/http';
import { createProjectValidator, updateProjectValidator } from '#validators/project';
export default class ProjectsController {
// GET /settings/projects
public async index({ inertia, auth }: HttpContext) {
const projects = await Project.all();
// return inertia.render('Admin/Project/Index', { projects });
return inertia.render('Admin/Project/Index', {
projects: projects,
can: {
edit: await auth.user?.can(['settings']),
create: await auth.user?.can(['settings']),
},
});
}
// GET /settings/projects/create
public async create({ inertia }: HttpContext) {
return inertia.render('Admin/Project/Create');
}
// POST /settings/projects
public async store({ request, response, session }: HttpContext) {
// Validate the request data
const data = await request.validateUsing(createProjectValidator);
await Project.create(data);
session.flash('success', 'Project created successfully');
return response.redirect().toRoute('settings.project.index');
}
// GET /settings/projects/:id/edit
public async edit({ params, inertia }: HttpContext) {
const project = await Project.findOrFail(params.id);
return inertia.render('Admin/Project/Edit', { project });
}
// PUT /settings/projects/:id
public async update({ params, request, response, session }: HttpContext) {
const project = await Project.findOrFail(params.id);
// Validate the request data
const data = await request.validateUsing(updateProjectValidator);
await project.merge(data).save();
session.flash('success', 'Project updated successfully');
return response.redirect().toRoute('settings.project.index');
}
}
@ -1,43 +0,0 @@
// import { Exception } from '@adonisjs/core/exceptions'
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http';
export default class DbHandlerException extends ExceptionHandler {
// constructor() {
// super(Logger)
// }
async handle(error: any, ctx: HttpContext) {
// Check for AggregateError type
if (error.type === 'AggregateError' && error.aggregateErrors) {
const dbErrors = error.aggregateErrors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
return ctx.response.status(503).json({
status: 'error',
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
details: {
code: error.code,
type: error.type,
ports: error.aggregateErrors.map((err: any) => ({
port: err.port,
address: err.address,
})),
},
});
}
}
// Handle simple ECONNREFUSED errors
if (error.code === 'ECONNREFUSED') {
return ctx.response.status(503).json({
status: 'error',
message: 'Database connection failed. Please ensure PostgreSQL is running.',
code: error.code,
});
}
return super.handle(error, ctx);
}
static status = 500;
}
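If this handler is meant to replace the default exception handler, it would be registered in start/kernel.ts roughly as follows. This is the standard AdonisJS v6 wiring; the import alias for this class is an assumption:

// start/kernel.ts (sketch)
import server from '@adonisjs/core/services/server';

// Route unhandled exceptions through DbHandlerException so that a refused
// PostgreSQL connection (ECONNREFUSED on port 5432) yields a 503 JSON response.
server.errorHandler(() => import('#exceptions/db_handler_exception'));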
@ -46,7 +46,6 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// return view.render('./errors/server-error', { error });
// },
// };

protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
    '404': (error, { inertia }) => {
        return inertia.render('Errors/ServerError', {
@ -59,47 +58,9 @@ export default class HttpExceptionHandler extends ExceptionHandler {
        return inertia.render('Errors/ServerError', {
            error: error.message,
            code: error.status,
        });
    },
// '500': (error, { inertia }) => {
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// },
'500..599': (error, { inertia }) => {
if (error.code === 'ECONNREFUSED') {
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
return inertia.render('Errors/postgres_error', {
status: 'error',
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
details: {
code: error.code,
type: error.status,
ports: error.errors.map((err: any) => ({
port: err.port,
address: err.address,
})),
},
});
}
} else {
return inertia.render('Errors/ServerError', {
error: error.message,
code: error.status,
});
}
    },
};

// constructor() {
@ -107,7 +68,7 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// }

public async handle(error: any, ctx: HttpContext) {
    const { response, request, session, inertia } = ctx;

    /**
     * Handle failed authentication attempt
@ -121,47 +82,6 @@ export default class HttpExceptionHandler extends ExceptionHandler {
    // return response.redirect('/dashboard');
    // }
// Handle Axios errors
if (error.code === 'ECONNREFUSED') {
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
}
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
    // https://github.com/inertiajs/inertia-laravel/issues/56
    // let test = response.getStatus(); //200
    // let header = request.header('X-Inertia'); // true
@ -178,21 +98,12 @@ export default class HttpExceptionHandler extends ExceptionHandler {
    // ->toResponse($request)
    // ->setStatusCode($response->status());
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
// Dynamically change the error templates based on the absence of X-Inertia header
// if (!ctx.request.header('X-Inertia')) {
//     this.statusPages = {
//         '401..403': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
//         '404': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
//         '500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
//     };
// }
@ -4,8 +4,7 @@ import { builder, create } from 'xmlbuilder2';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import db from '@adonisjs/lucid/services/db';
import { DateTime } from 'luxon';
import type { BelongsTo } from '@adonisjs/lucid/types/relations';
import logger from '@adonisjs/core/services/logger';
export default class DocumentXmlCache extends BaseModel {
    public static namingStrategy = new SnakeCaseNamingStrategy();
@ -67,38 +66,33 @@ export default class DocumentXmlCache extends BaseModel {
}

/**
 * Check if a valid (non-stale) cache entry exists
 * Cache is valid only if it was created AFTER the dataset's last modification
 *
 * @param datasetId - The dataset ID to check
 * @param datasetServerDateModified - The dataset's last modification timestamp
 * @returns true if valid cache exists, false otherwise
 */
// public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
// // const formattedDate = dayjs(datasetServerDateModified).format('YYYY-MM-DD HH:mm:ss');
// const query = Database.from(this.table)
// .where('document_id', datasetId)
// .where('server_date_modified', '2023-08-17 16:51:03')
// .first();
// const row = await query;
// return !!row;
// }
// Assuming 'DocumentXmlCache' has a table with a 'server_date_modified' column in your database
public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
    const serverDateModifiedString: string = datasetServerDateModified.toFormat('yyyy-MM-dd HH:mm:ss'); // format DateTime as an SQL timestamp string

    const row = await db
        .from(this.table)
        .where('document_id', datasetId)
        .where('server_date_modified', '>', serverDateModifiedString) // cache entry must be newer than the dataset's last modification
        .first();

    const isValid = !!row;

    if (isValid) {
        logger.debug(`Valid cache found for dataset ${datasetId}`);
    } else {
        logger.debug(`No valid cache for dataset ${datasetId} (dataset modified: ${serverDateModifiedString})`);
    }

    return isValid;
}
/**
* Invalidate (delete) cache entry
*/
public async invalidate(): Promise<void> {
await this.delete();
logger.debug(`Invalidated cache for document ${this.document_id}`);
    }
}
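A short sketch of how this entry check is meant to be consumed; the surrounding variables are illustrative:

// Serve the cached DOM only when the cache row is newer than the dataset itself.
const isFresh = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
const dom = isFresh && dataset.xmlCache ? await dataset.xmlCache.getDomDocument() : null;
if (!dom) {
    // cache is stale or absent: regenerate the XML and save a new cache row
}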
@ -209,15 +209,6 @@ export default class Dataset extends DatasetExtension {
    return mainTitle ? mainTitle.value : null;
}
@computed({
serializeAs: 'doi_identifier',
})
public get doiIdentifier() {
const identifier: DatasetIdentifier = this.identifier;
return identifier ? identifier.value : null;
}
@manyToMany(() => Person, {
    pivotForeignKey: 'document_id',
    pivotRelatedForeignKey: 'person_id',
@ -3,12 +3,12 @@ import { column, hasMany, belongsTo, SnakeCaseNamingStrategy, computed } from '@
import HashValue from './hash_value.js';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
// import { Buffer } from 'buffer';
import * as fs from 'fs';
import crypto from 'crypto';
// import Drive from '@ioc:Adonis/Core/Drive';
// import Drive from '@adonisjs/drive';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import type { HasMany } from '@adonisjs/lucid/types/relations';
import type { BelongsTo } from '@adonisjs/lucid/types/relations';
@ -88,8 +88,7 @@ export default class File extends BaseModel {
    serializeAs: 'filePath',
})
public get filePath() {
    // return `/storage/app/public/${this.pathName}`;
    return `/storage/app/data/${this.pathName}`;
}
@ -166,7 +165,7 @@ export default class File extends BaseModel {
public async delete() {
    if (this.pathName) {
        // Delete file from additional storage
        await drive.use('local').delete(this.pathName);
    }
    // Call the original delete method of the BaseModel to remove the record from the database
@ -16,14 +16,9 @@ export default class MimeType extends BaseModel {
@column({})
public name: string;

// 1 : n file_extensions are separated by '|' in the database
@column({})
public file_extension: string;

// 1 : n alternate_mimetype are separated by '|' in the database
@column({})
public alternate_mimetype: string;

@column({})
public enabled: boolean;
@ -3,7 +3,7 @@ import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { ManyToMany } from '@adonisjs/lucid/types/relations';

export default class Person extends BaseModel {
    public static namingStrategy = new SnakeCaseNamingStrategy();
@ -30,7 +30,7 @@ export default class Person extends BaseModel {
@column({})
public lastName: string;

@column({ columnName: 'identifier_orcid' })
public identifierOrcid: string;

@column({})
@ -51,7 +51,7 @@ export default class Person extends BaseModel {
    serializeAs: 'name',
})
public get fullName() {
    return [this.firstName, this.lastName].filter(Boolean).join(' ');
}

// @computed()
@ -64,12 +64,10 @@ export default class Person extends BaseModel {
// return '2023-03-21 08:45:00';
// }
@computed({
    serializeAs: 'dataset_count',
})
public get datasetCount() {
    const stock = this.$extras.datasets_count; // aggregate from a withCount('datasets') query
    return Number(stock);
}
@computed()
@ -78,16 +76,6 @@ export default class Person extends BaseModel {
    return contributor_type;
}
@computed({ serializeAs: 'allow_email_contact' })
public get allowEmailContact() {
// If the datasets relation is missing or empty, return false instead of null.
if (!this.datasets || this.datasets.length === 0) {
return false;
}
// Otherwise return the pivot attribute from the first related dataset.
return this.datasets[0].$extras?.pivot_allow_email_contact;
}
@manyToMany(() => Dataset, {
    pivotForeignKey: 'person_id',
    pivotRelatedForeignKey: 'document_id',
@ -95,34 +83,4 @@ export default class Person extends BaseModel {
    pivotColumns: ['role', 'sort_order', 'allow_email_contact'],
})
public datasets: ManyToMany<typeof Dataset>;
// public toJSON() {
// const json = super.toJSON();
// // Check if this person is loaded through a pivot relationship with sensitive roles
// const pivotRole = this.$extras?.pivot_role;
// if (pivotRole === 'author' || pivotRole === 'contributor') {
// // Remove sensitive information for public-facing roles
// delete json.email;
// // delete json.identifierOrcid;
// }
// return json;
// }
// @afterFind()
// public static async afterFindHook(person: Person) {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// }
// @afterFetch()
// public static async afterFetchHook(persons: Person[]) {
// persons.forEach(person => {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// });
// }
}
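Note that datasetCount reads $extras.datasets_count, so it only has a value when the query aggregates the relation. Standard Lucid usage, shown here as a sketch:

const persons = await Person.query().withCount('datasets'); // fills $extras.datasets_count
persons[0].datasetCount; // serialized as 'dataset_count'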
@ -1,57 +0,0 @@
/**
* Qs module config
*/
type QueryStringConfig = {
depth?: number
allowPrototypes?: boolean
plainObjects?: boolean
parameterLimit?: number
arrayLimit?: number
ignoreQueryPrefix?: boolean
delimiter?: RegExp | string
allowDots?: boolean
charset?: 'utf-8' | 'iso-8859-1' | undefined
charsetSentinel?: boolean
interpretNumericEntities?: boolean
parseArrays?: boolean
comma?: boolean
}
/**
* Base config used by all types
*/
type BodyParserBaseConfig = {
encoding: string
limit: string | number
types: string[]
}
/**
* Body parser config for parsing JSON requests
*/
export type BodyParserJSONConfig = BodyParserBaseConfig & {
strict: boolean
convertEmptyStringsToNull: boolean
}
/**
* Parser config for parsing form data
*/
export type BodyParserFormConfig = BodyParserBaseConfig & {
queryString: QueryStringConfig
convertEmptyStringsToNull: boolean
}
/**
* Parser config for parsing raw body (untouched)
*/
export type BodyParserRawConfig = BodyParserBaseConfig
/**
* Body parser config for all supported form types
*/
export type BodyParserConfig = {
allowedMethods: string[]
json: BodyParserJSONConfig
form: BodyParserFormConfig
raw: BodyParserRawConfig
multipart: BodyParserMultipartConfig
}
@ -1,6 +1,6 @@
import { DateTime } from 'luxon';
import { withAuthFinder } from '@adonisjs/auth/mixins/lucid';
import { column, manyToMany, hasMany, SnakeCaseNamingStrategy, computed, beforeFetch, beforeFind } from '@adonisjs/lucid/orm';
import hash from '@adonisjs/core/services/hash';
import Role from './role.js';
import db from '@adonisjs/lucid/services/db';
@ -49,6 +49,7 @@ export default class User extends compose(BaseModel, AuthFinder) {
@column()
public login: string;

@column()
public firstName: string;
@ -86,8 +87,17 @@ export default class User extends compose(BaseModel, AuthFinder) {
@column({})
public state: number;

@column({})
public avatar: string;

// @hasOne(() => TotpSecret, {
// foreignKey: 'user_id',
// })
// public totp_secret: HasOne<typeof TotpSecret>;
// @beforeSave()
// public static async hashPassword(user: User) {
// if (user.$dirty.password) {
// user.password = await hash.use('laravel').make(user.password);
// }
// }
public get isTwoFactorEnabled(): boolean {
    return Boolean(this?.twoFactorSecret && this.state == TotpState.STATE_ENABLED);
@ -111,29 +121,6 @@ export default class User extends compose(BaseModel, AuthFinder) {
})
public backupcodes: HasMany<typeof BackupCode>;
@computed({
serializeAs: 'is_admin',
})
public get isAdmin(): boolean {
    return this.roles?.map((role: Role) => role.name).includes('administrator') ?? false;
}
// public toJSON() {
// return {
// ...super.toJSON(),
// roles: []
// };
// }
@beforeFind()
@beforeFetch()
public static preloadRoles(user: User) {
user.preload('roles', (builder) => {
builder.select(['id', 'name', 'display_name', 'description']);
});
}
public async getBackupCodes(this: User): Promise<BackupCode[]> {
    const test = await this.related('backupcodes').query();
    // return test.map((role) => role.code);
@ -1,16 +1,3 @@
import { join, isAbsolute } from 'node:path';
import type { BodyParserConfig } from '#models/types';
import { createId } from '@paralleldrive/cuid2';
import { tmpdir } from 'node:os';
import config from '@adonisjs/core/services/config';
import Dataset from '#models/dataset';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import Person from '#models/person';
interface Dictionary {
[index: string]: string;
}
export function sum(a: number, b: number): number {
    return a + b;
}
@ -37,88 +24,3 @@ export function preg_match(regex: RegExp, str: string) {
    const result: boolean = regex.test(str);
    return result;
}
/**
* Returns the tmp path for storing the files temporarly
*/
export function getTmpPath(config: BodyParserConfig['multipart']): string {
if (typeof config.tmpFileName === 'function') {
const tmpPath = config.tmpFileName();
return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
}
return join(tmpdir(), createId());
}
/**
* Returns config for a given type
*/
export function getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
const bodyParserConfig: BodyParserConfig = config.get('bodyparser');
const configType = bodyParserConfig[type];
return configType;
}
export function parseBytesSize(size: string): number {
const units: Record<string, number> = {
kb: 1024,
mb: 1024 * 1024,
gb: 1024 * 1024 * 1024,
tb: 1024 * 1024 * 1024 * 1024,
};
const match = size.match(/^(\d+)(kb|mb|gb|tb)$/i); // Regex to match size format
if (!match) {
throw new Error('Invalid size format');
}
const [, value, unit] = match;
return parseInt(value) * units[unit.toLowerCase()];
}
// Helper function to format bytes as human-readable text
export function formatBytes(bytes: number): string {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
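// For example, the two helpers above round-trip as follows (illustrative values):
//   parseBytesSize('512mb') // 536870912
//   formatBytes(536870912)  // '512 MB'
//   formatBytes(1536)       // '1.5 KB'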
export async function savePersons(dataset: Dataset, persons: any[], role: string, trx: TransactionClientContract) {
for (const [key, person] of persons.entries()) {
const pivotData = {
role: role,
sort_order: key + 1,
allow_email_contact: false,
...extractPivotAttributes(person), // Merge pivot attributes here
};
if (person.id !== undefined) {
await dataset
.useTransaction(trx)
.related('persons')
.attach({
[person.id]: pivotData,
});
} else {
const dataPerson = new Person();
dataPerson.fill(person);
await dataset.useTransaction(trx).related('persons').save(dataPerson, false, pivotData);
}
}
}
// Helper function to extract pivot attributes from a person object
function extractPivotAttributes(person: any) {
const pivotAttributes: Dictionary = {};
for (const key in person) {
if (key.startsWith('pivot_')) {
// pivotAttributes[key] = person[key];
const cleanKey = key.replace('pivot_', ''); // Remove 'pivot_' prefix
pivotAttributes[cleanKey] = person[key];
}
}
return pivotAttributes;
}
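For reference, this is the person payload shape savePersons expects, with pivot_-prefixed keys hoisted into the pivot row. Values are illustrative and `trx` is assumed to be an open transaction:

await savePersons(dataset, [
    // existing person: attach by id; pivot_allow_email_contact overrides the default false
    { id: 42, pivot_allow_email_contact: true },
    // new person: created and linked in one step
    { email: 'new.author@example.org', first_name: 'Ada', last_name: 'Lovelace' },
], 'author', trx);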
@ -1,7 +1,6 @@
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
import { TitleTypes, DescriptionTypes, ContributorTypes, ReferenceIdentifierTypes, RelationTypes } from '#contracts/enums';
import dayjs from 'dayjs';

// import MimeType from '#models/mime_type';
// const enabledExtensions = await MimeType.query().select('file_extension').where('enabled', true).exec();
@ -40,8 +39,7 @@ export const createDatasetValidator = vine.compile(
            .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
        }),
    )
    // .minLength(2)
    .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
descriptions: vine
    .array(
        vine.object({
@ -55,8 +53,7 @@ export const createDatasetValidator = vine.compile(
            .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
        }),
    )
    // .minLength(1),
    .arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
authors: vine
    .array(
        vine.object({
@ -67,9 +64,8 @@ export const createDatasetValidator = vine.compile(
            .email()
            .normalizeEmail()
            .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
        first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
        last_name: vine.string().trim().minLength(3).maxLength(255),
        identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
        }),
    )
    .minLength(1)
@ -84,10 +80,9 @@ export const createDatasetValidator = vine.compile(
            .email()
            .normalizeEmail()
            .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
        first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
        last_name: vine.string().trim().minLength(3).maxLength(255),
        pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
        identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
        }),
    )
    .distinct('email')
@ -130,7 +125,7 @@ export const createDatasetValidator = vine.compile(
references: vine
    .array(
        vine.object({
            value: vine.string().trim().minLength(3).maxLength(255).validateReference({ typeField: 'type' }),
            type: vine.enum(Object.values(ReferenceIdentifierTypes)),
            relation: vine.enum(Object.values(RelationTypes)),
            label: vine.string().trim().minLength(2).maxLength(255),
@ -191,8 +186,7 @@ export const updateDatasetValidator = vine.compile(
            .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
        }),
    )
    // .minLength(2)
    .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
descriptions: vine
    .array(
        vine.object({
@ -206,7 +200,7 @@ export const updateDatasetValidator = vine.compile(
            .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
        }),
    )
    .arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
authors: vine
    .array(
        vine.object({
@ -217,9 +211,8 @@ export const updateDatasetValidator = vine.compile(
            .email()
            .normalizeEmail()
            .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
        first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
        last_name: vine.string().trim().minLength(3).maxLength(255),
        identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
        }),
    )
    .minLength(1)
@ -234,9 +227,8 @@ export const updateDatasetValidator = vine.compile(
            .email()
            .normalizeEmail()
            .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
        first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
        last_name: vine.string().trim().minLength(3).maxLength(255),
        identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
        pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
        }),
    )
@ -280,7 +272,7 @@ export const updateDatasetValidator = vine.compile(
references: vine
    .array(
        vine.object({
            value: vine.string().trim().minLength(3).maxLength(255).validateReference({ typeField: 'type' }),
            type: vine.enum(Object.values(ReferenceIdentifierTypes)),
            relation: vine.enum(Object.values(RelationTypes)),
            label: vine.string().trim().minLength(2).maxLength(255),
@ -310,149 +302,21 @@ export const updateDatasetValidator = vine.compile(
            .fileScan({ removeInfected: true }),
    )
    .dependentArrayMinLength({ dependentArray: 'fileInputs', min: 1 }),
fileInputs: vine
    .array(
        vine.object({
            label: vine.string().trim().maxLength(100),
        }),
    )
    .optional(),
}),
);
export const updateEditorDatasetValidator = vine.compile(
    vine.object({
        // first step
        language: vine
            .string()
            .trim()
.regex(/^[a-zA-Z0-9]+$/),
licenses: vine.array(vine.number()).minLength(1), // define at least one license for the new dataset
rights: vine.string().in(['true']),
// second step
type: vine.string().trim().minLength(3).maxLength(255),
creating_corporation: vine.string().trim().minLength(3).maxLength(255),
titles: vine
.array(
vine.object({
value: vine.string().trim().minLength(3).maxLength(255),
type: vine.enum(Object.values(TitleTypes)),
language: vine
.string()
.trim()
.minLength(2)
.maxLength(255)
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(2)
.arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
descriptions: vine
.array(
vine.object({
value: vine.string().trim().minLength(3).maxLength(2500),
type: vine.enum(Object.values(DescriptionTypes)),
language: vine
.string()
.trim()
.minLength(2)
.maxLength(255)
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
.arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
authors: vine
.array(
vine.object({
email: vine
.string()
.trim()
.maxLength(255)
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.minLength(1)
.distinct('email'),
contributors: vine
.array(
vine.object({
email: vine
.string()
.trim()
.maxLength(255)
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
}),
)
.distinct('email')
.optional(),
// third step
project_id: vine.number().optional(),
// embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
embargo_date: vine
.date({
formats: ['YYYY-MM-DD'],
})
.afterOrEqual((_field) => {
return dayjs().add(10, 'day').format('YYYY-MM-DD');
})
.optional(),
coverage: vine.object({
x_min: vine.number(),
x_max: vine.number(),
y_min: vine.number(),
y_max: vine.number(),
elevation_absolut: vine.number().positive().optional(),
elevation_min: vine.number().positive().optional().requiredIfExists('elevation_max'),
elevation_max: vine.number().positive().optional().requiredIfExists('elevation_min'),
// type: vine.enum(Object.values(DescriptionTypes)),
depth_absolut: vine.number().negative().optional(),
depth_min: vine.number().negative().optional().requiredIfExists('depth_max'),
depth_max: vine.number().negative().optional().requiredIfExists('depth_min'),
time_abolute: vine.date({ formats: { utc: true } }).optional(),
time_min: vine
.date({ formats: { utc: true } })
.beforeField('time_max')
.optional()
.requiredIfExists('time_max'),
time_max: vine
.date({ formats: { utc: true } })
.afterField('time_min')
.optional()
.requiredIfExists('time_min'),
}),
references: vine
.array(
vine.object({
value: vine.string().trim().minLength(3).maxLength(255).validateReference({ typeField: 'type' }),
type: vine.enum(Object.values(ReferenceIdentifierTypes)),
relation: vine.enum(Object.values(RelationTypes)),
label: vine.string().trim().minLength(2).maxLength(255),
}),
)
.optional(),
subjects: vine
.array(
vine.object({
value: vine.string().trim().minLength(3).maxLength(255),
// pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
language: vine.string().trim().minLength(2).maxLength(255),
}),
)
.minLength(3)
.distinct('value'),
}),
);
let messagesProvider = new SimpleMessagesProvider({
    'minLength': '{{ field }} must be at least {{ min }} characters long',
@ -504,10 +368,8 @@ let messagesProvider = new SimpleMessagesProvider({
    'files.array.minLength': 'At least {{ min }} file upload is required.',
    'files.*.size': 'file size is too big',
    'files.*.extnames': 'file extension is not supported',
'embargo_date.date.afterOrEqual': `Embargo date must be on or after ${dayjs().add(10, 'day').format('DD.MM.YYYY')}`,
});

createDatasetValidator.messagesProvider = messagesProvider;
updateDatasetValidator.messagesProvider = messagesProvider;
updateEditorDatasetValidator.messagesProvider = messagesProvider;
// export default createDatasetValidator;
@ -1,28 +0,0 @@
// app/validators/project.ts
import vine from '@vinejs/vine';
export const createProjectValidator = vine.compile(
vine.object({
        label: vine.string().trim().minLength(1).maxLength(50).regex(/^[a-z0-9-]+$/),
name: vine
.string()
.trim()
.minLength(3)
.maxLength(255)
.regex(/^[a-zA-Z0-9äöüßÄÖÜ\s-]+$/),
description: vine.string().trim().maxLength(255).minLength(5).optional(),
}),
);
export const updateProjectValidator = vine.compile(
vine.object({
// label is NOT included since it's readonly
name: vine
.string()
.trim()
.minLength(3)
.maxLength(255)
.regex(/^[a-zA-Z0-9äöüßÄÖÜ\s-]+$/),
description: vine.string().trim().maxLength(255).minLength(5).optional(),
}),
);
@ -8,20 +8,20 @@ export const createRoleValidator = vine.compile(
vine.object({
    name: vine
        .string()
        .trim()
        .minLength(3)
        .maxLength(255)
        .isUnique({ table: 'roles', column: 'name' })
        .regex(/^[a-zA-Z0-9]+$/), // Must be alphanumeric
    display_name: vine
        .string()
        .trim()
        .minLength(3)
        .maxLength(255)
        .isUnique({ table: 'roles', column: 'display_name' })
        .regex(/^[a-zA-Z0-9]+$/),
    description: vine.string().trim().escape().minLength(3).maxLength(255).optional(),
    permissions: vine.array(vine.number()).minLength(1), // At least one permission required
}),
);
@ -29,28 +29,21 @@ export const updateRoleValidator = vine.withMetaData<{ roleId: number }>().compi
vine.object({
    name: vine
        .string()
        .trim()
        .minLength(3)
        .maxLength(255)
        .isUnique({
            table: 'roles',
            column: 'name',
            whereNot: (field) => field.meta.roleId,
        })
        .regex(/^[a-zA-Z0-9]+$/),
    display_name: vine
        .string()
        .trim()
        .minLength(3)
        .maxLength(255)
        .isUnique({
            table: 'roles',
            column: 'display_name',
            whereNot: (field) => field.meta.roleId,
        })
        .regex(/^[a-zA-Z0-9]+$/),
    description: vine.string().trim().escape().minLength(3).maxLength(255).optional(),
    permissions: vine.array(vine.number()).minLength(1), // At least one permission required
}),
);
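Because updateRoleValidator is compiled withMetaData, callers must supply the roleId consumed by the whereNot callbacks above. Standard VineJS metadata usage, with illustrative request/params names:

const payload = await updateRoleValidator.validate(request.all(), {
    meta: { roleId: params.id }, // excluded from both isUnique checks
});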
@ -16,7 +16,7 @@ export const createUserValidator = vine.compile(
        first_name: vine.string().trim().minLength(3).maxLength(255),
        last_name: vine.string().trim().minLength(3).maxLength(255),
        email: vine.string().maxLength(255).email().normalizeEmail().isUnique({ table: 'accounts', column: 'email' }),
        new_password: vine.string().confirmed({ confirmationField: 'password_confirmation' }).trim().minLength(3).maxLength(60),
        roles: vine.array(vine.number()).minLength(1), // define at least one role for the new user
    }),
);
@ -42,7 +42,7 @@ export const updateUserValidator = vine.withMetaData<{ objId: number }>().compil
            .email()
            .normalizeEmail()
            .isUnique({ table: 'accounts', column: 'email', whereNot: (field) => field.meta.objId }),
        new_password: vine.string().confirmed({ confirmationField: 'password_confirmation' }).trim().minLength(3).maxLength(60).optional(),
        roles: vine.array(vine.number()).minLength(1), // define at least one role for the new user
    }),
);
@ -142,7 +142,7 @@ export class VanillaErrorReporter implements ErrorReporterContract {
// }

this.hasErrors = true;

// var test = field.getFieldPath();
// this.errors.push(error);
// if (this.errors[error.field]) {
@ -5,23 +5,7 @@ LogSyslog no
LogVerbose yes
DatabaseDirectory /var/lib/clamav
LocalSocket /var/run/clamav/clamd.socket
# LocalSocketMode 666
# Optional: allow multiple threads
MaxThreads 20
# Disable TCP socket
# TCPSocket 0
# TCP port address.
# Default: no
# TCPSocket 3310
# TCP address.
# By default we bind to INADDR_ANY, probably not wise.
# Enable the following to provide some degree of protection
# from the outside world.
# Default: no
# TCPAddr 127.0.0.1
Foreground no
PidFile /var/run/clamav/clamd.pid
# LocalSocketGroup node # Changed from 'clamav'
# User node # Changed from 'clamav' - clamd runs as clamav user
@ -1,482 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command fix-dataset-cross-references
| DONE: create commands/fix_dataset_cross_references.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import type { CommandOptions } from '@adonisjs/core/types/ace';
import { DateTime } from 'luxon';
import Dataset from '#models/dataset';
import DatasetReference from '#models/dataset_reference';
import AppConfig from '#models/appconfig';
// import env from '#start/env';
interface MissingCrossReference {
sourceDatasetId: number;
targetDatasetId: number;
sourcePublishId: number | null;
targetPublishId: number | null;
sourceDoi: string | null;
targetDoi: string | null;
referenceType: string;
relation: string;
doi: string | null;
reverseRelation: string;
sourceReferenceLabel: string | null;
}
export default class DetectMissingCrossReferences extends BaseCommand {
static commandName = 'detect:missing-cross-references';
static description = 'Detect missing bidirectional cross-references between versioned datasets';
public static needsApplication = true;
@flags.boolean({ alias: 'f', description: 'Fix missing cross-references automatically' })
public fix: boolean = false;
@flags.boolean({ alias: 'v', description: 'Verbose output' })
public verbose: boolean = false;
@flags.number({ alias: 'p', description: 'Filter by specific publish_id (source or target dataset)' })
public publish_id?: number;
// example: node ace detect:missing-cross-references --verbose -p 227 //if you want to filter by specific publish_id with details
// example: node ace detect:missing-cross-references --verbose
// example: node ace detect:missing-cross-references --fix -p 227 //if you want to filter by specific publish_id and fix it
// example: node ace detect:missing-cross-references
public static options: CommandOptions = {
startApp: true,
staysAlive: false,
};
// Define the allowed relations that we want to process
private readonly ALLOWED_RELATIONS = [
'IsNewVersionOf',
'IsPreviousVersionOf',
'IsVariantFormOf',
'IsOriginalFormOf',
'Continues',
'IsContinuedBy',
'HasPart',
'IsPartOf',
];
// private readonly ALLOWED_RELATIONS = ['IsPreviousVersionOf', 'IsOriginalFormOf'];
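    // For orientation, the reverse of each allowed relation, following the DataCite
    // relationType vocabulary (getReverseRelation itself lies outside this excerpt):
    //   IsNewVersionOf  <-> IsPreviousVersionOf
    //   IsVariantFormOf <-> IsOriginalFormOf
    //   Continues       <-> IsContinuedBy
    //   HasPart         <-> IsPartOf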
async run() {
this.logger.info('🔍 Detecting missing cross-references...');
this.logger.info(`📋 Processing only these relations: ${this.ALLOWED_RELATIONS.join(', ')}`);
if (this.publish_id) {
this.logger.info(`Filtering by publish_id: ${this.publish_id}`);
}
try {
const missingReferences = await this.findMissingCrossReferences();
// Store count in AppConfig if not fixing and count >= 1
if (!this.fix && missingReferences.length >= 1) {
await this.storeMissingCrossReferencesCount(missingReferences.length);
}
if (missingReferences.length === 0) {
const filterMsg = this.publish_id ? ` for publish_id ${this.publish_id}` : '';
this.logger.success(`All cross-references are properly linked for the specified relations${filterMsg}!`);
// Clear the count if no missing references
if (!this.fix) {
await this.storeMissingCrossReferencesCount(0);
}
return;
}
const filterMsg = this.publish_id ? ` (filtered by publish_id ${this.publish_id})` : '';
this.logger.warning(`Found ${missingReferences.length} missing cross-reference(s)${filterMsg}:`);
// Show brief list if not verbose mode
if (!this.verbose) {
for (const missing of missingReferences) {
const sourceDoi = missing.sourceDoi ? ` DOI: ${missing.sourceDoi}` : '';
const targetDoi = missing.targetDoi ? ` DOI: ${missing.targetDoi}` : '';
this.logger.info(
`Dataset ${missing.sourceDatasetId} (Publish ID: ${missing.sourcePublishId}${sourceDoi}) ${missing.relation} Dataset ${missing.targetDatasetId} (Publish ID: ${missing.targetPublishId}${targetDoi}) → missing reverse: ${missing.reverseRelation}`,
);
}
} else {
// Verbose mode - show detailed info
for (const missing of missingReferences) {
this.logger.info(
`Dataset ${missing.sourceDatasetId} references ${missing.targetDatasetId}, but reverse reference is missing`,
);
this.logger.info(` - Reference type: ${missing.referenceType}`);
this.logger.info(` - Relation: ${missing.relation}`);
this.logger.info(` - DOI: ${missing.doi}`);
}
}
if (this.fix) {
await this.fixMissingReferences(missingReferences);
// Clear the count after fixing
await this.storeMissingCrossReferencesCount(0);
this.logger.success('All missing cross-references have been fixed!');
} else {
if (this.verbose) {
this.printMissingReferencesList(missingReferences);
}
this.logger.info('💡 Run with --fix flag to automatically create missing cross-references');
if (this.publish_id) {
this.logger.info(`🎯 Currently filtering by publish_id: ${this.publish_id}`);
}
}
} catch (error) {
this.logger.error('Error detecting missing cross-references:', error);
process.exit(1);
}
}
private async storeMissingCrossReferencesCount(count: number): Promise<void> {
try {
await AppConfig.updateOrCreate(
{
appid: 'commands',
configkey: 'missing_cross_references_count',
},
{
configvalue: count.toString(),
},
);
this.logger.info(`📊 Stored missing cross-references count in database: ${count}`);
} catch (error) {
this.logger.error('Failed to store missing cross-references count:', error);
}
}
private async findMissingCrossReferences(): Promise<MissingCrossReference[]> {
const missingReferences: {
sourceDatasetId: number;
targetDatasetId: number;
sourcePublishId: number | null;
targetPublishId: number | null;
sourceDoi: string | null;
targetDoi: string | null;
referenceType: string;
relation: string;
doi: string | null;
reverseRelation: string;
sourceReferenceLabel: string | null;
}[] = [];
this.logger.info('📊 Querying dataset references...');
// Find all references that point to Tethys datasets (DOI or URL containing tethys DOI)
// Only from datasets that are published AND only for allowed relations
const tethysReferencesQuery = DatasetReference.query()
.whereIn('type', ['DOI', 'URL'])
.whereIn('relation', this.ALLOWED_RELATIONS) // Only process allowed relations
.where((query) => {
query.where('value', 'like', '%doi.org/10.24341/tethys.%').orWhere('value', 'like', '%tethys.at/dataset/%');
})
.preload('dataset', (datasetQuery) => {
datasetQuery.preload('identifier');
})
.whereHas('dataset', (datasetQuery) => {
datasetQuery.where('server_state', 'published');
});
if (typeof this.publish_id === 'number') {
tethysReferencesQuery.whereHas('dataset', (datasetQuery) => {
datasetQuery.where('publish_id', this.publish_id as number);
});
}
const tethysReferences = await tethysReferencesQuery.exec();
this.logger.info(`🔗 Found ${tethysReferences.length} Tethys references from published datasets (allowed relations only)`);
let processedCount = 0;
let skippedCount = 0;
for (const reference of tethysReferences) {
processedCount++;
// if (this.verbose && processedCount % 10 === 0) {
// this.logger.info(`📈 Processed ${processedCount}/${tethysReferences.length} references...`);
// }
// Double-check that this relation is in our allowed list (safety check)
if (!this.ALLOWED_RELATIONS.includes(reference.relation)) {
skippedCount++;
if (this.verbose) {
this.logger.info(`⏭️ Skipping relation "${reference.relation}" - not in allowed list`);
}
continue;
}
// Extract dataset publish_id from DOI or URL
// const targetDatasetPublish = this.extractDatasetPublishIdFromReference(reference.value);
// Extract DOI from reference URL
const doi = this.extractDoiFromReference(reference.value);
// if (!targetDatasetPublish) {
// if (this.verbose) {
// this.logger.warning(`Could not extract publish ID from: ${reference.value}`);
// }
// continue;
// }
if (!doi) {
if (this.verbose) {
this.logger.warning(`Could not extract DOI from: ${reference.value}`);
}
continue;
}
// // Check if target dataset exists and is published
// const targetDataset = await Dataset.query()
// .where('publish_id', targetDatasetPublish)
// .where('server_state', 'published')
// .preload('identifier')
// .first();
// Check if target dataset exists and is published by querying via identifier
const targetDataset = await Dataset.query()
.where('server_state', 'published')
.whereHas('identifier', (query) => {
query.where('value', doi);
})
.preload('identifier')
.first();
if (!targetDataset) {
if (this.verbose) {
this.logger.warning(`⚠️ Target dataset with DOI ${doi} not found or not published`);
}
continue;
}
// Ensure we have a valid source dataset with proper preloading
if (!reference.dataset) {
this.logger.warning(`⚠️ Source dataset ${reference.document_id} not properly loaded, skipping...`);
continue;
}
// Check if reverse reference exists
const reverseReferenceExists = await this.checkReverseReferenceExists(
targetDataset.id,
reference.document_id,
reference.relation,
reference.dataset.identifier.value
);
if (!reverseReferenceExists) {
const reverseRelation = this.getReverseRelation(reference.relation);
if (reverseRelation) {
// Only add if we have a valid reverse relation
missingReferences.push({
sourceDatasetId: reference.document_id,
targetDatasetId: targetDataset.id,
sourcePublishId: reference.dataset.publish_id || null,
targetPublishId: targetDataset.publish_id || null,
referenceType: reference.type,
relation: reference.relation,
doi: reference.value,
reverseRelation: reverseRelation,
sourceDoi: reference.dataset.identifier ? reference.dataset.identifier.value : null,
targetDoi: targetDataset.identifier ? targetDataset.identifier.value : null,
sourceReferenceLabel: reference.label || null,
});
}
}
}
this.logger.info(`✅ Processed ${processedCount} references (${skippedCount} skipped due to relation filtering)`);
return missingReferences;
}
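/**
 * Extracts the bare DOI from a reference value. Illustrative expectations:
 *   'https://doi.org/10.24341/tethys.99.2' -> '10.24341/tethys.99.2'
 *   'https://tethys.at/dataset/107'        -> null (no DOI present)
 */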
private extractDoiFromReference(reference: string): string | null {
    // Match an actual DOI (e.g. "10.24341/tethys.99.2"), with or without the https://doi.org/ prefix
    const doiPattern = /(10\.\d{4,9}\/\S+)/i;
    const match = reference.match(doiPattern);
    if (match && match[1]) {
        return match[1]; // Returns just the DOI, e.g. "10.24341/tethys.99.2"
    }
    return null;
}
private extractDatasetPublishIdFromReference(value: string): number | null {
// Extract from DOI: https://doi.org/10.24341/tethys.107 -> 107
const doiMatch = value.match(/10\.24341\/tethys\.(\d+)/);
if (doiMatch) {
return parseInt(doiMatch[1]);
}
// Extract from URL: https://tethys.at/dataset/107 -> 107
const urlMatch = value.match(/tethys\.at\/dataset\/(\d+)/);
if (urlMatch) {
return parseInt(urlMatch[1]);
}
return null;
}
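/**
 * Checks whether the target dataset already carries an incoming reference with the
 * expected reverse relation whose value points back at the source dataset's DOI.
 * Relations without a defined reverse mapping are treated as existing, so they are skipped.
 */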
private async checkReverseReferenceExists(
targetDatasetId: number,
sourceDatasetId: number,
originalRelation: string,
sourceDatasetIdentifier: string | null,
): Promise<boolean> {
const reverseRelation = this.getReverseRelation(originalRelation);
if (!reverseRelation) {
return true; // If no reverse relation is defined, consider it as "exists" to skip processing
}
// Only check for reverse references where the source dataset is also published
const reverseReference = await DatasetReference.query()
// We don't filter by source document_id here to find any incoming reference from any published dataset
.where('document_id', targetDatasetId)
// .where('related_document_id', sourceDatasetId) // Ensure it's an incoming reference
.where('relation', reverseRelation)
.where('value', 'like', `%${sourceDatasetIdentifier}`) // Basic check to ensure it points back to source dataset
.first();
return !!reverseReference;
}
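/**
 * Maps a relation to its bidirectional counterpart (e.g. HasPart <-> IsPartOf).
 * Returns null for relations outside the map so callers can skip them.
 */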
private getReverseRelation(relation: string): string | null {
const relationMap: Record<string, string> = {
IsNewVersionOf: 'IsPreviousVersionOf',
IsPreviousVersionOf: 'IsNewVersionOf',
IsVariantFormOf: 'IsOriginalFormOf',
IsOriginalFormOf: 'IsVariantFormOf',
Continues: 'IsContinuedBy',
IsContinuedBy: 'Continues',
HasPart: 'IsPartOf',
IsPartOf: 'HasPart',
};
// Only return reverse relation if it exists in our map, otherwise return null
return relationMap[relation] || null;
}
private printMissingReferencesList(missingReferences: MissingCrossReference[]) {
console.log('┌─────────────────────────────────────────────────────────────────────────────────┐');
console.log('│ MISSING CROSS-REFERENCES REPORT │');
console.log('│ (Published Datasets Only - Filtered Relations) │');
console.log('└─────────────────────────────────────────────────────────────────────────────────┘');
console.log();
missingReferences.forEach((missing, index) => {
console.log(
`${index + 1}. Dataset ${missing.sourceDatasetId} (Publish ID: ${missing.sourcePublishId} Identifier: ${missing.sourceDoi})
${missing.relation} Dataset ${missing.targetDatasetId} (Publish ID: ${missing.targetPublishId} Identifier: ${missing.targetDoi})`,
);
console.log(` ├─ Current relation: "${missing.relation}"`);
console.log(` ├─ Missing reverse relation: "${missing.reverseRelation}"`);
console.log(` ├─ Reference type: ${missing.referenceType}`);
console.log(` └─ DOI/URL: ${missing.doi}`);
console.log();
});
console.log('┌─────────────────────────────────────────────────────────────────────────────────┐');
console.log(`│ SUMMARY: ${missingReferences.length} missing reverse reference(s) detected │`);
console.log(`│ Processed relations: ${this.ALLOWED_RELATIONS.join(', ')}`);
console.log('└─────────────────────────────────────────────────────────────────────────────────┘');
}
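/**
 * Creates the missing reverse references: backfills related_document_id on the
 * forward reference where absent, inserts the reverse DatasetReference on the
 * target dataset, and bumps the target's server_date_modified so downstream
 * consumers (e.g. the search index) pick up the change.
 */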
private async fixMissingReferences(missingReferences: MissingCrossReference[]) {
this.logger.info('🔧 Creating missing cross-references in database...');
let fixedCount = 0;
let errorCount = 0;
for (const [index, missing] of missingReferences.entries()) {
try {
// Get both source and target datasets
const sourceDataset = await Dataset.query()
.where('id', missing.sourceDatasetId)
.where('server_state', 'published')
.preload('identifier')
.preload('titles') // Preload titles to get mainTitle
.first();
const targetDataset = await Dataset.query().where('id', missing.targetDatasetId).where('server_state', 'published').first();
if (!sourceDataset) {
this.logger.warning(`⚠️ Source dataset ${missing.sourceDatasetId} not found or not published, skipping...`);
errorCount++;
continue;
}
if (!targetDataset) {
this.logger.warning(`⚠️ Target dataset ${missing.targetDatasetId} not found or not published, skipping...`);
errorCount++;
continue;
}
// Update the original reference if related_document_id is missing
const originalReference = await DatasetReference.query()
.where('document_id', missing.sourceDatasetId)
.where('relation', missing.relation)
.where('value', 'like', `%${missing.targetDoi}%`)
.first();
if (originalReference && !originalReference.related_document_id) {
originalReference.related_document_id = missing.targetDatasetId;
await originalReference.save();
if (this.verbose) {
this.logger.info(`🔗 Updated original reference with related_document_id: ${missing.targetDatasetId}`);
}
}
// Create the reverse reference using the referenced_by relationship
// Example: If Dataset 297 IsNewVersionOf Dataset 144
// We create an incoming reference for Dataset 144 that shows Dataset 297 IsPreviousVersionOf it
const reverseReference = new DatasetReference();
// Setting document_id to the target dataset makes this row an incoming reference on it,
// while related_document_id records which dataset the reference came from
reverseReference.document_id = missing.targetDatasetId;
reverseReference.related_document_id = missing.sourceDatasetId;
reverseReference.type = 'DOI';
reverseReference.relation = missing.reverseRelation;
// Use the source dataset's DOI for the value (what's being referenced)
if (sourceDataset.identifier?.value) {
reverseReference.value = `https://doi.org/${sourceDataset.identifier.value}`;
} else {
// Fallback to dataset URL if no DOI
reverseReference.value = `https://tethys.at/dataset/${sourceDataset.publish_id || missing.sourceDatasetId}`;
}
// Use the source dataset's main title for the label
//reverseReference.label = sourceDataset.mainTitle || `Dataset ${missing.sourceDatasetId}`;
// get label of forward reference
reverseReference.label = missing.sourceReferenceLabel || sourceDataset.mainTitle || `Dataset ${missing.sourceDatasetId}`;
// reverseReference.notes = `Auto-created by detect:missing-cross-references command on ${DateTime.now().toISO()} to fix missing bidirectional reference.`;
// Save the new reverse reference
// Also save 'server_date_modified' on target dataset to trigger any downstream updates (e.g. search index)
targetDataset.server_date_modified = DateTime.now();
await targetDataset.save();
await reverseReference.save();
fixedCount++;
if (this.verbose) {
this.logger.info(
`✅ [${index + 1}/${missingReferences.length}] Created reverse reference: Dataset ${missing.sourceDatasetId} -> ${missing.targetDatasetId} (${missing.reverseRelation})`,
);
} else if ((index + 1) % 10 === 0) {
this.logger.info(`📈 Fixed ${fixedCount}/${missingReferences.length} references...`);
}
} catch (error) {
this.logger.error(
`❌ Error creating reverse reference for datasets ${missing.targetDatasetId} -> ${missing.sourceDatasetId}:`,
error,
);
errorCount++;
}
}
this.logger.info(`📊 Fix completed: ${fixedCount} created, ${errorCount} errors`);
}
}
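A minimal invocation sketch for the command above (the command name is taken from the detect:missing-cross-references note in its own comments; the exact flag spellings generated by the @flags decorators may differ):

node ace detect:missing-cross-references            # report only
node ace detect:missing-cross-references --verbose  # report plus the detailed list
node ace detect:missing-cross-references --fix      # create the missing reverse references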


@ -4,7 +4,7 @@
 import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
 import { create } from 'xmlbuilder2';
 import Dataset from '#models/dataset';
-import XmlModel from '#app/Library/DatasetXmlSerializer';
+import XmlModel from '#app/Library/XmlModel';
 import { readFileSync } from 'fs';
 import SaxonJS from 'saxon-js';
 import { Client } from '@opensearch-project/opensearch';

@ -12,8 +12,10 @@ import { getDomain } from '#app/utils/utility-functions';
 import { BaseCommand, flags } from '@adonisjs/core/ace';
 import { CommandOptions } from '@adonisjs/core/types/ace';
 import env from '#start/env';
+// import db from '@adonisjs/lucid/services/db';
+// import { default as Dataset } from '#models/dataset';
 import logger from '@adonisjs/core/services/logger';
-import { DateTime } from 'luxon';

 const opensearchNode = env.get('OPENSEARCH_HOST', 'localhost');
 const client = new Client({ node: `${opensearchNode}` }); // replace with your OpenSearch endpoint

@ -28,10 +30,11 @@ export default class IndexDatasets extends BaseCommand {
   public publish_id: number;

   public static options: CommandOptions = {
-    startApp: true, // Ensures the IoC container is ready to use
-    staysAlive: false, // Command exits after running
+    startApp: true,
+    staysAlive: false,
   };

   async run() {
     logger.debug('Hello world!');
     // const { default: Dataset } = await import('#models/dataset');

@ -41,12 +44,10 @@ export default class IndexDatasets extends BaseCommand {
     const index_name = 'tethys-records';
     for (var dataset of datasets) {
-      const shouldUpdate = await this.shouldUpdateDataset(dataset, index_name);
-      if (shouldUpdate) {
-        await this.indexDocument(dataset, index_name, proc);
-      } else {
-        logger.info(`Dataset with publish_id ${dataset.publish_id} is up to date, skipping indexing`);
-      }
+      // Logger.info(`File publish_id ${dataset.publish_id}`);
+      // const jsonString = await this.getJsonString(dataset, proc);
+      // console.log(jsonString);
+      await this.indexDocument(dataset, index_name, proc);
     }
   }

@ -64,46 +65,6 @@ export default class IndexDatasets extends BaseCommand {
     return await query.exec();
   }

-  private async shouldUpdateDataset(dataset: Dataset, index_name: string): Promise<boolean> {
-    try {
-      // Check if publish_id exists before proceeding
-      if (!dataset.publish_id) {
-        // Return true to update since document doesn't exist in OpenSearch yet
-        return true;
-      }
-      // Get the existing document from OpenSearch
-      const response = await client.get({
-        index: index_name,
-        id: dataset.publish_id?.toString(),
-      });
-      const existingDoc = response.body._source;
-      // Compare server_date_modified
-      if (existingDoc && existingDoc.server_date_modified) {
-        // Convert Unix timestamp (seconds) to milliseconds for DateTime.fromMillis()
-        const existingModified = DateTime.fromMillis(Number(existingDoc.server_date_modified) * 1000);
-        const currentModified = dataset.server_date_modified;
-        // Only update if the dataset has been modified more recently
-        if (currentModified <= existingModified) {
-          return false;
-        }
-      }
-      return true;
-    } catch (error) {
-      // If document doesn't exist or other error, we should index it
-      if (error.statusCode === 404) {
-        logger.info(`Dataset with publish_id ${dataset.publish_id} not found in index, will create new document`);
-        return true;
-      }
-      logger.warn(`Error checking existing document for publish_id ${dataset.publish_id}: ${error.message}`);
-      return true; // Index anyway if we can't determine the status
-    }
-  }

   private async indexDocument(dataset: Dataset, index_name: string, proc: Buffer): Promise<void> {
     try {
       const doc = await this.getJsonString(dataset, proc);

@ -117,8 +78,7 @@ export default class IndexDatasets extends BaseCommand {
       });
       logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
     } catch (error) {
-      logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.
-        Error: ${error.message}`);
+      logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`);
     }
   }

@ -151,16 +111,19 @@ export default class IndexDatasets extends BaseCommand {
   }

   private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
-    const serializer = new XmlModel(dataset).enableCaching().excludeEmptyFields();
+    const xmlModel = new XmlModel(dataset);
     // xmlModel.setModel(dataset);
+    xmlModel.excludeEmptyFields();
+    xmlModel.caching = true;
+    // const cache = dataset.xmlCache ? dataset.xmlCache : null;
+    // dataset.load('xmlCache');
     if (dataset.xmlCache) {
-      serializer.setCache(dataset.xmlCache);
+      xmlModel.xmlCache = dataset.xmlCache;
     }
-    // return cache.toXmlDocument();
-    const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
-    return xmlDocument;
+    // return cache.getDomDocument();
+    const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
+    return domDocument;
   }

   private addSpecInformation(domNode: XMLBuilder, information: string) {


@ -1,346 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command list-updateable-datacite
| DONE: create commands/list_updateable_datacite.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import Dataset from '#models/dataset';
import { DoiClient } from '#app/Library/Doi/DoiClient';
import env from '#start/env';
import logger from '@adonisjs/core/services/logger';
import { DateTime } from 'luxon';
import pLimit from 'p-limit';
export default class ListUpdateableDatacite extends BaseCommand {
static commandName = 'list:updateable-datacite';
static description = 'List all datasets that need DataCite DOI updates';
public static needsApplication = true;
// private chunkSize = 100; // Set chunk size for pagination
@flags.boolean({ alias: 'v', description: 'Verbose output showing detailed information' })
public verbose: boolean = false;
@flags.boolean({ alias: 'c', description: 'Show only count of updatable datasets' })
public countOnly: boolean = false;
@flags.boolean({ alias: 'i', description: 'Show only publish IDs (useful for scripting)' })
public idsOnly: boolean = false;
@flags.number({ description: 'Chunk size for processing datasets (default: 50)' })
public chunkSize: number = 50;
//example: node ace list:updateable-datacite
//example: node ace list:updateable-datacite --verbose
//example: node ace list:updateable-datacite --count-only
//example: node ace list:updateable-datacite --ids-only
//example: node ace list:updateable-datacite --chunk-size 50
public static options: CommandOptions = {
startApp: true,
staysAlive: false,
};
async run() {
const prefix = env.get('DATACITE_PREFIX', '');
const base_domain = env.get('BASE_DOMAIN', '');
if (!prefix || !base_domain) {
logger.error('Missing DATACITE_PREFIX or BASE_DOMAIN environment variables');
return;
}
// Prevent conflicting flags
if ((this.verbose && this.countOnly) || (this.verbose && this.idsOnly)) {
logger.error('The --verbose flag cannot be combined with --count-only or --ids-only');
return;
}
const chunkSize = this.chunkSize || 50;
let page = 1;
let hasMoreDatasets = true;
let totalProcessed = 0;
const updatableDatasets: Dataset[] = [];
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processing datasets in chunks of ${chunkSize}...`);
}
while (hasMoreDatasets) {
const datasets = await this.getDatasets(page, chunkSize);
if (datasets.length === 0) {
hasMoreDatasets = false;
break;
}
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processing chunk ${page} (${datasets.length} datasets)...`);
}
const chunkUpdatableDatasets = await this.processChunk(datasets);
updatableDatasets.push(...chunkUpdatableDatasets);
totalProcessed += datasets.length;
page += 1;
if (datasets.length < chunkSize) {
hasMoreDatasets = false;
}
}
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processed ${totalProcessed} datasets total, found ${updatableDatasets.length} that need updates`);
}
if (this.countOnly) {
console.log(updatableDatasets.length);
} else if (this.idsOnly) {
updatableDatasets.forEach((dataset) => console.log(dataset.publish_id));
} else if (this.verbose) {
await this.showVerboseOutput(updatableDatasets);
} else {
this.showSimpleOutput(updatableDatasets);
}
}
/**
* Processes a chunk of datasets to determine which ones need DataCite updates
*
* This method handles parallel processing of datasets within a chunk, providing
* efficient error handling and filtering of results.
*
* @param datasets - Array of Dataset objects to process
* @returns Promise<Dataset[]> - Array of datasets that need updates
*/
// private async processChunk(datasets: Dataset[]): Promise<Dataset[]> {
// // Process datasets in parallel using Promise.allSettled for better error handling
// //
// // Why Promise.allSettled vs Promise.all?
// // - Promise.all fails fast: if ANY promise rejects, the entire operation fails
// // - Promise.allSettled waits for ALL promises: some can fail, others succeed
// // - This is crucial for batch processing where we don't want one bad dataset
// // to stop processing of the entire chunk
// const results = await Promise.allSettled(
// datasets.map(async (dataset) => {
// try {
// // Check if this specific dataset needs a DataCite update
// const needsUpdate = await this.shouldUpdateDataset(dataset);
// // Return the dataset if it needs update, null if it doesn't
// // This creates a sparse array that we'll filter later
// return needsUpdate ? dataset : null;
// } catch (error) {
// // Error handling for individual dataset checks
// //
// // Log warnings only if we're not in silent modes (count-only or ids-only)
// // This prevents log spam when running automated scripts
// if (!this.countOnly && !this.idsOnly) {
// logger.warn(`Error checking dataset ${dataset.publish_id}: ${error.message}`);
// }
// // IMPORTANT DECISION: Return the dataset anyway if we can't determine status
// //
// // Why? It's safer to include a dataset that might not need updating
// // than to miss one that actually does need updating. This follows the
// // "fail-safe" principle - if we're unsure, err on the side of caution
// return dataset;
// }
// }),
// );
// // Filter and extract results from Promise.allSettled response
// //
// // Promise.allSettled returns an array of objects with this structure:
// // - { status: 'fulfilled', value: T } for successful promises
// // - { status: 'rejected', reason: Error } for failed promises
// //
// // We need to:
// // 1. Only get fulfilled results (rejected ones are already handled above)
// // 2. Filter out null values (datasets that don't need updates)
// // 3. Extract the actual Dataset objects from the wrapper
// return results
// .filter(
// (result): result is PromiseFulfilledResult<Dataset | null> =>
// // Type guard: only include fulfilled results that have actual values
// // This filters out:
// // - Rejected promises (shouldn't happen due to try/catch, but safety first)
// // - Fulfilled promises that returned null (datasets that don't need updates)
// result.status === 'fulfilled' && result.value !== null,
// )
// .map((result) => result.value!); // Extract the Dataset from the wrapper
// // The ! is safe here because we filtered out null values above
// }
private async processChunk(datasets: Dataset[]): Promise<Dataset[]> {
// Limit concurrency to avoid API flooding (e.g., max 5 at once)
const limit = pLimit(5);
const tasks = datasets.map((dataset) =>
limit(async () => {
try {
const needsUpdate = await this.shouldUpdateDataset(dataset);
return needsUpdate ? dataset : null;
} catch (error) {
if (!this.countOnly && !this.idsOnly) {
logger.warn(
`Error checking dataset ${dataset.publish_id}: ${
error instanceof Error ? error.message : JSON.stringify(error)
}`,
);
}
// Fail-safe: include dataset if uncertain
return dataset;
}
}),
);
const results = await Promise.allSettled(tasks);
return results
.filter((result): result is PromiseFulfilledResult<Dataset | null> => result.status === 'fulfilled' && result.value !== null)
.map((result) => result.value!);
}
private async getDatasets(page: number, chunkSize: number): Promise<Dataset[]> {
return await Dataset.query()
.orderBy('publish_id', 'asc')
.preload('identifier')
.preload('xmlCache')
.preload('titles')
.where('server_state', 'published')
.whereHas('identifier', (identifierQuery) => {
identifierQuery.where('type', 'doi');
})
.forPage(page, chunkSize); // Get files for the current page
}
private async shouldUpdateDataset(dataset: Dataset): Promise<boolean> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
if (!doiIdentifier || doiIdentifier.type !== 'doi') {
return false;
}
const datasetModified =
dataset.server_date_modified instanceof DateTime
? dataset.server_date_modified
: DateTime.fromJSDate(dataset.server_date_modified);
if (!datasetModified) {
return true;
}
if (datasetModified > DateTime.now()) {
return false;
}
const doiClient = new DoiClient();
const DOI_CHECK_TIMEOUT = 300; // ms
const doiLastModified = await Promise.race([
doiClient.getDoiLastModified(doiIdentifier.value),
this.createTimeoutPromise(DOI_CHECK_TIMEOUT),
]).catch(() => null);
if (!doiLastModified) {
// If uncertain, better include dataset for update
return true;
}
const doiModified = DateTime.fromJSDate(doiLastModified);
if (datasetModified > doiModified) {
const diffInSeconds = Math.abs(datasetModified.diff(doiModified, 'seconds').seconds);
const toleranceSeconds = 600;
return diffInSeconds > toleranceSeconds;
}
return false;
} catch (error) {
return true; // safer: include dataset if unsure
}
}
/**
* Create a timeout promise for API calls
*/
private createTimeoutPromise(timeoutMs: number): Promise<never> {
return new Promise((_, reject) => {
setTimeout(() => reject(new Error(`API call timeout after ${timeoutMs}ms`)), timeoutMs);
});
}
private showSimpleOutput(updatableDatasets: Dataset[]): void {
if (updatableDatasets.length === 0) {
console.log('No datasets need DataCite updates.');
return;
}
console.log(`\nFound ${updatableDatasets.length} dataset(s) that need DataCite updates:\n`);
updatableDatasets.forEach((dataset) => {
console.log(`publish_id ${dataset.publish_id} needs update - ${dataset.mainTitle || 'Untitled'}`);
});
console.log(`\nTo update these datasets, run:`);
console.log(` node ace update:datacite`);
console.log(`\nOr update specific datasets:`);
console.log(` node ace update:datacite -p <publish_id>`);
}
private async showVerboseOutput(updatableDatasets: Dataset[]): Promise<void> {
if (updatableDatasets.length === 0) {
console.log('No datasets need DataCite updates.');
return;
}
console.log(`\nFound ${updatableDatasets.length} dataset(s) that need DataCite updates:\n`);
for (const dataset of updatableDatasets) {
await this.showDatasetDetails(dataset);
}
console.log(`\nSummary: ${updatableDatasets.length} datasets need updates`);
}
private async showDatasetDetails(dataset: Dataset): Promise<void> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
const doiValue = doiIdentifier?.value || 'N/A';
const datasetModified = dataset.server_date_modified;
// Get DOI info from DataCite
const doiClient = new DoiClient();
const doiLastModified = await doiClient.getDoiLastModified(doiValue);
const doiState = await doiClient.getDoiState(doiValue);
console.log(`┌─ Dataset ${dataset.publish_id} ───────────────────────────────────────────────────────────────`);
console.log(`│ Title: ${dataset.mainTitle || 'Untitled'}`);
console.log(`│ DOI: ${doiValue}`);
console.log(`│ DOI State: ${doiState || 'Unknown'}`);
console.log(`│ Dataset Modified: ${datasetModified ? datasetModified.toISO() : 'N/A'}`);
console.log(`│ DOI Modified: ${doiLastModified ? DateTime.fromJSDate(doiLastModified).toISO() : 'N/A'}`);
console.log(`│ Status: NEEDS UPDATE`);
console.log(`└─────────────────────────────────────────────────────────────────────────────────────────────\n`);
} catch (error) {
console.log(`┌─ Dataset ${dataset.publish_id} ───────────────────────────────────────────────────────────────`);
console.log(`│ Title: ${dataset.mainTitle || 'Untitled'}`);
console.log(`│ DOI: ${dataset.identifier?.value || 'N/A'}`);
console.log(`│ Error: ${error.message}`);
console.log(`│ Status: NEEDS UPDATE (Error checking)`);
console.log(`└─────────────────────────────────────────────────────────────────────────────────────────────\n`);
}
}
}
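A sketch of chaining this command with update:datacite via --ids-only (assuming a POSIX shell with GNU xargs; flag spellings as declared above):

node ace list:updateable-datacite --ids-only | xargs -r -n1 -I{} node ace update:datacite -p {}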


@ -1,266 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command update-datacite
| DONE: create commands/update_datacite.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import Dataset from '#models/dataset';
import { DoiClient } from '#app/Library/Doi/DoiClient';
import DoiClientException from '#app/exceptions/DoiClientException';
import Index from '#app/Library/Utils/Index';
import env from '#start/env';
import logger from '@adonisjs/core/services/logger';
import { DateTime } from 'luxon';
import { getDomain } from '#app/utils/utility-functions';
export default class UpdateDatacite extends BaseCommand {
static commandName = 'update:datacite';
static description = 'Update DataCite DOI records for published datasets';
public static needsApplication = true;
@flags.number({ alias: 'p', description: 'Specific publish_id to update' })
public publish_id: number;
@flags.boolean({ alias: 'f', description: 'Force update all records regardless of modification date' })
public force: boolean = false;
@flags.boolean({ alias: 'd', description: 'Dry run - show what would be updated without making changes' })
public dryRun: boolean = false;
@flags.boolean({ alias: 's', description: 'Show detailed stats for each dataset that needs updating' })
public stats: boolean = false;
//example: node ace update:datacite -p 123 --force --dry-run
public static options: CommandOptions = {
startApp: true, // Whether to boot the application before running the command
staysAlive: false, // Whether to keep the process alive after the command has executed
};
async run() {
logger.info('Starting DataCite update process...');
const prefix = env.get('DATACITE_PREFIX', '');
const base_domain = env.get('BASE_DOMAIN', '');
const apiUrl = env.get('DATACITE_API_URL', 'https://api.datacite.org');
if (!prefix || !base_domain) {
logger.error('Missing DATACITE_PREFIX or BASE_DOMAIN environment variables');
return;
}
logger.info(`Using DataCite API: ${apiUrl}`);
const datasets = await this.getDatasets();
logger.info(`Found ${datasets.length} datasets to process`);
let updated = 0;
let skipped = 0;
let errors = 0;
for (const dataset of datasets) {
try {
const shouldUpdate = this.force || (await this.shouldUpdateDataset(dataset));
if (this.stats) {
// Stats mode: show detailed information for datasets that need updating
if (shouldUpdate) {
await this.showDatasetStats(dataset);
updated++;
} else {
skipped++;
}
continue;
}
if (!shouldUpdate) {
logger.info(`Dataset ${dataset.publish_id}: Up to date, skipping`);
skipped++;
continue;
}
if (this.dryRun) {
logger.info(`Dataset ${dataset.publish_id}: Would update DataCite record (dry run)`);
updated++;
continue;
}
await this.updateDataciteRecord(dataset, prefix, base_domain);
logger.info(`Dataset ${dataset.publish_id}: Successfully updated DataCite record`);
updated++;
} catch (error) {
logger.error(`Dataset ${dataset.publish_id}: Failed to update - ${error.message}`);
errors++;
}
}
if (this.stats) {
logger.info(`\nDataCite Stats Summary: ${updated} datasets need updating, ${skipped} are up to date`);
} else {
logger.info(`DataCite update completed. Updated: ${updated}, Skipped: ${skipped}, Errors: ${errors}`);
}
}
private async getDatasets(): Promise<Dataset[]> {
const query = Dataset.query()
.preload('identifier')
.preload('xmlCache')
.where('server_state', 'published')
.whereHas('identifier', (identifierQuery) => {
identifierQuery.where('type', 'doi');
});
if (this.publish_id) {
query.where('publish_id', this.publish_id);
}
return await query.exec();
}
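/**
 * Mirrors the check in list:updateable-datacite, but with a tighter 60-second
 * tolerance, and deliberately skips updates when DataCite cannot be queried.
 */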
private async shouldUpdateDataset(dataset: Dataset): Promise<boolean> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
if (!doiIdentifier || doiIdentifier.type !== 'doi') {
return false;
}
const datasetModified = dataset.server_date_modified;
const now = DateTime.now();
if (!datasetModified) {
return true; // Update if modification date is missing
}
if (datasetModified > now) {
return false; // Skip invalid future dates
}
// Check DataCite DOI modification date
const doiClient = new DoiClient();
const doiLastModified = await doiClient.getDoiLastModified(doiIdentifier.value);
if (!doiLastModified) {
return false; // do not update if we can't get DOI info
}
const doiModified = DateTime.fromJSDate(doiLastModified);
if (datasetModified > doiModified) {
// if dataset was modified after DOI creation
// Calculate the difference in seconds
const diffInSeconds = Math.abs(datasetModified.diff(doiModified, 'seconds').seconds);
// Define tolerance threshold (60 seconds = 1 minute)
const toleranceSeconds = 60;
// Only update if the difference is greater than the tolerance
// This prevents unnecessary updates for minor timestamp differences
return diffInSeconds > toleranceSeconds;
} else {
return false; // No update needed
}
} catch (error) {
return false; // do not update if we can't determine the status or another error occurred
}
}
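/**
 * Re-registers the dataset's existing DOI at DataCite: regenerates the XML
 * metadata, keeps the DOI value stable, and points the landing page at the
 * doi.<BASE_DOMAIN> resolver. A 201 response from DataCite counts as success.
 */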
private async updateDataciteRecord(dataset: Dataset, prefix: string, base_domain: string): Promise<void> {
try {
// Get the DOI identifier (HasOne relationship)
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
if (!doiIdentifier || doiIdentifier.type !== 'doi') {
throw new Error('No DOI identifier found for dataset');
}
// Generate XML metadata
const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;
if (!xmlMeta) {
throw new Error('Failed to generate XML metadata');
}
// Construct DOI value and landing page URL
const doiValue = doiIdentifier.value; // Use existing DOI value
const landingPageUrl = `https://doi.${getDomain(base_domain)}/${doiValue}`;
// Update DataCite record
const doiClient = new DoiClient();
const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);
if (dataciteResponse?.status === 201) {
// // Update dataset modification date
// dataset.server_date_modified = DateTime.now();
// await dataset.save();
// // Update search index
// const index_name = 'tethys-records';
// await Index.indexDocument(dataset, index_name);
logger.debug(`Dataset ${dataset.publish_id}: DataCite record updated successfully`);
} else {
throw new DoiClientException(
dataciteResponse?.status || 500,
`Unexpected DataCite response code: ${dataciteResponse?.status}`,
);
}
} catch (error) {
if (error instanceof DoiClientException) {
throw error;
}
throw new Error(`Failed to update DataCite record: ${error.message}`);
}
}
/**
* Shows detailed statistics for a dataset that needs updating
*/
private async showDatasetStats(dataset: Dataset): Promise<void> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
const doiValue = doiIdentifier?.value || 'N/A';
const doiStatus = doiIdentifier?.status || 'N/A';
const datasetModified = dataset.server_date_modified;
// Get DOI info from DataCite
const doiClient = new DoiClient();
const doiLastModified = await doiClient.getDoiLastModified(doiValue);
const doiState = await doiClient.getDoiState(doiValue);
console.log(`
Dataset ${dataset.publish_id}
DOI Value: ${doiValue}
DOI Status (DB): ${doiStatus}
DOI State (DataCite): ${doiState || 'Unknown'}
Dataset Modified: ${datasetModified ? datasetModified.toISO() : 'N/A'}
DOI Modified: ${doiLastModified ? DateTime.fromJSDate(doiLastModified).toISO() : 'N/A'}
Needs Update: YES - Dataset newer than DOI
`);
} catch (error) {
console.log(`
Dataset ${dataset.publish_id}
DOI Value: ${dataset.identifier?.value || 'N/A'}
Error: ${error.message}
Needs Update: YES - Error checking status
`);
}
}
}


@ -88,7 +88,7 @@ export default class ValidateChecksum extends BaseCommand {
       );
       // Construct the file path
-      const filePath = '/storage/app/data/' + file.pathName;
+      const filePath = '/storage/app/public/' + file.pathName;
       try {
         // Calculate the MD5 checksum of the file

components.d.ts

@ -11,21 +11,3 @@ declare module '@vue/runtime-core' {
     NInput: (typeof import('naive-ui'))['NInput'];
   }
 }
-
-// types/leaflet-src-dom-DomEvent.d.ts
-declare module 'leaflet/src/dom/DomEvent' {
-  export type DomEventHandler = (e?: any) => void;
-
-  // Attach event listeners. `obj` can be any DOM node or object with event handling.
-  export function on(obj: any, types: string, fn: DomEventHandler, context?: any): void;
-
-  // Detach event listeners.
-  export function off(obj: any, types: string, fn?: DomEventHandler, context?: any): void;
-
-  // Prevent default on native events
-  export function preventDefault(ev?: Event | undefined): void;
-
-  // Optional: other helpers you might need later
-  export function stopPropagation(ev?: Event | undefined): void;
-  export function stop(ev?: Event | undefined): void;
-}


@ -80,8 +80,7 @@ export const http = defineConfig({
   | headers.
   |
   */
-  // trustProxy: proxyAddr.compile('loopback'),
-  trustProxy: proxyAddr.compile(['127.0.0.1', '::1/128']),
+  trustProxy: proxyAddr.compile('loopback'),

   /*
   |--------------------------------------------------------------------------


@ -128,7 +128,7 @@ allowedMethods: ['POST', 'PUT', 'PATCH', 'DELETE'],
   | projects/:id/file
   | ```
   */
-  processManually: ['/submitter/dataset/submit', '/submitter/dataset/:id/update'],
+  processManually: [],

   /*
   |--------------------------------------------------------------------------

@ -185,8 +185,8 @@ allowedMethods: ['POST', 'PUT', 'PATCH', 'DELETE'],
   | and fields data.
   |
   */
-  limit: '513mb',
-  // limit: env.get('UPLOAD_LIMIT', '513mb'),
+  // limit: '20mb',
+  limit: env.get('UPLOAD_LIMIT', '513mb'),

   /*
   |--------------------------------------------------------------------------


@ -47,7 +47,7 @@ const databaseConfig = defineConfig({
     migrations: {
       naturalSort: true,
     },
-    // healthCheck: false,
+    healthCheck: false,
     debug: false,
     pool: { min: 1, max: 100 },
   },


@ -1,45 +1,151 @@
-// import env from '#start/env'
-// import app from '@adonisjs/core/services/app'
-import { defineConfig, services } from '@adonisjs/drive'
-
-const driveConfig = defineConfig({
-  default: 'public',
-
-  services: {
-    /**
-     * Persist files on the local filesystem
-     */
-    public: services.fs({
-      location: '/storage/app/public/',
-      serveFiles: true,
-      routeBasePath: '/public',
-      visibility: 'public',
-    }),
-    local: services.fs({
-      location: '/storage/app/data/',
-      serveFiles: true,
-      routeBasePath: '/data',
-      visibility: 'public',
-    }),
-
-    /**
-     * Persist files on Digital Ocean spaces
-     */
-    // spaces: services.s3({
-    //   credentials: {
-    //     accessKeyId: env.get('SPACES_KEY'),
-    //     secretAccessKey: env.get('SPACES_SECRET'),
-    //   },
-    //   region: env.get('SPACES_REGION'),
-    //   bucket: env.get('SPACES_BUCKET'),
-    //   endpoint: env.get('SPACES_ENDPOINT'),
-    //   visibility: 'public',
-    // }),
-  },
-})
-
-export default driveConfig
+/**
+ * Config source: https://git.io/JBt3o
+ *
+ * Feel free to let us know via PR, if you find something broken in this config
+ * file.
+ */
+import { defineConfig } from '#providers/drive/src/types/define_config';
+import env from '#start/env';
+// import { driveConfig } from '@adonisjs/core/build/config';
+// import { driveConfig } from "@adonisjs/drive/build/config.js";
+// import Application from '@ioc:Adonis/Core/Application';
+
+/*
+|--------------------------------------------------------------------------
+| Drive Config
+|--------------------------------------------------------------------------
+|
+| The `DriveConfig` relies on the `DisksList` interface which is
+| defined inside the `contracts` directory.
+|
+*/
+export default defineConfig({
+  /*
+  |--------------------------------------------------------------------------
+  | Default disk
+  |--------------------------------------------------------------------------
+  |
+  | The default disk to use for managing file uploads. The value is driven by
+  | the `DRIVE_DISK` environment variable.
+  |
+  */
+  disk: env.get('DRIVE_DISK', 'local'),
+
+  disks: {
+    /*
+    |--------------------------------------------------------------------------
+    | Local
+    |--------------------------------------------------------------------------
+    |
+    | Uses the local file system to manage files. Make sure to turn off serving
+    | files when not using this disk.
+    |
+    */
+    local: {
+      driver: 'local',
+      visibility: 'public',
+
+      /*
+      |--------------------------------------------------------------------------
+      | Storage root - Local driver only
+      |--------------------------------------------------------------------------
+      |
+      | Define an absolute path to the storage directory from where to read the
+      | files.
+      |
+      */
+      // root: Application.tmpPath('uploads'),
+      root: '/storage/app/public',
+
+      /*
+      |--------------------------------------------------------------------------
+      | Serve files - Local driver only
+      |--------------------------------------------------------------------------
+      |
+      | When this is set to true, AdonisJS will configure a files server to serve
+      | files from the disk root. This is done to mimic the behavior of cloud
+      | storage services that has inbuilt capabilities to serve files.
+      |
+      */
+      serveFiles: true,
+
+      /*
+      |--------------------------------------------------------------------------
+      | Base path - Local driver only
+      |--------------------------------------------------------------------------
+      |
+      | Base path is always required when "serveFiles = true". Also make sure
+      | the `basePath` is unique across all the disks using "local" driver and
+      | you are not registering routes with this prefix.
+      |
+      */
+      basePath: '/uploads',
+    },
+
+    /*
+    |--------------------------------------------------------------------------
+    | S3 Driver
+    |--------------------------------------------------------------------------
+    |
+    | Uses the S3 cloud storage to manage files. Make sure to install the s3
+    | drive separately when using it.
+    |
+    |**************************************************************************
+    | npm i @adonisjs/drive-s3
+    |**************************************************************************
+    |
+    */
+    // s3: {
+    //   driver: 's3',
+    //   visibility: 'public',
+    //   key: Env.get('S3_KEY'),
+    //   secret: Env.get('S3_SECRET'),
+    //   region: Env.get('S3_REGION'),
+    //   bucket: Env.get('S3_BUCKET'),
+    //   endpoint: Env.get('S3_ENDPOINT'),
+    //
+    //   // For minio to work
+    //   // forcePathStyle: true,
+    // },
+
+    /*
+    |--------------------------------------------------------------------------
+    | GCS Driver
+    |--------------------------------------------------------------------------
+    |
+    | Uses the Google cloud storage to manage files. Make sure to install the GCS
+    | drive separately when using it.
+    |
+    |**************************************************************************
+    | npm i @adonisjs/drive-gcs
+    |**************************************************************************
+    |
+    */
+    // gcs: {
+    //   driver: 'gcs',
+    //   visibility: 'public',
+    //   keyFilename: Env.get('GCS_KEY_FILENAME'),
+    //   bucket: Env.get('GCS_BUCKET'),
+
+    /*
+    |--------------------------------------------------------------------------
+    | Uniform ACL - Google cloud storage only
+    |--------------------------------------------------------------------------
+    |
+    | When using the Uniform ACL on the bucket, the "visibility" option is
+    | ignored. Since, the files ACL is managed by the google bucket policies
+    | directly.
+    |
+    |**************************************************************************
+    | Learn more: https://cloud.google.com/storage/docs/uniform-bucket-level-access
+    |**************************************************************************
+    |
+    | The following option just informs drive whether your bucket is using uniform
+    | ACL or not. The actual setting needs to be toggled within the Google cloud
+    | console.
+    |
+    */
+    // usingUniformAcl: false,
+    // },
+  },
+});


@ -1,233 +0,0 @@
/**
* Config source: https://git.io/JBt3o
*
* Feel free to let us know via PR, if you find something broken in this config
* file.
*/
import { defineConfig } from '#providers/drive/src/types/define_config';
import env from '#start/env';
// import { driveConfig } from '@adonisjs/core/build/config';
// import { driveConfig } from "@adonisjs/drive/build/config.js";
// import Application from '@ioc:Adonis/Core/Application';
/*
|--------------------------------------------------------------------------
| Drive Config
|--------------------------------------------------------------------------
|
| The `DriveConfig` relies on the `DisksList` interface which is
| defined inside the `contracts` directory.
|
*/
export default defineConfig({
/*
|--------------------------------------------------------------------------
| Default disk
|--------------------------------------------------------------------------
|
| The default disk to use for managing file uploads. The value is driven by
| the `DRIVE_DISK` environment variable.
|
*/
disk: env.get('DRIVE_DISK', 'local'),
disks: {
/*
|--------------------------------------------------------------------------
| Local
|--------------------------------------------------------------------------
|
| Uses the local file system to manage files. Make sure to turn off serving
| files when not using this disk.
|
*/
local: {
driver: 'local',
visibility: 'public',
/*
|--------------------------------------------------------------------------
| Storage root - Local driver only
|--------------------------------------------------------------------------
|
| Define an absolute path to the storage directory from where to read the
| files.
|
*/
// root: Application.tmpPath('uploads'),
root: '/storage/app/data',
/*
|--------------------------------------------------------------------------
| Serve files - Local driver only
|--------------------------------------------------------------------------
|
| When this is set to true, AdonisJS will configure a files server to serve
| files from the disk root. This is done to mimic the behavior of cloud
| storage services that has inbuilt capabilities to serve files.
|
*/
serveFiles: true,
/*
|--------------------------------------------------------------------------
| Base path - Local driver only
|--------------------------------------------------------------------------
|
| Base path is always required when "serveFiles = true". Also make sure
| the `basePath` is unique across all the disks using "local" driver and
| you are not registering routes with this prefix.
|
*/
basePath: '/files',
},
local: {
driver: 'local',
visibility: 'public',
/*
|--------------------------------------------------------------------------
| Storage root - Local driver only
|--------------------------------------------------------------------------
|
| Define an absolute path to the storage directory from where to read the
| files.
|
*/
// root: Application.tmpPath('uploads'),
root: '/storage/app/data',
/*
|--------------------------------------------------------------------------
| Serve files - Local driver only
|--------------------------------------------------------------------------
|
| When this is set to true, AdonisJS will configure a files server to serve
| files from the disk root. This is done to mimic the behavior of cloud
| storage services that has inbuilt capabilities to serve files.
|
*/
serveFiles: true,
/*
|--------------------------------------------------------------------------
| Base path - Local driver only
|--------------------------------------------------------------------------
|
| Base path is always required when "serveFiles = true". Also make sure
| the `basePath` is unique across all the disks using "local" driver and
| you are not registering routes with this prefix.
|
*/
basePath: '/files',
},
fs: {
driver: 'local',
visibility: 'public',
/*
|--------------------------------------------------------------------------
| Storage root - Local driver only
|--------------------------------------------------------------------------
|
| Define an absolute path to the storage directory from where to read the
| files.
|
*/
// root: Application.tmpPath('uploads'),
root: '/storage/app/public',
/*
|--------------------------------------------------------------------------
| Serve files - Local driver only
|--------------------------------------------------------------------------
|
| When this is set to true, AdonisJS will configure a files server to serve
| files from the disk root. This is done to mimic the behavior of cloud
| storage services that has inbuilt capabilities to serve files.
|
*/
serveFiles: true,
/*
|--------------------------------------------------------------------------
| Base path - Local driver only
|--------------------------------------------------------------------------
|
| Base path is always required when "serveFiles = true". Also make sure
| the `basePath` is unique across all the disks using "local" driver and
| you are not registering routes with this prefix.
|
*/
basePath: '/public',
},
/*
|--------------------------------------------------------------------------
| S3 Driver
|--------------------------------------------------------------------------
|
| Uses the S3 cloud storage to manage files. Make sure to install the s3
| drive separately when using it.
|
|**************************************************************************
| npm i @adonisjs/drive-s3
|**************************************************************************
|
*/
// s3: {
// driver: 's3',
// visibility: 'public',
// key: Env.get('S3_KEY'),
// secret: Env.get('S3_SECRET'),
// region: Env.get('S3_REGION'),
// bucket: Env.get('S3_BUCKET'),
// endpoint: Env.get('S3_ENDPOINT'),
//
// // For minio to work
// // forcePathStyle: true,
// },
/*
|--------------------------------------------------------------------------
| GCS Driver
|--------------------------------------------------------------------------
|
| Uses the Google cloud storage to manage files. Make sure to install the GCS
| drive separately when using it.
|
|**************************************************************************
| npm i @adonisjs/drive-gcs
|**************************************************************************
|
*/
// gcs: {
// driver: 'gcs',
// visibility: 'public',
// keyFilename: Env.get('GCS_KEY_FILENAME'),
// bucket: Env.get('GCS_BUCKET'),
/*
|--------------------------------------------------------------------------
| Uniform ACL - Google cloud storage only
|--------------------------------------------------------------------------
|
| When using the Uniform ACL on the bucket, the "visibility" option is
| ignored. Since, the files ACL is managed by the google bucket policies
| directly.
|
|**************************************************************************
| Learn more: https://cloud.google.com/storage/docs/uniform-bucket-level-access
|**************************************************************************
|
| The following option just informs drive whether your bucket is using uniform
| ACL or not. The actual setting needs to be toggled within the Google cloud
| console.
|
*/
// usingUniformAcl: false,
// },
},
});


@ -1,8 +1,7 @@
 import { defineConfig } from '@adonisjs/inertia';
 import type { HttpContext } from '@adonisjs/core/http';
-import type { InferSharedProps } from '@adonisjs/inertia/types'

-const inertiaConfig = defineConfig({
+export default defineConfig({
   /**
    * Path to the Edge view that will be used as the root view for Inertia responses
    */

@ -53,12 +52,6 @@ const inertiaConfig = defineConfig({
   },
 });

-export default inertiaConfig
-
-declare module '@adonisjs/inertia/types' {
-  export interface SharedProps extends InferSharedProps<typeof inertiaConfig> {}
-}
-
 // import { InertiaConfig } from '@ioc:EidelLev/Inertia';
 // /*


@ -12,11 +12,11 @@ const mailConfig = defineConfig({
   mailers: {
     smtp: transports.smtp({
-      // socketTimeout: 5000, // Overall timeout (5 seconds)
+      socketTimeout: 5000, // Overall timeout (5 seconds)
       host: env.get('SMTP_HOST', ''),
       port: env.get('SMTP_PORT'),
       secure: false,
-      ignoreTLS: true,
+      // ignoreTLS: true,
       requireTLS: false,

       /**

@ -30,10 +30,10 @@ const mailConfig = defineConfig({
       }, */
     }),
-    // resend: transports.resend({
-    //   key: env.get('RESEND_API_KEY'),
-    //   baseUrl: 'https://api.resend.com',
-    // }),
+    resend: transports.resend({
+      key: env.get('RESEND_API_KEY'),
+      baseUrl: 'https://api.resend.com',
+    }),
   },
 });


@ -6,7 +6,7 @@
 */
 import env from '#start/env';
-// import app from '@adonisjs/core/services/app';
+import app from '@adonisjs/core/services/app';
 import { defineConfig, stores } from '@adonisjs/session';

 const sessionConfig = defineConfig({


@ -1,32 +0,0 @@
import { defineConfig } from '@adonisjs/vite';
const viteBackendConfig = defineConfig({
/**
* The output of vite will be written inside this
* directory. The path should be relative from
* the application root.
*/
buildDirectory: 'public/assets',
/**
* The path to the manifest file generated by the
* "vite build" command.
*/
manifestFile: 'public/assets/.vite/manifest.json',
/**
* Feel free to change the value of the "assetsUrl" to
* point to a CDN in production.
*/
assetsUrl: '/assets',
/**
* Add defer attribute to scripts for better performance.
*/
scriptAttributes: {
defer: true,
},
});
export default viteBackendConfig;


@ -21,7 +21,6 @@ export enum ServerStates {
   rejected_reviewer = 'rejected_reviewer',
   rejected_editor = 'rejected_editor',
   reviewed = 'reviewed',
-  rejected_to_reviewer = 'rejected_to_reviewer',
 }

 // for table dataset_titles

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -18,7 +18,6 @@ export default class Accounts extends BaseSchema {
       table.text("two_factor_recovery_codes").nullable();
       table.smallint('state').nullable();
       table.bigint('last_counter').nullable();
-      table.string('avatar').nullable();
     });
   }

@ -44,7 +43,6 @@ export default class Accounts extends BaseSchema {
   // two_factor_recovery_codes text COLLATE pg_catalog."default",
   // state smallint,
   // last_counter bigint,
-  // avatar character varying(255),
   // )

   // ALTER TABLE gba.accounts

@ -87,6 +85,3 @@ export default class Accounts extends BaseSchema {
   // GRANT ALL ON SEQUENCE gba.totp_secrets_id_seq TO tethys_admin;
   // ALTER TABLE gba.totp_secrets ALTER COLUMN id SET DEFAULT nextval('gba.totp_secrets_id_seq');
-
-  // ALTER TABLE "accounts" ADD COLUMN "avatar" VARCHAR(255) NULL


@ -86,22 +86,3 @@ export default class Documents extends BaseSchema {
   // CONSTRAINT documents_server_state_check CHECK (server_state::text = ANY (ARRAY['deleted'::character varying::text, 'inprogress'::character varying::text, 'published'::character varying::text, 'released'::character varying::text, 'editor_accepted'::character varying::text, 'approved'::character varying::text, 'rejected_reviewer'::character varying::text, 'rejected_editor'::character varying::text, 'reviewed'::character varying::text])),
   // CONSTRAINT documents_type_check CHECK (type::text = ANY (ARRAY['analysisdata'::character varying::text, 'measurementdata'::character varying::text, 'monitoring'::character varying::text, 'remotesensing'::character varying::text, 'gis'::character varying::text, 'models'::character varying::text, 'mixedtype'::character varying::text]))
   // )
-
-  // ALTER TABLE documents DROP CONSTRAINT documents_server_state_check;
-  // ALTER TABLE documents
-  //   ADD CONSTRAINT documents_server_state_check CHECK (
-  //     server_state::text = ANY (ARRAY[
-  //       'deleted',
-  //       'inprogress',
-  //       'published',
-  //       'released',
-  //       'editor_accepted',
-  //       'approved',
-  //       'rejected_reviewer',
-  //       'rejected_editor',
-  //       'reviewed',
-  //       'rejected_to_reviewer' -- new value added
-  //     ]::text[])
-  //   );


@@ -32,21 +32,3 @@ export default class CollectionsRoles extends BaseSchema
// visible_oai boolean NOT NULL DEFAULT true,
// CONSTRAINT collections_roles_pkey PRIMARY KEY (id)
// )
// change to normal integer:
// ALTER TABLE collections_roles ALTER COLUMN id DROP DEFAULT;
// DROP SEQUENCE IF EXISTS collections_roles_id_seq;
// -- Step 1: Temporarily change one ID to a value not currently used
// UPDATE collections_roles SET id = 99 WHERE name = 'ccs';
// -- Step 2: Change 'ddc' ID to 2 (the old 'ccs' ID)
// UPDATE collections_roles SET id = 2 WHERE name = 'ddc';
// -- Step 3: Change the temporary ID (99) to 3 (the old 'ddc' ID)
// UPDATE collections_roles SET id = 3 WHERE name = 'ccs';
// UPDATE collections_roles SET id = 99 WHERE name = 'bk';
// UPDATE collections_roles SET id = 1 WHERE name = 'institutes';
// UPDATE collections_roles SET id = 4 WHERE name = 'pacs';
// UPDATE collections_roles SET id = 7 WHERE name = 'bk';


@@ -5,7 +5,7 @@ export default class Collections extends BaseSchema
    public async up() {
        this.schema.createTable(this.tableName, (table) => {
            table.increments('id');//.defaultTo("nextval('collections_id_seq')");
            table.increments('id').defaultTo("nextval('collections_id_seq')");
            table.integer('role_id').unsigned();
            table
                .foreign('role_id', 'collections_role_id_foreign')
@@ -25,8 +25,6 @@ export default class Collections extends BaseSchema
                .onUpdate('CASCADE');
            table.boolean('visible').notNullable().defaultTo(true);
            table.boolean('visible_publish').notNullable().defaultTo(true);
            table.integer('left_id').unsigned();
            table.integer('right_id').unsigned();
        });
    }
@@ -56,31 +54,3 @@ export default class Collections extends BaseSchema
    // ON UPDATE CASCADE
    // ON DELETE CASCADE
    // )
// change to normal integer:
// ALTER TABLE collections ALTER COLUMN id DROP DEFAULT;
// DROP SEQUENCE IF EXISTS collections_id_seq;
// ALTER TABLE collections
// ADD COLUMN left_id INTEGER;
// COMMENT ON COLUMN collections.left_id IS 'comment';
// ALTER TABLE collections
// ADD COLUMN right_id INTEGER;
// COMMENT ON COLUMN collections.right_id IS 'comment';
// -- Step 1: Drop the existing default
// ALTER TABLE collections
// ALTER COLUMN visible DROP DEFAULT,
// ALTER COLUMN visible_publish DROP DEFAULT;
// -- Step 2: Change column types with proper casting
// ALTER TABLE collections
// ALTER COLUMN visible TYPE smallint USING CASE WHEN visible THEN 1 ELSE 0 END,
// ALTER COLUMN visible_publish TYPE smallint USING CASE WHEN visible_publish THEN 1 ELSE 0 END;
// -- Step 3: Set new defaults as smallint
// ALTER TABLE collections
// ALTER COLUMN visible SET DEFAULT 1,
// ALTER COLUMN visible_publish SET DEFAULT 1;


@@ -1,18 +0,0 @@
import { BaseSchema } from "@adonisjs/lucid/schema";

export default class AddAlternateMimetypeToMimeTypes extends BaseSchema {
    protected tableName = 'mime_types';

    public async up() {
        this.schema.alterTable(this.tableName, (table) => {
            table.string('alternate_mimetype').nullable();
        });
    }

    public async down() {
        this.schema.alterTable(this.tableName, (table) => {
            table.dropColumn('alternate_mimetype');
        });
    }
}
// ALTER TABLE "mime_types" ADD COLUMN "alternate_mimetype" VARCHAR(255) NULL


@@ -1,74 +1,47 @@
#!/bin/bash
# # Run freshclam to update virus definitions
# freshclam
# # Sleep for a few seconds to give ClamAV time to start
# sleep 5
# # Start the ClamAV daemon
# /etc/init.d/clamav-daemon start
# bootstrap clam av service and clam av database updater
set -m
echo "Starting ClamAV services..."

function process_file() {
    if [[ ! -z "$1" ]]; then
        local SETTING_LIST=$(echo "$1" | tr ',' '\n' | grep "^[A-Za-z][A-Za-z]*=.*$")
        local SETTING
        for SETTING in ${SETTING_LIST}; do
            # Remove any existing copies of this setting. We do this here so that
            # settings with multiple values (e.g. ExtraDatabase) can still be added
            # multiple times below
            local KEY=${SETTING%%=*}
            sed -i $2 -e "/^${KEY} /d"
        done
        for SETTING in ${SETTING_LIST}; do
            # Split on first '='
            local KEY=${SETTING%%=*}
            local VALUE=${SETTING#*=}
            echo "${KEY} ${VALUE}" >> "$2"
        done
    fi
}
# process_file "${CLAMD_SETTINGS_CSV}" /etc/clamav/clamd.conf
# process_file "${FRESHCLAM_SETTINGS_CSV}" /etc/clamav/freshclam.conf

# Try to download database if missing
# if [ ! "$(ls -A /var/lib/clamav 2>/dev/null)" ]; then
# echo "Downloading ClamAV database (this may take a while)..."
# # Simple freshclam run without complex config
# if freshclam --datadir=/var/lib/clamav --quiet; then
# echo "✓ Database downloaded successfully"
# else
# echo "⚠ Database download failed - creating minimal setup"
# # Create a dummy file so clamd doesn't immediately fail
# touch /var/lib/clamav/.dummy
# fi
# fi
# Start freshclam daemon for automatic updates
echo "Starting freshclam daemon for automatic updates..."
# sg clamav -c "freshclam -d" &
# Added --daemon-notify to freshclam - This notifies clamd when the database updates
freshclam -d --daemon-notify=/etc/clamav/clamd.conf &
#freshclam -d &
# start in background
freshclam -d &
# /etc/init.d/clamav-freshclam start &
# Start clamd in background
clamd
# Start clamd in foreground (so dumb-init can supervise it)
# /etc/init.d/clamav-daemon start &
# change back to CMD of dockerfile
# Give freshclam a moment to start
sleep 2
# Start clamd daemon in background using sg
echo "Starting ClamAV daemon..."
# sg clamav -c "clamd" &
# Use sg to run clamd with proper group permissions
# sg clamav -c "clamd" &
# clamd --config-file=/etc/clamav/clamd.conf &
clamd &
# Give services time to start
echo "Waiting for services to initialize..."
sleep 8
# simple check
if pgrep clamd > /dev/null; then
echo "✓ ClamAV daemon is running"
else
echo "⚠ ClamAV daemon status uncertain, but continuing..."
fi
# Check if freshclam daemon is running
if pgrep freshclam > /dev/null; then
echo "✓ Freshclam daemon is running"
else
echo "⚠ Freshclam daemon status uncertain, but continuing..."
fi
# # Optional: Test socket connectivity
# if [ -S /var/run/clamav/clamd.socket ]; then
# echo "✓ ClamAV socket exists"
# else
# echo "⚠ WARNING: ClamAV socket not found - services may still be starting"
# fi
# # change back to CMD of dockerfile
echo "✓ ClamAV setup complete"
echo "Starting main application..."
# exec dumb-init -- "$@"
exec "$@" exec "$@"


@@ -1,278 +0,0 @@
# Dataset Indexing Command
AdonisJS Ace command for indexing and synchronizing published datasets with OpenSearch for search functionality.
## Overview
The `index:datasets` command processes published datasets and creates/updates corresponding search index documents in OpenSearch. It intelligently compares modification timestamps to only re-index datasets when necessary, optimizing performance while maintaining search index accuracy.
## Command Syntax
```bash
node ace index:datasets [options]
```
## Options
| Flag | Alias | Description |
|------|-------|-------------|
| `--publish_id <number>` | `-p` | Index a specific dataset by publish_id |
## Usage Examples
### Basic Operations
```bash
# Index all published datasets that have been modified since last indexing
node ace index:datasets
# Index a specific dataset by publish_id
node ace index:datasets --publish_id 231
node ace index:datasets -p 231
```
## How It Works
### 1. **Dataset Selection**
The command processes datasets that meet these criteria:
- `server_state = 'published'` - Only published datasets
- Has preloaded `xmlCache` relationship for metadata transformation
- Optionally filtered by specific `publish_id`
### 2. **Smart Update Detection**
For each dataset, the command:
- Checks if the dataset exists in the OpenSearch index
- Compares `server_date_modified` timestamps
- Only re-indexes if the dataset is newer than the indexed version
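A minimal sketch of that check, assuming the official `@opensearch-project/opensearch` client (the helper name and the default node URL are illustrative):

```typescript
import { Client } from '@opensearch-project/opensearch';

const client = new Client({ node: process.env.OPENSEARCH_HOST ?? 'http://localhost:9200' });

// Returns the indexed document for a publish_id, or null when the dataset
// has never been indexed (OpenSearch answers with a 404).
async function getIndexedDocument(publishId: number): Promise<Record<string, any> | null> {
    try {
        const { body } = await client.get({ index: 'tethys-records', id: String(publishId) });
        return body._source ?? null;
    } catch {
        return null; // treated as "needs indexing"
    }
}
```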
### 3. **Document Processing**
The indexing process involves:
1. **XML Generation**: Creates structured XML from dataset metadata
2. **XSLT Transformation**: Converts XML to JSON using Saxon-JS processor
3. **Index Update**: Updates or creates the document in OpenSearch
4. **Logging**: Records success/failure for each operation
## Index Structure
### Index Configuration
- **Index Name**: `tethys-records`
- **Document ID**: Dataset `publish_id`
- **Refresh**: `true` (immediate availability)
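A minimal sketch of the corresponding write, reusing the client from above (`searchDocument` stands in for the JSON produced by the transformation step):

```typescript
// Create or overwrite the index document for one dataset.
// refresh: true makes the change immediately searchable, matching the
// configuration above.
async function indexDocument(publishId: number, searchDocument: Record<string, unknown>): Promise<void> {
    await client.index({
        index: 'tethys-records',
        id: String(publishId),
        body: searchDocument,
        refresh: true,
    });
}
```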
### Document Fields
The indexed documents contain:
- **Metadata Fields**: Title, description, authors, keywords
- **Identifiers**: DOI, publish_id, and other identifiers
- **Temporal Data**: Publication dates, coverage periods
- **Geographic Data**: Spatial coverage information
- **Technical Details**: Data formats, access information
- **Timestamps**: Creation and modification dates
## Example Output
### Successful Run
```bash
node ace index:datasets
```
```
Found 150 published datasets to process
Dataset with publish_id 231 successfully indexed
Dataset with publish_id 245 is up to date, skipping indexing
Dataset with publish_id 267 successfully indexed
An error occurred while indexing dataset with publish_id 289. Error: Invalid XML metadata
Processing completed: 148 indexed, 1 skipped, 1 error
```
### Specific Dataset
```bash
node ace index:datasets --publish_id 231
```
```
Found 1 published dataset to process
Dataset with publish_id 231 successfully indexed
Processing completed: 1 indexed, 0 skipped, 0 errors
```
## Update Logic
The command uses intelligent indexing to avoid unnecessary processing:
| Condition | Action | Reason |
|-----------|--------|--------|
| Dataset not in index | ✅ Index | New dataset needs indexing |
| Dataset newer than indexed version | ✅ Re-index | Dataset has been updated |
| Dataset same/older than indexed version | ❌ Skip | Already up to date |
| OpenSearch document check fails | ✅ Index | Better safe than sorry |
| Invalid XML metadata | ❌ Skip + Log Error | Cannot process invalid data |
### Timestamp Comparison
```typescript
// Example comparison logic
const existingModified = DateTime.fromMillis(Number(existingDoc.server_date_modified) * 1000);
const currentModified = dataset.server_date_modified;
if (currentModified <= existingModified) {
// Skip - already up to date
return false;
}
// Proceed with indexing
```
## XML Transformation Process
### 1. **XML Generation**
```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<root>
<Dataset>
<!-- Dataset metadata fields -->
<title>Research Dataset Title</title>
<description>Dataset description...</description>
<!-- Additional metadata -->
</Dataset>
</root>
```
### 2. **XSLT Processing**
The command uses Saxon-JS with a compiled stylesheet (`solr.sef.json`) to transform XML to JSON:
```javascript
const result = await SaxonJS.transform({
stylesheetText: proc,
destination: 'serialized',
sourceText: xmlString,
});
```
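With `destination: 'serialized'`, Saxon-JS returns the JSON as a string in `principalResult`, so a single `JSON.parse` is all that remains before the index update (a plausible glue step, not a verbatim excerpt):

```typescript
// result.principalResult holds the serialized JSON document as a string
const searchDocument = JSON.parse(result.principalResult as string);
```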
### 3. **Final JSON Document**
```json
{
"id": "231",
"title": "Research Dataset Title",
"description": "Dataset description...",
"authors": ["Author Name"],
"server_date_modified": 1634567890,
"publish_id": 231
}
```
## Configuration Requirements
### Environment Variables
```bash
# OpenSearch Configuration
OPENSEARCH_HOST=localhost:9200
# For production:
# OPENSEARCH_HOST=your-opensearch-cluster:9200
```
### Required Files
- **XSLT Stylesheet**: `public/assets2/solr.sef.json` - Compiled Saxon-JS stylesheet for XML transformation
### Database Relationships
The command expects these model relationships:
```typescript
// Dataset model must have:
@hasOne(() => XmlCache, { foreignKey: 'dataset_id' })
public xmlCache: HasOne<typeof XmlCache>
```
## Error Handling
The command handles various error scenarios gracefully:
### Common Errors and Solutions
| Error | Cause | Solution |
|-------|-------|----------|
| `XSLT transformation failed` | Invalid XML or missing stylesheet | Check XML structure and stylesheet path |
| `OpenSearch connection error` | Service unavailable | Verify OpenSearch is running and accessible |
| `JSON parse error` | Malformed transformation result | Check XSLT stylesheet output format |
| `Missing xmlCache relationship` | Data integrity issue | Ensure xmlCache exists for dataset |
### Error Logging
```bash
# Typical error log entry
An error occurred while indexing dataset with publish_id 231.
Error: XSLT transformation failed: Invalid XML structure at line 15
```
## Performance Considerations
### Batch Processing
- Processes datasets sequentially to avoid overwhelming OpenSearch
- Each dataset is committed individually for reliability
- Failed indexing of one dataset doesn't stop processing others
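Sketched as a loop (the `datasets` list and the `needsIndexing`/`buildSearchDocument`/`indexDocument` helpers are hypothetical stand-ins for the pieces described above):

```typescript
// Hypothetical helpers; see the earlier sketches for plausible implementations.
declare const datasets: Array<{ publish_id: number }>;
declare function needsIndexing(dataset: { publish_id: number }): Promise<boolean>;
declare function buildSearchDocument(dataset: { publish_id: number }): Promise<Record<string, unknown>>;
declare function indexDocument(publishId: number, doc: Record<string, unknown>): Promise<void>;

let indexed = 0, skipped = 0, errors = 0;
for (const dataset of datasets) {
    try {
        if (!(await needsIndexing(dataset))) {
            skipped++; // already up to date
            continue;
        }
        await indexDocument(dataset.publish_id, await buildSearchDocument(dataset));
        indexed++;
    } catch (error) {
        errors++; // one failure never aborts the whole run
        console.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`, error);
    }
}
console.log(`Processing completed: ${indexed} indexed, ${skipped} skipped, ${errors} errors`);
```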
### Resource Usage
- **Memory**: XML/JSON transformations require temporary memory
- **Network**: OpenSearch API calls for each dataset
- **CPU**: XSLT transformations are CPU-intensive
### Optimization Tips
```bash
# Index only recently modified datasets (run regularly)
node ace index:datasets
# Index specific datasets when needed
node ace index:datasets --publish_id 231
# Consider running during off-peak hours for large batches
```
## Integration with Other Systems
### Search Functionality
The indexed documents power:
- **Dataset Search**: Full-text search across metadata
- **Faceted Browsing**: Filter by authors, keywords, dates
- **Geographic Search**: Spatial query capabilities
- **Auto-complete**: Suggest dataset titles and keywords
### Related Commands
- [`update:datacite`](update-datacite.md) - Often run after indexing to sync DOI metadata
- **Database migrations** - May require re-indexing after schema changes
### API Integration
The indexed data is consumed by:
- **Search API**: `/api/search` endpoints
- **Browse API**: `/api/datasets` with filtering
- **Recommendations**: Related dataset suggestions
## Monitoring and Maintenance
### Regular Tasks
```bash
# Daily indexing (recommended cron job)
0 2 * * * cd /path/to/project && node ace index:datasets
# Weekly safety-net run (the command itself skips datasets that are already up to date)
0 3 * * 0 cd /path/to/project && node ace index:datasets
```
### Health Checks
- Monitor OpenSearch cluster health
- Check for failed indexing operations in logs
- Verify search functionality is working
- Compare dataset counts between database and index
### Troubleshooting
```bash
# Check specific dataset indexing
node ace index:datasets --publish_id 231
# Verify OpenSearch connectivity
curl -X GET "localhost:9200/_cluster/health"
# Check index statistics
curl -X GET "localhost:9200/tethys-records/_stats"
```
## Best Practices
1. **Regular Scheduling**: Run the command regularly (daily) to keep the search index current
2. **Monitor Logs**: Watch for transformation errors or OpenSearch issues
3. **Backup Strategy**: Include OpenSearch indices in backup procedures
4. **Resource Management**: Monitor OpenSearch cluster resources during bulk operations
5. **Testing**: Verify search functionality after major indexing operations
6. **Coordination**: Run indexing before DataCite updates when both are needed


@@ -1,216 +0,0 @@
# DataCite Update Command
AdonisJS Ace command for updating DataCite DOI records for published datasets.
## Overview
The `update:datacite` command synchronizes your local dataset metadata with DataCite DOI records. It intelligently compares modification dates to only update records when necessary, reducing unnecessary API calls and maintaining data consistency.
## Command Syntax
```bash
node ace update:datacite [options]
```
## Options
| Flag | Alias | Description |
|------|-------|-------------|
| `--publish_id <number>` | `-p` | Update a specific dataset by publish_id |
| `--force` | `-f` | Force update all records regardless of modification date |
| `--dry-run` | `-d` | Preview what would be updated without making changes |
| `--stats` | `-s` | Show detailed statistics for datasets that need updating |
## Usage Examples
### Basic Operations
```bash
# Update all datasets that have been modified since their DOI was last updated
node ace update:datacite
# Update a specific dataset
node ace update:datacite --publish_id 231
node ace update:datacite -p 231
# Force update all datasets with DOIs (ignores modification dates)
node ace update:datacite --force
```
### Preview and Analysis
```bash
# Preview what would be updated (dry run)
node ace update:datacite --dry-run
# Show detailed statistics for datasets that need updating
node ace update:datacite --stats
# Show stats for a specific dataset
node ace update:datacite --stats --publish_id 231
```
### Combined Options
```bash
# Dry run for a specific dataset
node ace update:datacite --dry-run --publish_id 231
# Show stats for all datasets (including up-to-date ones)
node ace update:datacite --stats --force
```
## Command Modes
### 1. **Normal Mode** (Default)
Updates DataCite records for datasets that have been modified since their DOI was last updated.
**Example Output:**
```
Using DataCite API: https://api.test.datacite.org
Found 50 datasets to process
Dataset 231: Successfully updated DataCite record
Dataset 245: Up to date, skipping
Dataset 267: Successfully updated DataCite record
DataCite update completed. Updated: 15, Skipped: 35, Errors: 0
```
### 2. **Dry Run Mode** (`--dry-run`)
Shows what would be updated without making any changes to DataCite.
**Use Case:** Preview updates before running the actual command.
**Example Output:**
```
Dataset 231: Would update DataCite record (dry run)
Dataset 267: Would update DataCite record (dry run)
Dataset 245: Up to date, skipping
DataCite update completed. Updated: 2, Skipped: 1, Errors: 0
```
### 3. **Stats Mode** (`--stats`)
Shows detailed information for each dataset that needs updating, including why it needs updating.
**Use Case:** Debug synchronization issues, monitor dataset/DOI status, generate reports.
**Example Output:**
```
┌─ Dataset 231 ─────────────────────────────────────────────────────────
│ DOI Value: 10.21388/tethys.231
│ DOI Status (DB): findable
│ DOI State (DataCite): findable
│ Dataset Modified: 2024-09-15T10:30:00.000Z
│ DOI Modified: 2024-09-10T08:15:00.000Z
│ Needs Update: YES - Dataset newer than DOI
└───────────────────────────────────────────────────────────────────────
┌─ Dataset 267 ─────────────────────────────────────────────────────────
│ DOI Value: 10.21388/tethys.267
│ DOI Status (DB): findable
│ DOI State (DataCite): findable
│ Dataset Modified: 2024-09-18T14:20:00.000Z
│ DOI Modified: 2024-09-16T12:45:00.000Z
│ Needs Update: YES - Dataset newer than DOI
└───────────────────────────────────────────────────────────────────────
DataCite Stats Summary: 2 datasets need updating, 48 are up to date
```
## Update Logic
The command uses intelligent update detection:
1. **Compares modification dates**: Dataset `server_date_modified` vs DOI last modification date from DataCite
2. **Validates data integrity**: Checks for missing or future dates
3. **Handles API failures gracefully**: Updates anyway if DataCite info can't be retrieved
4. **Uses dual API approach**: DataCite REST API (primary) with MDS API fallback
### When Updates Happen
| Condition | Action | Reason |
|-----------|--------|--------|
| Dataset modified > DOI modified | ✅ Update | Dataset has newer changes |
| Dataset modified ≤ DOI modified | ❌ Skip | DOI is up to date |
| Dataset date in future | ❌ Skip | Invalid data, needs investigation |
| Dataset date missing | ✅ Update | Can't determine staleness |
| DataCite API error | ✅ Update | Better safe than sorry |
| `--force` flag used | ✅ Update | Override all logic |
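The table condenses to a small predicate; a sketch using Luxon (names are illustrative, not the command's actual code):

```typescript
import { DateTime } from 'luxon';

function needsUpdate(datasetModified: DateTime | null, doiModified: DateTime | null, force = false): boolean {
    if (force) return true;                             // --force overrides all logic
    if (!datasetModified) return true;                  // missing date: can't determine staleness
    if (datasetModified > DateTime.now()) return false; // future date: invalid data, skip and investigate
    if (!doiModified) return true;                      // DataCite info unavailable: better safe than sorry
    return datasetModified > doiModified;               // update only when the dataset is newer
}
```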
## Environment Configuration
Required environment variables:
```bash
# DataCite Credentials
DATACITE_USERNAME=your_username
DATACITE_PASSWORD=your_password
# API Endpoints (environment-specific)
DATACITE_API_URL=https://api.test.datacite.org # Test environment
DATACITE_SERVICE_URL=https://mds.test.datacite.org # Test MDS
DATACITE_API_URL=https://api.datacite.org # Production
DATACITE_SERVICE_URL=https://mds.datacite.org # Production MDS
# Project Configuration
DATACITE_PREFIX=10.21388 # Your DOI prefix
BASE_DOMAIN=tethys.at # Your domain
```
## Error Handling
The command handles various error scenarios:
- **Invalid modification dates**: Logs errors but continues processing other datasets
- **DataCite API failures**: Falls back to MDS API, then to safe update
- **Missing DOI identifiers**: Skips datasets without DOI identifiers
- **Network issues**: Continues with next dataset after logging error
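The REST-first/MDS-fallback lookup can be pictured like this (the `doiClient` shape is an illustrative stand-in, not the project's actual `DoiClient` API):

```typescript
import { DateTime } from 'luxon';

// Illustrative client shape; the real DoiClient lives in the codebase.
declare const doiClient: {
    getModifiedFromRestApi(doi: string): Promise<DateTime>;
    getModifiedFromMds(doi: string): Promise<DateTime>;
};

async function fetchDoiModified(doi: string): Promise<DateTime | null> {
    try {
        return await doiClient.getModifiedFromRestApi(doi); // primary: DataCite REST API
    } catch {
        try {
            return await doiClient.getModifiedFromMds(doi); // fallback: MDS API
        } catch {
            return null; // caller updates anyway ("better safe than sorry")
        }
    }
}
```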
## Integration
The command integrates with:
- **Dataset Model**: Uses `server_date_modified` for change detection
- **DatasetIdentifier Model**: Reads DOI values and status
- **OpenSearch Index**: Updates search index after DataCite update
- **DoiClient**: Handles all DataCite API interactions
## Common Workflows
### Daily Maintenance
```bash
# Update any datasets modified today
node ace update:datacite
```
### Pre-Deployment Check
```bash
# Check what would be updated before deployment
node ace update:datacite --dry-run
```
### Debugging Sync Issues
```bash
# Investigate why specific dataset isn't syncing
node ace update:datacite --stats --publish_id 231
```
### Full Resync
```bash
# Force update all DOI records (use with caution)
node ace update:datacite --force
```
### Monitoring Report
```bash
# Generate sync status report
node ace update:datacite --stats > datacite-sync-report.txt
```
## Best Practices
1. **Regular Updates**: Run daily or after bulk dataset modifications
2. **Test First**: Use `--dry-run` or `--stats` before bulk operations
3. **Monitor Logs**: Check for data integrity warnings
4. **Environment Separation**: Use correct API URLs for test vs production
5. **Rate Limiting**: The command handles DataCite rate limits automatically


@@ -1,47 +1,229 @@
##
## Container-optimized freshclam configuration
## Example config file for freshclam
## Please read the freshclam.conf(5) manual before editing this file.
##
# Database directory
# Comment or remove the line below.
# Path to the database directory.
# WARNING: It must match clamd.conf's directive!
# Default: hardcoded (depends on installation options)
DatabaseDirectory /var/lib/clamav

# Log to stdout for container logging
# Path to the log file (make sure it has proper permissions)
# Default: disabled
# UpdateLogFile /dev/stdout

# Basic logging settings
# Maximum size of the log file.
# Value of 0 disables the limit.
# You may use 'M' or 'm' for megabytes (1M = 1m = 1048576 bytes)
# and 'K' or 'k' for kilobytes (1K = 1k = 1024 bytes).
# in bytes just don't use modifiers. If LogFileMaxSize is enabled,
# log rotation (the LogRotate option) will always be enabled.
# Default: 1M
#LogFileMaxSize 2M
# Log time with each message.
# Default: no
LogTime yes
# Enable verbose logging.
# Default: no
LogVerbose yes
# Use system logger (can work together with UpdateLogFile).
# Default: no
LogSyslog no

# PID file location
# Specify the type of syslog messages - please refer to 'man syslog'
# for facility names.
# Default: LOG_LOCAL6
#LogFacility LOG_MAIL
# Enable log rotation. Always enabled when LogFileMaxSize is enabled.
# Default: no
#LogRotate yes
# This option allows you to save the process identifier of the daemon
# Default: disabled
#PidFile /var/run/freshclam.pid
PidFile /var/run/clamav/freshclam.pid

# Database owner
# By default when started freshclam drops privileges and switches to the
# "clamav" user. This directive allows you to change the database owner.
# Default: clamav (may depend on installation options)
DatabaseOwner node

# Mirror settings for Austria
# Use DNS to verify virus database version. Freshclam uses DNS TXT records
# to verify database and software versions. With this directive you can change
# the database verification domain.
# WARNING: Do not touch it unless you're configuring freshclam to use your
# own database verification domain.
# Default: current.cvd.clamav.net
#DNSDatabaseInfo current.cvd.clamav.net
# Uncomment the following line and replace XY with your country
# code. See http://www.iana.org/cctld/cctld-whois.htm for the full list.
# You can use db.XY.ipv6.clamav.net for IPv6 connections.
DatabaseMirror db.at.clamav.net
# database.clamav.net is a round-robin record which points to our most
# reliable mirrors. It's used as a fall back in case db.XY.clamav.net is
# not working. DO NOT TOUCH the following line unless you know what you
# are doing.
DatabaseMirror database.clamav.net
# How many attempts to make before giving up.
# Default: 3 (per mirror)
#MaxAttempts 5
# With this option you can control scripted updates. It's highly recommended
# to keep it enabled.
# Default: yes
# Update settings
ScriptedUpdates yes
#ScriptedUpdates yes
# By default freshclam will keep the local databases (.cld) uncompressed to
# make their handling faster. With this option you can enable the compression;
# the change will take effect with the next database update.
# Default: no
#CompressLocalDatabase no
# With this option you can provide custom sources (http:// or file://) for
# database files. This option can be used multiple times.
# Default: no custom URLs
#DatabaseCustomURL http://myserver.com/mysigs.ndb
#DatabaseCustomURL file:///mnt/nfs/local.hdb
# This option allows you to easily point freshclam to private mirrors.
# If PrivateMirror is set, freshclam does not attempt to use DNS
# to determine whether its databases are out-of-date, instead it will
# use the If-Modified-Since request or directly check the headers of the
# remote database files. For each database, freshclam first attempts
# to download the CLD file. If that fails, it tries to download the
# CVD file. This option overrides DatabaseMirror, DNSDatabaseInfo
# and ScriptedUpdates. It can be used multiple times to provide
# fall-back mirrors.
# Default: disabled
#PrivateMirror mirror1.mynetwork.com
#PrivateMirror mirror2.mynetwork.com
# Number of database checks per day.
# Default: 12 (every two hours)
Checks 12
#Checks 24

# Don't fork (good for containers)
# Proxy settings
# Default: disabled
#HTTPProxyServer myproxy.com
#HTTPProxyPort 1234
#HTTPProxyUsername myusername
#HTTPProxyPassword mypass
# If your servers are behind a firewall/proxy which applies User-Agent
# filtering you can use this option to force the use of a different
# User-Agent header.
# Default: clamav/version_number
#HTTPUserAgent SomeUserAgentIdString
# Use aaa.bbb.ccc.ddd as client address for downloading databases. Useful for
# multi-homed systems.
# Default: Use OS'es default outgoing IP address.
#LocalIPAddress aaa.bbb.ccc.ddd
# Send the RELOAD command to clamd.
# Default: no
#NotifyClamd /path/to/clamd.conf
# Run command after successful database update.
# Default: disabled
#OnUpdateExecute command
# Run command when database update process fails.
# Default: disabled
#OnErrorExecute command
# Run command when freshclam reports outdated version.
# In the command string %v will be replaced by the new version number.
# Default: disabled
#OnOutdatedExecute command
# Don't fork into background.
# Default: no
Foreground no

# Connection timeouts
ConnectTimeout 60
ReceiveTimeout 60
# Test databases before using them
TestDatabases yes
# Enable bytecode signatures
Bytecode yes

# Enable debug messages in libclamav.
# Default: no
#Debug yes
# Timeout in seconds when connecting to database server.
# Default: 30
#ConnectTimeout 60
# Timeout in seconds when reading from database server.
# Default: 30
#ReceiveTimeout 60
# With this option enabled, freshclam will attempt to load new
# databases into memory to make sure they are properly handled
# by libclamav before replacing the old ones.
# Default: yes
#TestDatabases yes
# When enabled freshclam will submit statistics to the ClamAV Project about
# the latest virus detections in your environment. The ClamAV maintainers
# will then use this data to determine what types of malware are the most
# detected in the field and in what geographic area they are.
# Freshclam will connect to clamd in order to get recent statistics.
# Default: no
#SubmitDetectionStats /path/to/clamd.conf
# Country of origin of malware/detection statistics (for statistical
# purposes only). The statistics collector at ClamAV.net will look up
# your IP address to determine the geographical origin of the malware
# reported by your installation. If this installation is mainly used to
# scan data which comes from a different location, please enable this
# option and enter a two-letter code (see http://www.iana.org/domains/root/db/)
# of the country of origin.
# Default: disabled
#DetectionStatsCountry country-code
# This option enables support for our "Personal Statistics" service.
# When this option is enabled, the information on malware detected by
# your clamd installation is made available to you through our website.
# To get your HostID, log on http://www.stats.clamav.net and add a new
# host to your host list. Once you have the HostID, uncomment this option
# and paste the HostID here. As soon as your freshclam starts submitting
# information to our stats collecting service, you will be able to view
# the statistics of this clamd installation by logging into
# http://www.stats.clamav.net with the same credentials you used to
# generate the HostID. For more information refer to:
# http://www.clamav.net/documentation.html#cctts
# This feature requires SubmitDetectionStats to be enabled.
# Default: disabled
#DetectionStatsHostID unique-id
# This option enables support for Google Safe Browsing. When activated for
# the first time, freshclam will download a new database file (safebrowsing.cvd)
# which will be automatically loaded by clamd and clamscan during the next
# reload, provided that the heuristic phishing detection is turned on. This
# database includes information about websites that may be phishing sites or
# possible sources of malware. When using this option, it's mandatory to run
# freshclam at least every 30 minutes.
# Freshclam uses the ClamAV's mirror infrastructure to distribute the
# database and its updates but all the contents are provided under Google's
# terms of use. See http://www.google.com/transparencyreport/safebrowsing
# and http://www.clamav.net/documentation.html#safebrowsing
# for more information.
# Default: disabled
#SafeBrowsing yes
# This option enables downloading of bytecode.cvd, which includes additional
# detection mechanisms and improvements to the ClamAV engine.
# Default: enabled
#Bytecode yes
# Download an additional 3rd party signature database distributed through
# the ClamAV mirrors.
# This option can be used multiple times.
#ExtraDatabase dbname1
#ExtraDatabase dbname2

index.d.ts vendored

@@ -183,9 +183,3 @@ declare module 'saxon-js' {
    export function transform(options: ITransformOptions): Promise<ITransformOutput> | ITransformOutput;
}
declare global {
interface File {
sort_order?: number;
}
}

package-lock.json generated

File diff suppressed because it is too large


@@ -4,8 +4,7 @@
    "private": true,
    "scripts": {
        "type-check": "tsc --noEmit",
        "dev": "node ace serve",
        "dev": "node ace serve --watch",
        "devInspect": "node ace serve --watch --node-args='--inspect'",
        "compress:xslt": "./node_modules/xslt3/xslt3.js -xsl:public/assets2/datasetxml2oai-pmh.xslt -export:public/assets2/datasetxml2oai.sef.json -t -nogo '-ns:##html5'",
        "compress:solr": "./node_modules/xslt3/xslt3.js -xsl:public/assets2/solr.xslt -export:public/assets2/solr.sef.json -t -nogo '-ns:##html5'",
        "compress:doi": "./node_modules/xslt3/xslt3.js -xsl:public/assets2/doi_datacite.xslt -export:public/assets2/doi_datacite.sef.json -t -nogo '-ns:##html5'",
@@ -16,58 +15,59 @@
        "format-check": "prettier --check ./**/*.{ts,js}",
        "test": "node ace test"
    },
    "eslintConfig": {
        "ignorePatterns": [
            "build"
        ]
    },
    "eslintIgnore": [
        "build"
    ],
    "alias": {
        "vue": "./node_modules/vue/dist/vue.esm-bundler.js"
    },
    "devDependencies": {
        "@adonisjs/assembler": "^7.1.1",
        "@adonisjs/tsconfig": "^1.4.0",
        "@adonisjs/tsconfig": "^1.2.1",
        "@headlessui/vue": "^1.7.23",
        "@japa/assert": "^4.0.1",
        "@japa/plugin-adonisjs": "^4.0.0",
        "@japa/runner": "^4.2.0",
        "@babel/core": "^7.20.12",
        "@babel/plugin-proposal-class-properties": "^7.18.6",
        "@babel/plugin-proposal-decorators": "^7.20.13",
        "@babel/plugin-transform-runtime": "^7.19.6",
        "@babel/preset-env": "^7.20.2",
        "@babel/preset-typescript": "^7.18.6",
        "@japa/api-client": "^2.0.3",
        "@japa/assert": "^3.0.0",
        "@japa/plugin-adonisjs": "^3.0.0",
        "@japa/runner": "^3.1.1",
        "@mdi/js": "^7.1.96",
        "@poppinss/utils": "^6.7.2",
        "@swc/wasm": "^1.10.14",
        "@swc/core": "^1.4.2",
        "@symfony/webpack-encore": "^5.0.1",
        "@tailwindcss/forms": "^0.5.2",
        "@types/bcryptjs": "^2.4.6",
        "@types/clamscan": "^2.0.4",
        "@types/escape-html": "^1.0.4",
        "@types/fs-extra": "^11.0.4",
        "@types/leaflet": "^1.9.16",
        "@types/leaflet": "^1.9.3",
        "@types/luxon": "^3.4.2",
        "@types/node": "^22.10.2",
        "@types/node": "^22.5.5",
        "@types/proxy-addr": "^2.0.0",
        "@types/qrcode": "^1.5.5",
        "@types/source-map-support": "^0.5.6",
        "@types/sprintf-js": "^1.1.4",
        "@types/supertest": "^6.0.2",
        "@vitejs/plugin-vue": "^5.2.1",
        "autoprefixer": "^10.4.13",
        "babel-preset-typescript-vue3": "^2.0.17",
        "chart.js": "^4.2.0",
        "dotenv-webpack": "^8.0.1",
        "eslint": "^8.57.1",
        "eslint-config-prettier": "^10.0.1",
        "eslint-config-prettier": "^9.0.0",
        "eslint-plugin-adonis": "^2.1.1",
        "eslint-plugin-prettier": "^5.0.0-alpha.2",
        "hot-hook": "^0.4.0",
        "numeral": "^2.0.6",
        "pinia": "^3.0.2",
        "pinia": "^2.0.30",
        "pino-pretty": "^13.0.0",
        "pino-pretty": "^11.2.2",
        "postcss-loader": "^8.1.1",
        "prettier": "^3.4.2",
        "prettier": "^3.0.0",
        "supertest": "^6.3.3",
        "tailwindcss": "^3.4.17",
        "tailwindcss": "^3.2.4",
        "ts-loader": "^9.4.2",
        "ts-node-maintained": "^10.9.5",
        "ts-node": "^10.9.2",
        "typescript": "~5.7",
        "typescript": "^5.1.3",
        "vite": "^6.0.11",
        "vue": "^3.4.26",
        "vue-facing-decorator": "^3.0.0",
        "vue-loader": "^17.0.1",
@@ -75,32 +75,30 @@
        "xslt3": "^2.5.0"
    },
    "dependencies": {
        "@adonisjs/auth": "^9.2.4",
        "@adonisjs/auth": "^9.1.1",
        "@adonisjs/bodyparser": "^10.0.1",
        "@adonisjs/core": "6.17.2",
        "@adonisjs/core": "^6.3.1",
        "@adonisjs/cors": "^2.2.1",
        "@adonisjs/drive": "^3.2.0",
        "@adonisjs/drive": "^2.3.0",
        "@adonisjs/encore": "^1.0.0",
        "@adonisjs/inertia": "^2.1.3",
        "@adonisjs/inertia": "^1.0.0-7",
        "@adonisjs/lucid": "^21.5.1",
        "@adonisjs/lucid": "^21.1.0",
        "@adonisjs/mail": "^9.2.2",
        "@adonisjs/redis": "^9.1.0",
        "@adonisjs/session": "^7.5.0",
        "@adonisjs/session": "^7.1.1",
        "@adonisjs/shield": "^8.1.1",
        "@adonisjs/static": "^1.1.1",
        "@adonisjs/vite": "^4.0.0",
        "@eidellev/adonis-stardust": "^3.0.0",
        "@fontsource/archivo-black": "^5.0.1",
        "@fontsource/inter": "^5.0.1",
        "@inertiajs/inertia": "^0.11.1",
        "@inertiajs/vue3": "^2.0.3",
        "@inertiajs/vue3": "^1.0.0",
        "@opensearch-project/opensearch": "^3.2.0",
        "@opensearch-project/opensearch": "^2.4.0",
        "@phc/format": "^1.0.0",
        "@poppinss/manager": "^5.0.2",
        "@vinejs/vine": "^3.0.0",
        "@vinejs/vine": "^2.0.0",
        "axios": "^1.7.9",
        "bcrypt": "^5.1.1",
        "bcryptjs": "^2.4.3",
        "clamscan": "^2.1.2",
        "crypto": "^1.0.1",
        "dayjs": "^1.11.7",
        "deep-email-validator": "^0.1.21",
        "edge.js": "^6.0.1",
@@ -116,19 +114,13 @@
        "notiwind": "^2.0.0",
        "pg": "^8.9.0",
        "qrcode": "^1.5.3",
        "redis": "^5.0.0",
        "redis": "^4.6.10",
        "reflect-metadata": "^0.2.1",
        "saxon-js": "^2.5.0",
        "toastify-js": "^1.12.0",
        "vuedraggable": "^4.1.0",
        "xmlbuilder2": "^3.1.1"
    },
    "hotHook": {
        "boundaries": [
            "./app/Controllers/**/*.ts",
            "./app/middleware/*.ts"
        ]
    },
    "type": "module",
    "imports": {
        "#controllers/*": "./app/Controllers/*.js",


@@ -1,10 +1,7 @@
module.exports = {
    plugins: {
        // 'postcss-import': {},
        // 'postcss-nesting': {},
        'tailwindcss/nesting': {},
        // "@tailwindcss/postcss": {},
        // tailwindcss: {},
        tailwindcss: {},
        autoprefixer: {},
    },


@@ -74,8 +74,7 @@ export class LocalDriver implements LocalDriverContract
     */
    public async exists(location: string): Promise<boolean> {
        try {
            let path_temp = this.makePath(location); //'/storage/app/files/421'
            return await this.adapter.pathExists(path_temp);
            return await this.adapter.pathExists(this.makePath(location));
        } catch (error) {
            throw CannotGetMetaDataException.invoke(location, 'exists', error);
        }


@@ -69,7 +69,7 @@ export default class MailProvider
    const mailConfigProvider = this.app.config.get('mail');
    const config = await configProvider.resolve<any>(this.app, mailConfigProvider);
    await config.mailers.smtp();
    const iwas = await config.mailers.smtp();
// iwas.config.host = 'hhhost'; // iwas.config.host = 'hhhost';
// this.app.config.set('mail.mailers.smtp.host', 'xhost'); // this.app.config.set('mail.mailers.smtp.host', 'xhost');
// const iwas = await config.mailers.smtp(); // const iwas = await config.mailers.smtp();


@@ -63,15 +63,6 @@ export default class QueryBuilderProvider
    public register() {
        // Register your own bindings
        // const ModelQueryBuilder = this.app.container.bind('@adonisjs/lucid/orm/ModelQueryBuilder');
        // ModelQueryBuilder.macro('whereTrue', function (columnName: string) {
        //     return this.where(columnName, true);
        // });
        // ModelQueryBuilder.macro('whereFalse', function (columnName: string) {
        //     return this.where(columnName, false);
        // });
    }

    public async boot() {
@@ -82,14 +73,15 @@ export default class QueryBuilderProvider
        // let rolesPluck = {};
        let rolesPluck: { [key: number]: any } = {};
        const result = await this.exec();
        result.forEach((user: { [key: string]: any }, index: number) => {
        result.forEach((user, index) => {
            let idc: number;
            let idc;
            if (!id) {
                idc = index;
            } else {
                idc = user[id];
            }
            const value: any = user[valueColumn];
            const value = user[valueColumn];
            // rolesPluck[idc] = user.name;
            rolesPluck[idc] = value;
        });
        return rolesPluck;


@@ -1,34 +0,0 @@
import { ApplicationService } from '@adonisjs/core/types';

export default class RuleProvider {
    constructor(protected app: ApplicationService) {}

    public register() {
        // Register your own bindings
    }

    public async boot() {
        // IoC container is ready
        // await import("../src/rules/index.js");
        await import('#start/rules/unique');
        await import('#start/rules/translated_language');
        await import('#start/rules/unique_person');
        // () => import('#start/rules/file_length'),
        // () => import('#start/rules/file_scan'),
        // () => import('#start/rules/allowed_extensions_mimetypes'),
        await import('#start/rules/dependent_array_min_length');
        await import('#start/rules/referenceValidation');
        await import('#start/rules/valid_mimetype');
        await import('#start/rules/array_contains_types');
        await import('#start/rules/orcid');
    }

    public async ready() {
        // App is ready
    }

    public async shutdown() {
        // Cleanup, since app is going down
    }
}


@@ -4,8 +4,9 @@
|--------------------------------------------------------------------------
|*/
import type { ApplicationService } from '@adonisjs/core/types';
import vine, { symbols, BaseLiteralType, Vine } from '@vinejs/vine';
import vine, { BaseLiteralType, Vine } from '@vinejs/vine';
import type { FieldContext, FieldOptions } from '@vinejs/vine/types';
import type { Validation, FieldContext, FieldOptions } from '@vinejs/vine/types';
// import type { MultipartFile, FileValidationOptions } from '@adonisjs/bodyparser/types';
import type { MultipartFile } from '@adonisjs/core/bodyparser';
import type { FileValidationOptions } from '@adonisjs/core/types/bodyparser';
import { Request, RequestValidator } from '@adonisjs/core/http';
@@ -15,7 +16,6 @@ import MimeType from '#models/mime_type';
 * Validation options accepted by the "file" rule
 */
export type FileRuleValidationOptions = Partial<FileValidationOptions> | ((field: FieldContext) => Partial<FileValidationOptions>);
/**
 * Extend VineJS
 */
@@ -24,66 +24,31 @@ declare module '@vinejs/vine' {
    myfile(options?: FileRuleValidationOptions): VineMultipartFile;
    }
}
/**
 * Extend HTTP request class
 */
declare module '@adonisjs/core/http' {
    interface Request extends RequestValidator {}
}
/**
 * Checks if the value is an instance of multipart file
 * from bodyparser.
 */
export function isBodyParserFile(file: MultipartFile | unknown): file is MultipartFile {
export function isBodyParserFile(file: MultipartFile | unknown): boolean {
    return !!(file && typeof file === 'object' && 'isMultipartFile' in file);
}
export async function getEnabledExtensions() {
    const enabledExtensions = await MimeType.query().select('file_extension').where('enabled', true).exec();
    const extensions = enabledExtensions
        .map((extension) => {
            return extension.file_extension.split('|');
        })
        .flat();
    return extensions;
};
/**
 * Cache for enabled extensions to reduce database queries
*/
let extensionsCache: string[] | null = null;
let cacheTimestamp = 0;
const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
/**
* Get enabled extensions with caching
*/
export async function getEnabledExtensions(): Promise<string[]> {
const now = Date.now();
if (extensionsCache && now - cacheTimestamp < CACHE_DURATION) {
return extensionsCache;
}
try {
const enabledExtensions = await MimeType.query().select('file_extension').where('enabled', true).exec();
const extensions = enabledExtensions
.map((extension) => extension.file_extension.split('|'))
.flat()
.map((ext) => ext.toLowerCase().trim())
.filter((ext) => ext.length > 0);
extensionsCache = [...new Set(extensions)]; // Remove duplicates
cacheTimestamp = now;
return extensionsCache;
} catch (error) {
console.error('Error fetching enabled extensions:', error);
return extensionsCache || [];
}
}
/**
* Clear extensions cache
*/
export function clearExtensionsCache(): void {
extensionsCache = null;
cacheTimestamp = 0;
}
/**
 * VineJS validation rule that validates the file to be an
 * instance of BodyParser MultipartFile class.
@@ -100,7 +65,6 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
    // At this point, you can use type assertion to explicitly tell TypeScript that file is of type MultipartFile
    const validatedFile = file as MultipartFile;
    const validationOptions = typeof options === 'function' ? options(field) : options;
    /**
     * Set size when it's defined in the options and missing
     * on the file instance
@@ -108,29 +72,28 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
    if (validatedFile.sizeLimit === undefined && validationOptions.size) {
        validatedFile.sizeLimit = validationOptions.size;
    }
    /**
     * Set extensions when it's defined in the options and missing
     * on the file instance
     */
    if (validatedFile.allowedExtensions === undefined) {
        if (validationOptions.extnames !== undefined) {
            validatedFile.allowedExtensions = validationOptions.extnames;
        } else {
            validatedFile.allowedExtensions = await getEnabledExtensions();
        }
    }
    // if (validatedFile.allowedExtensions === undefined && validationOptions.extnames) {
    //     validatedFile.allowedExtensions = validationOptions.extnames;
    // }
    if (validatedFile.allowedExtensions === undefined && validationOptions.extnames) {
        validatedFile.allowedExtensions = await getEnabledExtensions();
    }
/**
 * delete this again
* Set extensions when it's defined in the options and missing
* on the file instance
*/
// if (file.clientNameSizeLimit === undefined && validationOptions.clientNameSizeLimit) {
// file.clientNameSizeLimit = validationOptions.clientNameSizeLimit;
// }
    /**
     * Validate file
     */
    validatedFile.validate();
    try {
validatedFile.validate();
} catch (error) {
field.report(`File validation failed: ${error.message}`, 'file.validation_error', field, validationOptions);
return;
}
    /**
     * Report errors
     */
@@ -139,40 +102,24 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
    });
});
const MULTIPART_FILE: typeof symbols.SUBTYPE = symbols.SUBTYPE;
export class VineMultipartFile extends BaseLiteralType<MultipartFile, MultipartFile, MultipartFile> {
    [MULTIPART_FILE]: string;
    // #private;
    public validationOptions?: FileRuleValidationOptions;
    // constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions, validations?: Validation<any>[]);
    // clone(): this;
    public validationOptions;
    // extnames: (18) ['gpkg', 'htm', 'html', 'csv', 'txt', 'asc', 'c', 'cc', 'h', 'srt', 'tiff', 'pdf', 'png', 'zip', 'jpg', 'jpeg', 'jpe', 'xlsx']
    // size: '512mb'
    public constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions) {
    public constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions, validations?: Validation<any>[]) {
        // super(options, validations);
        super(options, [isMultipartFile(validationOptions || {})]);
        this.validationOptions = validationOptions;
    }
    public clone(): any {
        return new VineMultipartFile(this.validationOptions, this.cloneOptions());
        return new VineMultipartFile(this.validationOptions, this.cloneOptions(), this.cloneValidations());
    }
/**
* Set maximum file size
*/
public maxSize(size: string | number): this {
const newOptions = { ...this.validationOptions, size };
return new VineMultipartFile(newOptions, this.cloneOptions()) as this;
}
/**
* Set allowed extensions
*/
public extensions(extnames: string[]): this {
const newOptions = { ...this.validationOptions, extnames };
return new VineMultipartFile(newOptions, this.cloneOptions()) as this;
}
}

export default class VinejsProvider {
@@ -191,8 +138,13 @@ export default class VinejsProvider
    /**
     * The container bindings have booted
     */
    boot(): void {
        // VineString.macro('translatedLanguage', function (this: VineString, options: Options) {
        //     return this.use(translatedLanguageRule(options));
        // });
        Vine.macro('myfile', function (this: Vine, options?: FileRuleValidationOptions) {
        Vine.macro('myfile', function (this: Vine, options) {
            return new VineMultipartFile(options);
        });
@@ -200,47 +152,10 @@ export default class VinejsProvider
     * The validate method can be used to validate the request
     * data for the current request using VineJS validators
     */
    Request.macro('validateUsing', function (this: Request, ...args) {
        if (!this.ctx) {
            throw new Error('HttpContext is not available');
        }
        return new RequestValidator(this.ctx).validateUsing(...args);
    });
    Request.macro('validateUsing', function (...args) {
        return new RequestValidator(this.ctx).validateUsing(...args);
    });
// Ensure MIME validation macros are loaded
this.loadMimeValidationMacros();
this.loadFileScanMacros();
this.loadFileLengthMacros();
}
/**
* Load MIME validation macros - called during boot to ensure they're available
*/
private async loadMimeValidationMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/allowed_extensions_mimetypes');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
}
private async loadFileScanMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/file_scan');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
}
private async loadFileLengthMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/file_length');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
} }
/**
@@ -256,7 +171,5 @@ export default class VinejsProvider
    /**
     * Preparing to shutdown the app
     */
    async shutdown() {
        clearExtensionsCache();
    }
    async shutdown() {}
}

Binary file not shown (before: 11 KiB).

Binary file not shown (before: 37 KiB).

Binary file not shown (before: 9.8 KiB).


@@ -0,0 +1,12 @@
{
"entrypoints": {
"app": {
"css": [
"http://localhost:8080/assets/app.css"
],
"js": [
"http://localhost:8080/assets/app.js"
]
}
}
}

public/assets/manifest.json Normal file

@@ -0,0 +1,103 @@
{
"assets/app.css": "http://localhost:8080/assets/app.css",
"assets/app.js": "http://localhost:8080/assets/app.js",
"assets/resources_js_apps_settings_l18n_de_js.js": "http://localhost:8080/assets/resources_js_apps_settings_l18n_de_js.js",
"assets/resources_js_apps_settings_l18n_en_js.js": "http://localhost:8080/assets/resources_js_apps_settings_l18n_en_js.js",
"assets/resources_js_Pages_Admin_License_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_License_Index_vue.js",
"assets/resources_js_Pages_Admin_Mimetype_Create_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Mimetype_Create_vue.js",
"assets/resources_js_Pages_Admin_Mimetype_Delete_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Mimetype_Delete_vue.js",
"assets/resources_js_Pages_Admin_Mimetype_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Mimetype_Index_vue.js",
"assets/resources_js_Pages_Admin_Permission_Create_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Permission_Create_vue.js",
"assets/resources_js_Pages_Admin_Permission_Edit_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Permission_Edit_vue.js",
"assets/resources_js_Pages_Admin_Permission_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Permission_Index_vue.js",
"assets/resources_js_Pages_Admin_Permission_Show_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Permission_Show_vue.js",
"assets/resources_js_Pages_Admin_Role_Create_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Role_Create_vue.js",
"assets/resources_js_Pages_Admin_Role_Edit_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Role_Edit_vue.js",
"assets/resources_js_Pages_Admin_Role_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Role_Index_vue.js",
"assets/resources_js_Pages_Admin_Role_Show_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Role_Show_vue.js",
"assets/resources_js_Pages_Admin_Settings_vue-resources_js_utils_toast_css.css": "http://localhost:8080/assets/resources_js_Pages_Admin_Settings_vue-resources_js_utils_toast_css.css",
"assets/resources_js_Pages_Admin_Settings_vue-resources_js_utils_toast_css.js": "http://localhost:8080/assets/resources_js_Pages_Admin_Settings_vue-resources_js_utils_toast_css.js",
"assets/resources_js_Pages_Admin_User_Create_vue-resources_js_Components_SimplePasswordMeter_password-f3312a.css": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Create_vue-resources_js_Components_SimplePasswordMeter_password-f3312a.css",
"assets/resources_js_Pages_Admin_User_Create_vue-resources_js_Components_SimplePasswordMeter_password-f3312a.js": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Create_vue-resources_js_Components_SimplePasswordMeter_password-f3312a.js",
"assets/resources_js_Pages_Admin_User_Edit_vue-resources_js_Components_SimplePasswordMeter_password-m-6dc207.css": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Edit_vue-resources_js_Components_SimplePasswordMeter_password-m-6dc207.css",
"assets/resources_js_Pages_Admin_User_Edit_vue-resources_js_Components_SimplePasswordMeter_password-m-6dc207.js": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Edit_vue-resources_js_Components_SimplePasswordMeter_password-m-6dc207.js",
"assets/resources_js_Pages_Admin_User_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Index_vue.js",
"assets/resources_js_Pages_Admin_User_Show_vue.js": "http://localhost:8080/assets/resources_js_Pages_Admin_User_Show_vue.js",
"assets/resources_js_Pages_App_vue.js": "http://localhost:8080/assets/resources_js_Pages_App_vue.js",
"assets/resources_js_Pages_Auth_AccountInfo_vue-resources_js_utils_toast_css-resources_js_Components_-06c7b5.css": "http://localhost:8080/assets/resources_js_Pages_Auth_AccountInfo_vue-resources_js_utils_toast_css-resources_js_Components_-06c7b5.css",
"assets/resources_js_Pages_Auth_AccountInfo_vue-resources_js_utils_toast_css-resources_js_Components_-06c7b5.js": "http://localhost:8080/assets/resources_js_Pages_Auth_AccountInfo_vue-resources_js_utils_toast_css-resources_js_Components_-06c7b5.js",
"assets/resources_js_Pages_Auth_Login_vue.js": "http://localhost:8080/assets/resources_js_Pages_Auth_Login_vue.js",
"assets/resources_js_Pages_Auth_Register_vue.js": "http://localhost:8080/assets/resources_js_Pages_Auth_Register_vue.js",
"assets/resources_js_Pages_Dashboard_vue.js": "http://localhost:8080/assets/resources_js_Pages_Dashboard_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Approve_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Approve_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Doi_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Doi_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Index_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Publish_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Publish_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Receive_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Receive_vue.js",
"assets/resources_js_Pages_Editor_Dataset_Reject_vue.js": "http://localhost:8080/assets/resources_js_Pages_Editor_Dataset_Reject_vue.js",
"assets/resources_js_Pages_Error_vue.js": "http://localhost:8080/assets/resources_js_Pages_Error_vue.js",
"assets/resources_js_Pages_Errors_ServerError_vue.js": "http://localhost:8080/assets/resources_js_Pages_Errors_ServerError_vue.js",
"assets/resources_js_Pages_Errors_not_found_vue.js": "http://localhost:8080/assets/resources_js_Pages_Errors_not_found_vue.js",
"assets/resources_js_Pages_Map_vue-resources_js_Components_Map_draw_component_vue-resources_js_Compon-b0925c.css": "http://localhost:8080/assets/resources_js_Pages_Map_vue-resources_js_Components_Map_draw_component_vue-resources_js_Compon-b0925c.css",
"assets/resources_js_Pages_Map_vue-resources_js_Components_Map_draw_component_vue-resources_js_Compon-b0925c.js": "http://localhost:8080/assets/resources_js_Pages_Map_vue-resources_js_Components_Map_draw_component_vue-resources_js_Compon-b0925c.js",
"assets/resources_js_Pages_ProfileView_vue.js": "http://localhost:8080/assets/resources_js_Pages_ProfileView_vue.js",
"assets/resources_js_Pages_Reviewer_Dataset_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Reviewer_Dataset_Index_vue.js",
"assets/resources_js_Pages_Reviewer_Dataset_Reject_vue.js": "http://localhost:8080/assets/resources_js_Pages_Reviewer_Dataset_Reject_vue.js",
"assets/resources_js_Pages_Reviewer_Dataset_Review_vue.js": "http://localhost:8080/assets/resources_js_Pages_Reviewer_Dataset_Review_vue.js",
"assets/resources_js_Pages_Submitter_Dataset_Category_vue.css": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Category_vue.css",
"assets/resources_js_Pages_Submitter_Dataset_Category_vue.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Category_vue.js",
"assets/resources_js_Pages_Submitter_Dataset_Create_vue-resources_js_utils_toast_css-resources_js_Com-03a898.css": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Create_vue-resources_js_utils_toast_css-resources_js_Com-03a898.css",
"assets/resources_js_Pages_Submitter_Dataset_Create_vue-resources_js_utils_toast_css-resources_js_Com-03a898.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Create_vue-resources_js_utils_toast_css-resources_js_Com-03a898.js",
"assets/resources_js_Pages_Submitter_Dataset_Delete_vue.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Delete_vue.js",
"assets/resources_js_Pages_Submitter_Dataset_Edit_vue-resources_js_utils_toast_css-resources_js_Compo-a37b65.css": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Edit_vue-resources_js_utils_toast_css-resources_js_Compo-a37b65.css",
"assets/resources_js_Pages_Submitter_Dataset_Edit_vue-resources_js_utils_toast_css-resources_js_Compo-a37b65.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Edit_vue-resources_js_utils_toast_css-resources_js_Compo-a37b65.js",
"assets/resources_js_Pages_Submitter_Dataset_Index_vue.css": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Index_vue.css",
"assets/resources_js_Pages_Submitter_Dataset_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Index_vue.js",
"assets/resources_js_Pages_Submitter_Dataset_Release_vue.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Dataset_Release_vue.js",
"assets/resources_js_Pages_Submitter_Person_Index_vue.js": "http://localhost:8080/assets/resources_js_Pages_Submitter_Person_Index_vue.js",
"assets/resources_js_Pages_register-view_register-view-component_vue.js": "http://localhost:8080/assets/resources_js_Pages_register-view_register-view-component_vue.js",
"assets/vendors-node_modules_mdi_js_mdi_js-node_modules_vue-loader_dist_exportHelper_js.js": "http://localhost:8080/assets/vendors-node_modules_mdi_js_mdi_js-node_modules_vue-loader_dist_exportHelper_js.js",
"assets/vendors-node_modules_focus-trap_dist_focus-trap_esm_js-node_modules_notiwind_dist_index_esm_js.js": "http://localhost:8080/assets/vendors-node_modules_focus-trap_dist_focus-trap_esm_js-node_modules_notiwind_dist_index_esm_js.js",
"assets/vendors-node_modules_vue-facing-decorator_dist_esm_utils_js.js": "http://localhost:8080/assets/vendors-node_modules_vue-facing-decorator_dist_esm_utils_js.js",
"assets/vendors-node_modules_toastify-js_src_toastify_js.js": "http://localhost:8080/assets/vendors-node_modules_toastify-js_src_toastify_js.js",
"assets/vendors-node_modules_leaflet_dist_leaflet-src_js-node_modules_leaflet_src_control_Control_Att-adabdc.js": "http://localhost:8080/assets/vendors-node_modules_leaflet_dist_leaflet-src_js-node_modules_leaflet_src_control_Control_Att-adabdc.js",
"assets/vendors-node_modules_buffer_index_js-node_modules_vuedraggable_dist_vuedraggable_umd_js.js": "http://localhost:8080/assets/vendors-node_modules_buffer_index_js-node_modules_vuedraggable_dist_vuedraggable_umd_js.js",
"assets/vendors-node_modules_mime_dist_src_index_js.js": "http://localhost:8080/assets/vendors-node_modules_mime_dist_src_index_js.js",
"assets/vendors-node_modules_numeral_numeral_js-node_modules_chart_js_dist_chart_js.js": "http://localhost:8080/assets/vendors-node_modules_numeral_numeral_js-node_modules_chart_js_dist_chart_js.js",
"assets/resources_js_Components_BaseButton_vue.js": "http://localhost:8080/assets/resources_js_Components_BaseButton_vue.js",
"assets/resources_js_Stores_main_ts-resources_js_Components_BaseDivider_vue-resources_js_Components_C-b45805.js": "http://localhost:8080/assets/resources_js_Stores_main_ts-resources_js_Components_BaseDivider_vue-resources_js_Components_C-b45805.js",
"assets/resources_js_Layouts_LayoutAuthenticated_vue.css": "http://localhost:8080/assets/resources_js_Layouts_LayoutAuthenticated_vue.css",
"assets/resources_js_Layouts_LayoutAuthenticated_vue.js": "http://localhost:8080/assets/resources_js_Layouts_LayoutAuthenticated_vue.js",
"assets/resources_js_Components_BaseButtons_vue-resources_js_Components_FormControl_vue-resources_js_-d830d6.js": "http://localhost:8080/assets/resources_js_Components_BaseButtons_vue-resources_js_Components_FormControl_vue-resources_js_-d830d6.js",
"assets/resources_js_Components_Admin_Pagination_vue-resources_js_Components_BaseButtons_vue-resource-6f3a70.js": "http://localhost:8080/assets/resources_js_Components_Admin_Pagination_vue-resources_js_Components_BaseButtons_vue-resource-6f3a70.js",
"assets/resources_js_utils_toast_ts-resources_js_Components_NotificationBar_vue.js": "http://localhost:8080/assets/resources_js_utils_toast_ts-resources_js_Components_NotificationBar_vue.js",
"assets/resources_js_Components_Map_draw_component_vue-resources_js_Components_Map_zoom_component_vue-058bcc.js": "http://localhost:8080/assets/resources_js_Components_Map_draw_component_vue-resources_js_Components_Map_zoom_component_vue-058bcc.js",
"assets/resources_js_Components_SectionMain_vue-resources_js_Components_SectionTitleLineWithButton_vu-764dfe.js": "http://localhost:8080/assets/resources_js_Components_SectionMain_vue-resources_js_Components_SectionTitleLineWithButton_vu-764dfe.js",
"assets/resources_js_Components_BaseButtons_vue-resources_js_Components_NotificationBar_vue-resources-7e06d8.js": "http://localhost:8080/assets/resources_js_Components_BaseButtons_vue-resources_js_Components_NotificationBar_vue-resources-7e06d8.js",
"assets/resources_js_Components_Admin_Sort_vue-resources_js_Components_SectionTitleLineWithButton_vue.js": "http://localhost:8080/assets/resources_js_Components_Admin_Sort_vue-resources_js_Components_SectionTitleLineWithButton_vue.js",
"assets/resources_js_Components_CardBoxModal_vue.js": "http://localhost:8080/assets/resources_js_Components_CardBoxModal_vue.js",
"assets/resources_js_Components_FileUpload_vue-resources_js_Components_FormCheckRadioGroup_vue-resour-25e686.js": "http://localhost:8080/assets/resources_js_Components_FileUpload_vue-resources_js_Components_FormCheckRadioGroup_vue-resour-25e686.js",
"assets/fonts/inter-latin-ext-400-normal.woff": "http://localhost:8080/assets/fonts/inter-latin-ext-400-normal.1c20f7dc.woff",
"assets/fonts/inter-latin-400-normal.woff": "http://localhost:8080/assets/fonts/inter-latin-400-normal.b0c8fe9d.woff",
"assets/fonts/inter-latin-ext-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-latin-ext-400-normal.3d10c85f.woff2",
"assets/fonts/inter-latin-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-latin-400-normal.9698cc7d.woff2",
"assets/fonts/archivo-black-latin-400-normal.woff2": "http://localhost:8080/assets/fonts/archivo-black-latin-400-normal.fc847a1f.woff2",
"assets/fonts/archivo-black-latin-ext-400-normal.woff2": "http://localhost:8080/assets/fonts/archivo-black-latin-ext-400-normal.21761451.woff2",
"assets/fonts/inter-cyrillic-ext-400-normal.woff": "http://localhost:8080/assets/fonts/inter-cyrillic-ext-400-normal.e8945162.woff",
"assets/fonts/archivo-black-latin-400-normal.woff": "http://localhost:8080/assets/fonts/archivo-black-latin-400-normal.58a301a6.woff",
"assets/fonts/inter-cyrillic-ext-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-cyrillic-ext-400-normal.fd1478dc.woff2",
"assets/fonts/inter-cyrillic-400-normal.woff": "http://localhost:8080/assets/fonts/inter-cyrillic-400-normal.e2841352.woff",
"assets/fonts/inter-greek-400-normal.woff": "http://localhost:8080/assets/fonts/inter-greek-400-normal.a42da273.woff",
"assets/fonts/archivo-black-latin-ext-400-normal.woff": "http://localhost:8080/assets/fonts/archivo-black-latin-ext-400-normal.5ab5ba92.woff",
"assets/fonts/inter-greek-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-greek-400-normal.a8de720a.woff2",
"assets/fonts/inter-cyrillic-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-cyrillic-400-normal.cb04b2ee.woff2",
"assets/fonts/inter-greek-ext-400-normal.woff": "http://localhost:8080/assets/fonts/inter-greek-ext-400-normal.b9e1e894.woff",
"assets/fonts/inter-vietnamese-400-normal.woff": "http://localhost:8080/assets/fonts/inter-vietnamese-400-normal.96f8adc7.woff",
"assets/fonts/inter-greek-ext-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-greek-ext-400-normal.f2fa0d9e.woff2",
"assets/fonts/inter-vietnamese-400-normal.woff2": "http://localhost:8080/assets/fonts/inter-vietnamese-400-normal.44c9df13.woff2",
"assets/images/marker-icon.png": "http://localhost:8080/assets/images/marker-icon.2b3e1faf.png",
"assets/images/layers-2x.png": "http://localhost:8080/assets/images/layers-2x.8f2c4d11.png",
"assets/images/layers.png": "http://localhost:8080/assets/images/layers.416d9136.png",
"assets/images/Close.svg": "http://localhost:8080/assets/images/Close.e4887675.svg",
"assets/vendors-node_modules_vue-facing-decorator_dist_esm_index_js-node_modules_vue-facing-decorator-818045.js": "http://localhost:8080/assets/vendors-node_modules_vue-facing-decorator_dist_esm_index_js-node_modules_vue-facing-decorator-818045.js"
}
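The manifest complements the entrypoints file by mapping stable logical asset paths to their current URLs, including content-hashed file names for fonts and images. A small lookup sketch under the same assumptions (illustrative, not repository code):

import { readFileSync } from 'node:fs'

const manifest: Record<string, string> = JSON.parse(
    readFileSync('public/assets/manifest.json', 'utf8'),
)

// Templates reference the stable key; the manifest supplies the current URL.
// Hashed names change whenever the underlying file content changes.
const iconUrl = manifest['assets/images/marker-icon.png']
// -> 'http://localhost:8080/assets/images/marker-icon.2b3e1faf.png'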

File diff suppressed because one or more lines are too long


@@ -111,14 +111,7 @@
 <!--5 server_date_modified -->
 <xsl:if test="ServerDateModified/@UnixTimestamp != ''">
     <xsl:text>"server_date_modified": "</xsl:text>
-    <xsl:value-of select="ServerDateModified/@UnixTimestamp" />
-    <xsl:text>",</xsl:text>
-</xsl:if>
-<!--5 embargo_date -->
-<xsl:if test="EmbargoDate/@UnixTimestamp != ''">
-    <xsl:text>"embargo_date": "</xsl:text>
-    <xsl:value-of select="EmbargoDate/@UnixTimestamp" />
+    <xsl:value-of select="/ServerDateModified/@UnixTimestamp" />
     <xsl:text>",</xsl:text>
 </xsl:if>
@@ -207,8 +200,7 @@
 <!--17 +18 uncontrolled subject (swd) -->
 <xsl:variable name="subjects">
-    <!-- <xsl:for-each select="Subject[@Type = 'Uncontrolled']"> -->
-    <xsl:for-each select="Subject[@Type = 'Uncontrolled' or @Type = 'Geoera']">
+    <xsl:for-each select="Subject[@Type = 'Uncontrolled']">
         <xsl:text>"</xsl:text>
         <xsl:value-of select="fn:escapeQuotes(@Value)"/>
         <xsl:text>"</xsl:text>
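For orientation, this stylesheet serializes dataset metadata into a JSON document for the search index. Under the revised selection above (Uncontrolled subjects only, Geoera dropped), a record would yield a fragment roughly like the following; all values are invented and the "subjects" key name is hypothetical, only server_date_modified and the Uncontrolled filter come from the template itself:

// Illustrative output fragment, written as a TypeScript literal for
// readability; the stylesheet itself emits plain JSON text.
const indexedRecord = {
    server_date_modified: '1700000000', // from ServerDateModified/@UnixTimestamp
    subjects: ['hydrogeology', 'groundwater'], // Subject[@Type = 'Uncontrolled'] values, quote-escaped
}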

Binary file not shown. (before: 526 B)

Binary file not shown. (before: 2.2 KiB)


@@ -1,3 +0,0 @@
-[ZoneTransfer]
-ZoneId=3
-HostUrl=https://sea1.geoinformation.dev/favicon-32x32.png

BIN public/favicon.ico Normal file
Binary file not shown. (after: 15 KiB)

BIN public/favicon.png Normal file
Binary file not shown. (after: 18 KiB)

File diff suppressed because one or more lines are too long. (before: 952 KiB)

Some files were not shown because too many files have changed in this diff.