Compare commits

..

No commits in common. "master" and "feat/checkReferenceType" have entirely different histories.

141 changed files with 5074 additions and 20122 deletions

View file

@@ -13,7 +13,7 @@ jobs:
uses: actions/checkout@v3
- run: echo "The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "The workflow is now ready to test your code on the runner."
- name: List files in the repository
- name: List files in the repository:
run: |
ls ${{ github.workspace }}
- run: echo "This job's status is ${{ job.status }}."

View file

@@ -1,63 +1,57 @@
################## First Stage - Creating base #########################
# Created a variable to hold our node base image
ARG NODE_IMAGE=node:22-trixie-slim
ARG NODE_IMAGE=node:22-bookworm-slim
FROM $NODE_IMAGE AS base
# Install dumb-init and ClamAV, and perform ClamAV database update
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
dumb-init \
clamav \
clamav-daemon \
clamdscan \
ca-certificates \
RUN apt update \
&& apt-get install -y dumb-init clamav clamav-daemon nano \
&& rm -rf /var/lib/apt/lists/* \
# Creating folders and changing ownerships
&& mkdir -p /home/node/app \
&& mkdir -p /var/lib/clamav \
&& mkdir -p /home/node/app && chown node:node /home/node/app \
&& mkdir -p /var/lib/clamav \
&& mkdir /usr/local/share/clamav \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav \
# permissions
&& mkdir /var/run/clamav \
&& mkdir -p /var/log/clamav \
&& mkdir -p /tmp/clamav-logs \
# Set ownership and permissions
&& chown node:node /home/node/app \
# && chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav /var/log/clamav \
&& chown -R node:clamav /etc/clamav \
&& chmod 755 /tmp/clamav-logs \
&& chmod 750 /var/run/clamav \
&& chmod 755 /var/lib/clamav \
&& chmod 755 /var/log/clamav \
# Add node user to clamav group and allow sudo for clamav commands
&& usermod -a -G clamav node
# && chmod 666 /var/run/clamav/clamd.socket
# Make directories group-writable so node (as member of clamav group) can access them
# && chmod 750 /var/run/clamav /var/lib/clamav /var/log/clamav /tmp/clamav-logs
&& chown node:clamav /var/run/clamav \
&& chmod 750 /var/run/clamav
# -----------------------------------------------
# --- ClamAV & FreshClam ------------------------
# -----------------------------------------------
# RUN \
# chmod 644 /etc/clamav/freshclam.conf && \
# freshclam && \
# mkdir /var/run/clamav && \
# chown -R clamav:root /var/run/clamav
# # initial update of av databases
# RUN freshclam
# Configure ClamAV - copy config files before switching user
# COPY --chown=node:clamav ./*.conf /etc/clamav/
# Configure Clam AV...
COPY --chown=node:clamav ./*.conf /etc/clamav/
# # permissions
# RUN mkdir /var/run/clamav && \
# chown node:clamav /var/run/clamav && \
# chmod 750 /var/run/clamav
# Setting the working directory
WORKDIR /home/node/app
# Changing the current active user to "node"
# Download initial ClamAV database as root before switching users
USER node
RUN freshclam --quiet || echo "Initial database download failed - will retry at runtime"
# Copy entrypoint script
# initial update of av databases
RUN freshclam
# VOLUME /var/lib/clamav
COPY --chown=node:clamav docker-entrypoint.sh /home/node/app/docker-entrypoint.sh
RUN chmod +x /home/node/app/docker-entrypoint.sh
ENV TZ="Europe/Vienna"
################## Second Stage - Installing dependencies ##########
# In this stage, we will start installing dependencies
FROM base AS dependencies
@@ -76,6 +70,7 @@ ENV NODE_ENV=production
# We run "node ace build" to build the app (dist folder) for production
RUN node ace build --ignore-ts-errors
# RUN node ace build --production
# RUN node ace build --ignore-ts-errors
################## Final Stage - Production #########################
@@ -93,7 +88,6 @@ RUN npm ci --omit=dev
# Copy files to the working directory from the build folder the user
COPY --chown=node:node --from=build /home/node/app/build .
# Expose port
# EXPOSE 3310
EXPOSE 3333
ENTRYPOINT ["/home/node/app/docker-entrypoint.sh"]
# Run the command to start the server using "dumb-init"

22
LICENSE
View file

@@ -1,22 +0,0 @@
MIT License
Copyright (c) 2025 Tethys Research Repository
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@@ -11,10 +11,9 @@ export default defineConfig({
*/
commands: [
() => import('@adonisjs/core/commands'),
() => import('@adonisjs/lucid/commands'),
() => import('@adonisjs/mail/commands')
],
() => import('@adonisjs/core/commands'),
() => import('@adonisjs/lucid/commands'),
() => import('@adonisjs/mail/commands')],
/*
|--------------------------------------------------------------------------
| Preloads
@@ -27,17 +26,15 @@ export default defineConfig({
() => import('./start/routes.js'),
() => import('./start/kernel.js'),
() => import('#start/validator'),
// () => import('#start/rules/unique'),
// () => import('#start/rules/translated_language'),
// () => import('#start/rules/unique_person'),
// // () => import('#start/rules/file_length'),
// // () => import('#start/rules/file_scan'),
// // () => import('#start/rules/allowed_extensions_mimetypes'),
// () => import('#start/rules/dependent_array_min_length'),
// () => import('#start/rules/referenceValidation'),
// () => import('#start/rules/valid_mimetype'),
// () => import('#start/rules/array_contains_types'),
// () => import('#start/rules/orcid'),
() => import('#start/rules/unique'),
() => import('#start/rules/translated_language'),
() => import('#start/rules/unique_person'),
() => import('#start/rules/file_length'),
() => import('#start/rules/file_scan'),
() => import('#start/rules/allowed_extensions_mimetypes'),
() => import('#start/rules/dependent_array_min_length'),
() => import('#start/rules/referenceValidation'),
() => import('#start/rules/valid_mimetype'),
],
/*
|--------------------------------------------------------------------------
@@ -72,7 +69,7 @@ export default defineConfig({
() => import('#providers/stardust_provider'),
() => import('#providers/query_builder_provider'),
() => import('#providers/token_worker_provider'),
() => import('#providers/rule_provider'),
// () => import('#providers/validator_provider'),
// () => import('#providers/drive/provider/drive_provider'),
() => import('@adonisjs/drive/drive_provider'),
// () => import('@adonisjs/core/providers/vinejs_provider'),

View file

@@ -85,9 +85,7 @@ export default class AdminuserController {
// return response.badRequest(error.messages);
throw error;
}
const input: Record<string, any> = request.only(['login', 'email','first_name', 'last_name']);
input.password = request.input('new_password');
const input = request.only(['login', 'email', 'password', 'first_name', 'last_name']);
const user = await User.create(input);
if (request.input('roles')) {
const roles: Array<number> = request.input('roles');
@@ -97,6 +95,7 @@ export default class AdminuserController {
session.flash('message', 'User has been created successfully');
return response.redirect().toRoute('settings.user.index');
}
public async show({ request, inertia }: HttpContext) {
const id = request.param('id');
const user = await User.query().where('id', id).firstOrFail();
@@ -140,11 +139,9 @@ export default class AdminuserController {
});
// password is optional
let input: Record<string, any>;
if (request.input('new_password')) {
input = request.only(['login', 'email', 'first_name', 'last_name']);
input.password = request.input('new_password');
let input;
if (request.input('password')) {
input = request.only(['login', 'email', 'password', 'first_name', 'last_name']);
} else {
input = request.only(['login', 'email', 'first_name', 'last_name']);
}
@@ -159,6 +156,7 @@ export default class AdminuserController {
session.flash('message', 'User has been updated successfully');
return response.redirect().toRoute('settings.user.index');
}
public async destroy({ request, response, session }: HttpContext) {
const id = request.param('id');
const user = await User.findOrFail(id);

View file

@@ -64,7 +64,7 @@ export default class MimetypeController {
'maxLength': '{{ field }} must be less then {{ max }} characters long',
'isUnique': '{{ field }} must be unique, and this value is already taken',
'required': '{{ field }} is required',
'file_extension.array.minLength': 'at least {{ min }} mimetypes must be defined',
'file_extension.minLength': 'at least {{ min }} mimetypes must be defined',
'file_extension.*.string': 'Each file extension must be a valid string', // Adjusted to match the type
};

View file

@@ -76,24 +76,23 @@ export default class MailSettingsController {
public async sendTestMail({ response, auth }: HttpContext) {
const user = auth.user!;
const userEmail = user.email;
// let mailManager = await app.container.make('mail.manager');
// let iwas = mailManager.use();
// let iwas = mailManager.use();
// let test = mail.config.mailers.smtp();
if (!userEmail) {
return response.badRequest({ message: 'User email is not set. Please update your profile.' });
}
try {
await mail.send(
(message) => {
message
// .from(Config.get('mail.from.address'))
.from('tethys@geosphere.at')
.to(userEmail)
.subject('Test Email')
.html('<p>If you received this email, the email configuration seems to be correct.</p>');
});
await mail.send((message) => {
message
// .from(Config.get('mail.from.address'))
.from('tethys@geosphere.at')
.to(userEmail)
.subject('Test Email')
.html('<p>If you received this email, the email configuration seems to be correct.</p>');
});
return response.json({ success: true, message: 'Test email sent successfully' });
// return response.flash('Test email sent successfully!', 'message').redirect().back();

View file

@@ -4,29 +4,19 @@ import Person from '#models/person';
// node ace make:controller Author
export default class AuthorsController {
public async index({}: HttpContext) {
public async index({}: HttpContext) {
// select * from gba.persons
// where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
// where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
const authors = await Person.query()
.select([
'id',
'academic_title',
'first_name',
'last_name',
'identifier_orcid',
'status',
'name_type',
'created_at'
// Note: 'email' is omitted
])
.preload('datasets')
.where('name_type', 'Personal')
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.orderBy('datasets_count', 'desc');
.where('name_type', 'Personal')
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.orderBy('datasets_count', 'desc');
return authors;
}
@@ -37,10 +27,7 @@ export default class AuthorsController {
if (request.input('filter')) {
// users = users.whereRaw('name like %?%', [request.input('search')])
const searchTerm = request.input('filter');
authors.andWhere((query) => {
query.whereILike('first_name', `%${searchTerm}%`)
.orWhereILike('last_name', `%${searchTerm}%`);
});
authors.whereILike('first_name', `%${searchTerm}%`).orWhereILike('last_name', `%${searchTerm}%`);
// .orWhere('email', 'like', `%${searchTerm}%`);
}

View file

@@ -2,46 +2,26 @@ import type { HttpContext } from '@adonisjs/core/http';
import { StatusCodes } from 'http-status-codes';
import redis from '@adonisjs/redis/services/main';
const PREFIXES = ['von', 'van', 'de', 'del', 'della', 'di', 'da', 'dos', 'du', 'le', 'la'];
const PREFIXES = ['von', 'van'];
const DEFAULT_SIZE = 50;
const MIN_SIZE = 16;
const MAX_SIZE = 512;
const FONT_SIZE_RATIO = 0.4;
const COLOR_LIGHTENING_PERCENT = 60;
const COLOR_DARKENING_FACTOR = 0.6;
const CACHE_TTL = 24 * 60 * 60; // 24 hours instead of 1 hour
export default class AvatarController {
public async generateAvatar({ request, response }: HttpContext) {
try {
const { name, size = DEFAULT_SIZE } = request.only(['name', 'size']);
// Enhanced validation
if (!name || typeof name !== 'string' || name.trim().length === 0) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: 'Name is required and must be a non-empty string',
});
}
const parsedSize = this.validateSize(size);
if (!parsedSize.isValid) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: parsedSize.error,
});
if (!name) {
return response.status(StatusCodes.BAD_REQUEST).json({ error: 'Name is required' });
}
// Build a unique cache key for the given name and size
const cacheKey = `avatar:${this.sanitizeName(name)}-${parsedSize.value}`;
// const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
try {
const cachedSvg = await redis.get(cacheKey);
if (cachedSvg) {
this.setResponseHeaders(response);
return response.send(cachedSvg);
}
} catch (redisError) {
// Log redis error but continue without cache
console.warn('Redis cache read failed:', redisError);
const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
const cachedSvg = await redis.get(cacheKey);
if (cachedSvg) {
this.setResponseHeaders(response);
return response.send(cachedSvg);
}
const initials = this.getInitials(name);
@@ -49,85 +29,41 @@ export default class AvatarController {
const svgContent = this.createSvg(size, colors, initials);
// // Cache the generated avatar for future use, e.g. 1 hour expiry
try {
await redis.setex(cacheKey, CACHE_TTL, svgContent);
} catch (redisError) {
// Log but don't fail the request
console.warn('Redis cache write failed:', redisError);
}
await redis.setex(cacheKey, 3600, svgContent);
this.setResponseHeaders(response);
return response.send(svgContent);
} catch (error) {
console.error('Avatar generation error:', error);
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
error: 'Failed to generate avatar',
});
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ error: error.message });
}
}
private validateSize(size: any): { isValid: boolean; value?: number; error?: string } {
const numSize = Number(size);
if (isNaN(numSize)) {
return { isValid: false, error: 'Size must be a valid number' };
}
if (numSize < MIN_SIZE || numSize > MAX_SIZE) {
return {
isValid: false,
error: `Size must be between ${MIN_SIZE} and ${MAX_SIZE}`,
};
}
return { isValid: true, value: Math.floor(numSize) };
}
private sanitizeName(name: string): string {
return name
.trim()
.toLowerCase()
.replace(/[^a-z0-9\s-]/gi, '');
}
private getInitials(name: string): string {
const sanitized = name.trim().replace(/\s+/g, ' '); // normalize whitespace
const parts = sanitized
const parts = name
.trim()
.split(' ')
.filter((part) => part.length > 0)
.map((part) => part.trim());
.filter((part) => part.length > 0);
if (parts.length === 0) {
return 'NA';
}
if (parts.length === 1) {
// For single word, take first 2 characters or first char if only 1 char
return parts[0].substring(0, Math.min(2, parts[0].length)).toUpperCase();
if (parts.length >= 2) {
return this.getMultiWordInitials(parts);
}
return this.getMultiWordInitials(parts);
return parts[0].substring(0, 2).toUpperCase();
}
private getMultiWordInitials(parts: string[]): string {
// Filter out prefixes and short words
const significantParts = parts.filter((part) => !PREFIXES.includes(part.toLowerCase()) && part.length > 1);
const firstName = parts[0];
const lastName = parts[parts.length - 1];
const firstInitial = firstName.charAt(0).toUpperCase();
const lastInitial = lastName.charAt(0).toUpperCase();
if (significantParts.length === 0) {
// Fallback to first and last regardless of prefixes
const firstName = parts[0];
const lastName = parts[parts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
if (PREFIXES.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
return firstInitial + lastName.charAt(1).toUpperCase();
}
if (significantParts.length === 1) {
return significantParts[0].substring(0, 2).toUpperCase();
}
// Take first and last significant parts
const firstName = significantParts[0];
const lastName = significantParts[significantParts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
return firstInitial + lastInitial;
}
private generateColors(name: string): { background: string; text: string } {
@@ -139,44 +75,31 @@ export default class AvatarController {
}
private createSvg(size: number, colors: { background: string; text: string }, initials: string): string {
const fontSize = Math.max(12, Math.floor(size * FONT_SIZE_RATIO)); // Ensure readable font size
// Escape any potential HTML/XML characters in initials
const escapedInitials = this.escapeXml(initials);
return `<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${size} ${size}">
<rect width="100%" height="100%" fill="#${colors.background}" rx="${size * 0.1}"/>
<text x="50%" y="50%" dominant-baseline="central" text-anchor="middle"
font-weight="600" font-family="-apple-system, BlinkMacSystemFont, 'Segoe UI', system-ui, sans-serif"
font-size="${fontSize}" fill="#${colors.text}">${escapedInitials}</text>
</svg>`;
}
private escapeXml(text: string): string {
return text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&apos;');
const fontSize = size * FONT_SIZE_RATIO;
return `
<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="#${colors.background}"/>
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-weight="bold" font-family="Arial, sans-serif" font-size="${fontSize}" fill="#${colors.text}">${initials}</text>
</svg>
`;
}
private setResponseHeaders(response: HttpContext['response']): void {
response.header('Content-Type', 'image/svg+xml');
response.header('Cache-Control', 'public, max-age=86400'); // Cache for 1 day
response.header('ETag', `"${Date.now()}"`); // Simple ETag
response.header('Content-type', 'image/svg+xml');
response.header('Cache-Control', 'no-cache');
response.header('Pragma', 'no-cache');
response.header('Expires', '0');
}
private getColorFromName(name: string): string {
let hash = 0;
const normalizedName = name.toLowerCase().trim();
for (let i = 0; i < normalizedName.length; i++) {
hash = normalizedName.charCodeAt(i) + ((hash << 5) - hash);
hash = hash & hash; // Convert to 32-bit integer
for (let i = 0; i < name.length; i++) {
hash = name.charCodeAt(i) + ((hash << 5) - hash);
}
// Ensure we get vibrant colors by constraining the color space
const colorParts = [];
for (let i = 0; i < 3; i++) {
let value = (hash >> (i * 8)) & 0xff;
// Ensure minimum color intensity for better contrast
value = Math.max(50, value);
const value = (hash >> (i * 8)) & 0xff;
colorParts.push(value.toString(16).padStart(2, '0'));
}
return colorParts.join('');
@@ -187,7 +110,7 @@ export default class AvatarController {
const g = parseInt(hexColor.substring(2, 4), 16);
const b = parseInt(hexColor.substring(4, 6), 16);
const lightenValue = (value: number) => Math.min(255, Math.floor(value + (255 - value) * (percent / 100)));
const lightenValue = (value: number) => Math.min(255, Math.floor((value * (100 + percent)) / 100));
const newR = lightenValue(r);
const newG = lightenValue(g);
@@ -201,7 +124,7 @@ export default class AvatarController {
const g = parseInt(hexColor.slice(2, 4), 16);
const b = parseInt(hexColor.slice(4, 6), 16);
const darkenValue = (value: number) => Math.max(0, Math.floor(value * COLOR_DARKENING_FACTOR));
const darkenValue = (value: number) => Math.round(value * COLOR_DARKENING_FACTOR);
const darkerR = darkenValue(r);
const darkerG = darkenValue(g);

View file

@@ -1,36 +1,24 @@
import type { HttpContext } from '@adonisjs/core/http';
// import Person from 'App/Models/Person';
import Dataset from '#models/dataset';
import { StatusCodes } from 'http-status-codes';
import DatasetReference from '#models/dataset_reference';
// node ace make:controller Author
export default class DatasetController {
/**
* GET /api/datasets
* Find all published datasets
*/
public async index({ response }: HttpContext) {
try {
const datasets = await Dataset.query()
.where(function (query) {
query.where('server_state', 'published').orWhere('server_state', 'deleted');
})
.preload('titles')
.preload('identifier')
.orderBy('server_date_published', 'desc');
public async index({}: HttpContext) {
// Select datasets with server_state 'published' or 'deleted' and sort by the last published date
const datasets = await Dataset.query()
.where(function (query) {
query.where('server_state', 'published')
.orWhere('server_state', 'deleted');
})
.preload('titles')
.preload('identifier')
.orderBy('server_date_published', 'desc');
return response.status(StatusCodes.OK).json(datasets);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || 'Some error occurred while retrieving datasets.',
});
}
return datasets;
}
/**
* GET /api/dataset
* Find all published datasets
*/
public async findAll({ response }: HttpContext) {
try {
const datasets = await Dataset.query()
@@ -46,279 +34,34 @@ export default class DatasetController {
}
}
/**
* GET /api/dataset/:publish_id
* Find one dataset by publish_id
*/
public async findOne({ response, params }: HttpContext) {
try {
const dataset = await Dataset.query()
.where('publish_id', params.publish_id)
.preload('titles')
.preload('descriptions') // Using 'descriptions' instead of 'abstracts'
.preload('user', (builder) => {
builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
})
.preload('authors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order'])
.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order', 'contributor_type'])
.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
// .preload('referenced_by', (builder) => {
// builder.preload('dataset', (builder) => {
// builder.preload('identifier');
// });
// })
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.first(); // Use first() instead of firstOrFail() to handle not found gracefully
if (!dataset) {
return response.status(StatusCodes.NOT_FOUND).json({
message: `Cannot find Dataset with publish_id=${params.publish_id}.`,
public async findOne({ params }: HttpContext) {
const datasets = await Dataset.query()
.where('publish_id', params.publish_id)
.preload('titles')
.preload('descriptions')
.preload('user')
.preload('authors', (builder) => {
builder.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
.preload('referenced_by', (builder) => {
builder.preload('dataset', (builder) => {
builder.preload('identifier');
});
}
})
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.firstOrFail();
// Build the version chain
const versionChain = await this.buildVersionChain(dataset);
// Add version chain to response
const responseData = {
...dataset.toJSON(),
versionChain: versionChain,
};
// return response.status(StatusCodes.OK).json(dataset);
return response.status(StatusCodes.OK).json(responseData);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
});
}
}
/**
* GET /:prefix/:value
* Find dataset by identifier (e.g., https://doi.tethys.at/10.24341/tethys.99.2)
*/
public async findByIdentifier({ response, params }: HttpContext) {
const identifierValue = `${params.prefix}/${params.value}`;
// Optional: Validate DOI format
if (!identifierValue.match(/^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/)) {
return response.status(StatusCodes.BAD_REQUEST).json({
message: `Invalid DOI format: ${identifierValue}`,
});
}
try {
// Method 1: Using subquery with whereIn (most similar to your original)
const dataset = await Dataset.query()
// .whereIn('id', (subQuery) => {
// subQuery.select('dataset_id').from('dataset_identifiers').where('value', identifierValue);
// })
.whereHas('identifier', (builder) => {
builder.where('value', identifierValue);
})
.preload('titles')
.preload('descriptions') // Using 'descriptions' instead of 'abstracts'
.preload('user', (builder) => {
builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
})
.preload('authors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order'])
.wherePivot('role', 'author')
.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order', 'contributor_type'])
.wherePivot('role', 'contributor')
.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
// .preload('referenced_by', (builder) => {
// builder.preload('dataset', (builder) => {
// builder.preload('identifier');
// });
// })
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.first();
if (!dataset) {
return response.status(StatusCodes.NOT_FOUND).json({
message: `Cannot find Dataset with identifier=${identifierValue}.`,
});
}
// Build the version chain
const versionChain = await this.buildVersionChain(dataset);
// Add version chain to response
const responseData = {
...dataset.toJSON(),
versionChain: versionChain,
};
// return response.status(StatusCodes.OK).json(dataset);
return response.status(StatusCodes.OK).json(responseData);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || `Error retrieving Dataset with identifier=${identifierValue}.`,
});
}
}
/**
* Build the complete version chain for a dataset
* Traverses both backwards (previous versions) and forwards (newer versions)
*/
private async buildVersionChain(dataset: Dataset) {
const versionChain = {
current: {
id: dataset.id,
publish_id: dataset.publish_id,
doi: dataset.identifier?.value || null,
main_title: dataset.mainTitle || null,
server_date_published: dataset.server_date_published,
},
previousVersions: [] as any[],
newerVersions: [] as any[],
};
// Get all previous versions (going backwards in time)
versionChain.previousVersions = await this.getPreviousVersions(dataset.id);
// Get all newer versions (going forwards in time)
versionChain.newerVersions = await this.getNewerVersions(dataset.id);
return versionChain;
}
/**
* Recursively get all previous versions
*/
private async getPreviousVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
// Prevent infinite loops
if (visited.has(datasetId)) {
return [];
}
visited.add(datasetId);
const previousVersions: any[] = [];
// Find references where this dataset "IsNewVersionOf" another dataset
const previousRefs = await DatasetReference.query()
.where('document_id', datasetId)
.where('relation', 'IsNewVersionOf')
.whereNotNull('related_document_id');
for (const ref of previousRefs) {
if (!ref.related_document_id) continue;
const previousDataset = await Dataset.query()
.where('id', ref.related_document_id)
.preload('identifier')
.preload('titles')
.first();
if (previousDataset) {
const versionInfo = {
id: previousDataset.id,
publish_id: previousDataset.publish_id,
doi: previousDataset.identifier?.value || null,
main_title: previousDataset.mainTitle || null,
server_date_published: previousDataset.server_date_published,
relation: 'IsPreviousVersionOf', // From perspective of current dataset
};
previousVersions.push(versionInfo);
// Recursively get even older versions
const olderVersions = await this.getPreviousVersions(previousDataset.id, visited);
previousVersions.push(...olderVersions);
}
}
return previousVersions;
}
/**
* Recursively get all newer versions
*/
private async getNewerVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
// Prevent infinite loops
if (visited.has(datasetId)) {
return [];
}
visited.add(datasetId);
const newerVersions: any[] = [];
// Find references where this dataset "IsPreviousVersionOf" another dataset
const newerRefs = await DatasetReference.query()
.where('document_id', datasetId)
.where('relation', 'IsPreviousVersionOf')
.whereNotNull('related_document_id');
for (const ref of newerRefs) {
if (!ref.related_document_id) continue;
const newerDataset = await Dataset.query().where('id', ref.related_document_id).preload('identifier').preload('titles').first();
if (newerDataset) {
const versionInfo = {
id: newerDataset.id,
publish_id: newerDataset.publish_id,
doi: newerDataset.identifier?.value || null,
main_title: newerDataset.mainTitle || null,
server_date_published: newerDataset.server_date_published,
relation: 'IsNewVersionOf', // From perspective of current dataset
};
newerVersions.push(versionInfo);
// Recursively get even newer versions
const evenNewerVersions = await this.getNewerVersions(newerDataset.id, visited);
newerVersions.push(...evenNewerVersions);
}
}
return newerVersions;
return datasets;
}
}

View file

@@ -2,103 +2,53 @@ import type { HttpContext } from '@adonisjs/core/http';
import File from '#models/file';
import { StatusCodes } from 'http-status-codes';
import * as fs from 'fs';
import { DateTime } from 'luxon';
import * as path from 'path';
// node ace make:controller Author
export default class FileController {
// @Get("download/:id")
public async findOne({ response, params }: HttpContext) {
const id = params.id;
// const file = await File.findOrFail(id);
// Load file with its related dataset to check embargo
const file = await File.query()
.where('id', id)
.preload('dataset') // or 'dataset' - whatever your relationship is named
.firstOrFail();
const file = await File.findOrFail(id);
// const file = await File.findOne({
// where: { id: id },
// });
if (file) {
const filePath = '/storage/app/data/' + file.pathName;
const ext = path.extname(filePath);
const fileName = file.label + ext;
try {
fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
// console.log("can read/write:", path);
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mimeType)
.header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET,POST');
response.status(StatusCodes.OK).download(filePath);
} catch (err) {
// console.log("no access:", path);
response.status(StatusCodes.NOT_FOUND).send({
message: `File with id ${id} doesn't exist on file server`,
});
}
if (!file) {
return response.status(StatusCodes.NOT_FOUND).send({
// res.status(StatusCodes.OK).sendFile(filePath, (err) => {
// // res.setHeader("Content-Type", "application/json");
// // res.removeHeader("Content-Disposition");
// res.status(StatusCodes.NOT_FOUND).send({
// message: `File with id ${id} doesn't exist on file server`,
// });
// });
} else {
response.status(StatusCodes.NOT_FOUND).send({
message: `Cannot find File with id=${id}.`,
});
}
const dataset = file.dataset;
// Files from unpublished datasets are now blocked
if (dataset.server_state !== 'published') {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File access denied: Dataset is not published.`,
});
}
if (dataset && this.isUnderEmbargo(dataset.embargo_date)) {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File is under embargo until ${dataset.embargo_date?.toFormat('yyyy-MM-dd')}`,
});
}
// Proceed with file download
const filePath = '/storage/app/data/' + file.pathName;
const fileExt = file.filePath.split('.').pop() || '';
// const fileName = file.label + fileExt;
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Determine if file can be previewed inline in browser
const canPreviewInline = (mimeType: string): boolean => {
const type = mimeType.toLowerCase();
return (
type === 'application/pdf' ||
type.startsWith('image/') ||
type.startsWith('text/') ||
type === 'application/json' ||
type === 'application/xml' ||
// Uncomment if you want video/audio inline
type.startsWith('video/') ||
type.startsWith('audio/')
);
};
const disposition = canPreviewInline(file.mimeType) ? 'inline' : 'attachment';
try {
fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
// console.log("can read/write:", filePath);
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mimeType)
.header('Content-Disposition', `${disposition}; filename="${fileName}"`)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.status(StatusCodes.OK).download(filePath);
} catch (err) {
// console.log("no access:", path);
response.status(StatusCodes.NOT_FOUND).send({
message: `File with id ${id} doesn't exist on file server`,
});
}
}
/**
* Check if the dataset is under embargo
* Compares only dates (ignoring time) for embargo check
* @param embargoDate - The embargo date from dataset
* @returns true if under embargo, false if embargo has passed or no embargo set
*/
private isUnderEmbargo(embargoDate: DateTime | null): boolean {
// No embargo date set - allow download
if (!embargoDate) {
return false;
}
// Get current date at start of day (00:00:00)
const today = DateTime.now().startOf('day');
// Get embargo date at start of day (00:00:00)
const embargoDateOnly = embargoDate.startOf('day');
// With `>=`, the file is still under embargo ON the embargo date itself;
// the embargo only lifts the day AFTER the embargo date.
// NOTE(review): the caller's message says "under embargo until <date>". If the
// intent is to release the file ON the embargo date, this should be `>` — confirm.
return embargoDateOnly >= today;
}
}

View file

@ -17,8 +17,7 @@ export default class HomeController {
// .preload('authors')
// .orderBy('server_date_published');
const datasets = await db
.from('documents as doc')
const datasets = await db.from('documents as doc')
.select(['publish_id', 'server_date_published', db.raw(`date_part('year', server_date_published) as pub_year`)])
.where('server_state', serverState)
.innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
@ -60,6 +59,7 @@ export default class HomeController {
// const year = params.year;
// const from = parseInt(year);
try {
// const datasets = await Database.from('documents as doc')
// .select([Database.raw(`date_part('month', server_date_published) as pub_month`), Database.raw('COUNT(*) as count')])
// .where('server_state', serverState)
@ -68,12 +68,9 @@ export default class HomeController {
// .groupBy('pub_month');
// // .orderBy('server_date_published');
// Calculate the last 4 years including the current year
const currentYear = new Date().getFullYear();
const years = Array.from({ length: 4 }, (_, i) => currentYear - (i + 1)).reverse();
const years = [2021, 2022, 2023]; // Add the second year
const result = await db
.from('documents as doc')
const result = await db.from('documents as doc')
.select([
db.raw(`date_part('year', server_date_published) as pub_year`),
db.raw(`date_part('month', server_date_published) as pub_month`),
@ -86,7 +83,7 @@ export default class HomeController {
.groupBy('pub_year', 'pub_month')
.orderBy('pub_year', 'asc')
.orderBy('pub_month', 'asc');
const labels = Array.from({ length: 12 }, (_, i) => i + 1); // Assuming 12 months
const inputDatasets: Map<string, ChartDataset> = result.reduce((acc, item) => {
@ -103,15 +100,15 @@ export default class HomeController {
acc[pub_year].data[pub_month - 1] = parseInt(count);
return acc;
return acc ;
}, {});
const outputDatasets = Object.entries(inputDatasets).map(([year, data]) => ({
data: data.data,
label: year,
borderColor: data.borderColor,
fill: data.fill,
}));
fill: data.fill
}));
const data = {
labels: labels,
@ -129,11 +126,11 @@ export default class HomeController {
private getRandomHexColor() {
    // Build a random CSS hex color string such as "#3FA2C7".
    const hexDigits = '0123456789ABCDEF';
    let color = '#';
    let remaining = 6;
    while (remaining > 0) {
        // One uniformly random digit from the 16-character alphabet per iteration.
        color += hexDigits.charAt(Math.floor(Math.random() * 16));
        remaining--;
    }
    return color;
}
}
@ -142,4 +139,5 @@ interface ChartDataset {
label: string;
borderColor: string;
fill: boolean;
}

View file

@ -5,7 +5,7 @@ import BackupCode from '#models/backup_code';
// import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException';
import { authValidator } from '#validators/auth';
import hash from '@adonisjs/core/services/hash';
import db from '@adonisjs/lucid/services/db';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
// import { Authenticator } from '@adonisjs/auth';
// import { LoginState } from 'Contracts/enums';
@ -29,10 +29,6 @@ export default class AuthController {
const { email, password } = request.only(['email', 'password']);
try {
await db.connection().rawQuery('SELECT 1')
// // attempt to verify credential and login user
// await auth.use('web').attempt(email, plainPassword);
@ -55,9 +51,6 @@ export default class AuthController {
await auth.use('web').login(user);
} catch (error) {
if (error.code === 'ECONNREFUSED') {
throw error
}
// if login fails, return vague form message and redirect back
session.flash('message', 'Your username, email, or password is incorrect');
return response.redirect().back();

View file

@ -3,7 +3,7 @@ import { Client } from '@opensearch-project/opensearch';
import User from '#models/user';
import Dataset from '#models/dataset';
import DatasetIdentifier from '#models/dataset_identifier';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import XmlModel from '#app/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs';
@ -18,33 +18,9 @@ import { HttpException } from 'node-exceptions';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main';
// import { resolveMx } from 'dns/promises';
// import * as net from 'net';
import { validate } from 'deep-email-validator';
import {
TitleTypes,
DescriptionTypes,
ContributorTypes,
PersonNameTypes,
ReferenceIdentifierTypes,
RelationTypes,
SubjectTypes,
DatasetTypes,
} from '#contracts/enums';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import db from '@adonisjs/lucid/services/db';
import Project from '#models/project';
import License from '#models/license';
import Language from '#models/language';
import File from '#models/file';
import Coverage from '#models/coverage';
import Title from '#models/title';
import Description from '#models/description';
import Subject from '#models/subject';
import DatasetReference from '#models/dataset_reference';
import Collection from '#models/collection';
import CollectionRole from '#models/collection_role';
import { updateEditorDatasetValidator } from '#validators/dataset';
import { savePersons } from '#app/utils/utility-functions';
// Create a new instance of the client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
@ -87,15 +63,8 @@ export default class DatasetsController {
}
datasets.orderBy(attribute, sortOrder);
} else {
// datasets.orderBy('id', 'asc');
// Custom ordering to prioritize rejected_editor state
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_reviewer' THEN 0
ELSE 1
END ASC,
id ASC
`);
// users.orderBy('created_at', 'desc');
datasets.orderBy('id', 'asc');
}
// const users = await User.query().orderBy('login').paginate(page, limit);
@ -188,16 +157,10 @@ export default class DatasetsController {
}
}
public async approve({ request, inertia, response, auth }: HttpContext) {
public async approve({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// $dataset = Dataset::with('user:id,login')->findOrFail($id);
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();
const dataset = await Dataset.findOrFail(id);
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
@ -223,7 +186,7 @@ export default class DatasetsController {
});
}
public async approveUpdate({ request, response, auth }: HttpContext) {
public async approveUpdate({ request, response }: HttpContext) {
const approveDatasetSchema = vine.object({
reviewer_id: vine.number(),
});
@ -236,11 +199,7 @@ export default class DatasetsController {
throw error;
}
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();
const dataset = await Dataset.findOrFail(id);
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
@ -258,9 +217,6 @@ export default class DatasetsController {
if (dataset.reject_reviewer_note != null) {
dataset.reject_reviewer_note = null;
}
if (dataset.reject_editor_note != null) {
dataset.reject_editor_note = null;
}
//save main and additional titles
const reviewer_id = request.input('reviewer_id', null);
@ -271,15 +227,10 @@ export default class DatasetsController {
}
}
public async reject({ request, inertia, response, auth }: HttpContext) {
public async reject({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
// .preload('titles')
// .preload('descriptions')
.preload('user', (builder) => {
@ -304,17 +255,77 @@ export default class DatasetsController {
});
}
// private async checkEmailDomain(email: string): Promise<boolean> {
// const domain = email.split('@')[1];
// try {
// // Step 1: Check MX records for the domain
// const mxRecords = await resolveMx(domain);
// if (mxRecords.length === 0) {
// return false; // No MX records, can't send email
// }
// // Sort MX records by priority
// mxRecords.sort((a, b) => a.priority - b.priority);
// // Step 2: Attempt SMTP connection to the first available mail server
// const smtpServer = mxRecords[0].exchange;
// return await this.checkMailboxExists(smtpServer, email);
// } catch (error) {
// console.error('Error during MX lookup or SMTP validation:', error);
// return false;
// }
// }
//// Helper function to check if the mailbox exists using SMTP
// private async checkMailboxExists(smtpServer: string, email: string): Promise<boolean> {
// return new Promise((resolve, reject) => {
// const socket = net.createConnection(25, smtpServer);
// socket.on('connect', () => {
// socket.write(`HELO ${smtpServer}\r\n`);
// socket.write(`MAIL FROM: <test@example.com>\r\n`);
// socket.write(`RCPT TO: <${email}>\r\n`);
// });
// socket.on('data', (data) => {
// const response = data.toString();
// if (response.includes('250')) {
// // 250 is an SMTP success code
// socket.end();
// resolve(true); // Email exists
// } else if (response.includes('550')) {
// // 550 means the mailbox doesn't exist
// socket.end();
// resolve(false); // Email doesn't exist
// }
// });
// socket.on('error', (error) => {
// console.error('SMTP connection error:', error);
// socket.end();
// resolve(false);
// });
// socket.on('end', () => {
// // SMTP connection closed
// });
// socket.setTimeout(5000, () => {
// // Timeout after 5 seconds
// socket.end();
// resolve(false); // Assume email doesn't exist if no response
// });
// });
// }
public async rejectUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user!;
if (!authUser) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
.preload('user', (builder) => {
builder.select('id', 'login', 'email');
})
@ -342,7 +353,7 @@ export default class DatasetsController {
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be rejected. Datset has server state ${dataset.server_state}.`,
'warning',
'warning'
)
.redirect()
.toRoute('editor.dataset.list');
@ -377,9 +388,7 @@ export default class DatasetsController {
emailStatusMessage = ` A rejection email was successfully sent to ${dataset.user.email}.`;
} catch (error) {
logger.error(error);
return response
.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
.toRoute('editor.dataset.list');
return response.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error').toRoute('editor.dataset.list');
}
} else {
emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.user.email}) is not valid.`;
@ -395,16 +404,11 @@ export default class DatasetsController {
.toRoute('editor.dataset.list');
}
public async publish({ request, inertia, response, auth }: HttpContext) {
public async publish({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
.preload('titles')
.preload('authors')
// .preload('persons', (builder) => {
@ -426,14 +430,10 @@ export default class DatasetsController {
return inertia.render('Editor/Dataset/Publish', {
dataset,
can: {
reject: await auth.user?.can(['dataset-editor-reject']),
publish: await auth.user?.can(['dataset-publish']),
},
});
}
public async publishUpdate({ request, response, auth }: HttpContext) {
public async publishUpdate({ request, response }: HttpContext) {
const publishDatasetSchema = vine.object({
publisher_name: vine.string().trim(),
});
@ -445,12 +445,7 @@ export default class DatasetsController {
throw error;
}
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).firstOrFail();
const dataset = await Dataset.findOrFail(id);
// let test = await Dataset.getMax('publish_id');
// const maxPublishId = await Database.from('documents').max('publish_id as max_publish_id').first();
@ -476,139 +471,10 @@ export default class DatasetsController {
}
}
/**
 * Render the confirmation page for rejecting a reviewed dataset back to its reviewer.
 *
 * Only the dataset's assigned editor may perform this action, and only while the
 * dataset is in the 'reviewed' state; otherwise the user is redirected back to the
 * editor's dataset list with a flash warning.
 */
public async rejectToReviewer({ request, inertia, response, auth }: HttpContext) {
    const id = request.param('id');
    const user = auth.user;
    if (!user) {
        return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
    }
    const dataset = await Dataset.query()
        .where('id', id)
        .where('editor_id', user.id) // Ensure the user is the editor of the dataset
        .preload('reviewer', (builder) => {
            builder.select('id', 'login', 'email');
        })
        .firstOrFail();
    const validStates = ['reviewed'];
    if (!validStates.includes(dataset.server_state)) {
        // session.flash('errors', 'Invalid server state!');
        // Fix: flash(message, category) — message first, category second, matching
        // every other flash() call in this controller (the arguments were swapped here).
        return response
            .flash(
                `Invalid server state. Dataset with id ${id} cannot be rejected to the reviewer. Datset has server state ${dataset.server_state}.`,
                'warning',
            )
            .redirect()
            .toRoute('editor.dataset.list');
    }
    return inertia.render('Editor/Dataset/RejectToReviewer', {
        dataset,
    });
}
public async rejectToReviewerUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user!;
if (!authUser) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
public async doiCreate({ request, inertia }: HttpContext) {
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', authUser.id) // Ensure the user is the editor of the dataset
.preload('reviewer', (builder) => {
builder.select('id', 'login', 'email');
})
.firstOrFail();
const newSchema = vine.object({
server_state: vine.string().trim(),
reject_editor_note: vine.string().trim().minLength(10).maxLength(500),
send_mail: vine.boolean().optional(),
});
try {
// await request.validate({ schema: newSchema });
const validator = vine.compile(newSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
const validStates = ['reviewed'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be rejected to reviewer. Datset has server state ${dataset.server_state}.`,
'warning',
)
.redirect()
.toRoute('editor.dataset.list');
}
dataset.server_state = 'rejected_to_reviewer';
const rejectEditorNote = request.input('reject_editor_note', '');
dataset.reject_editor_note = rejectEditorNote;
// add logic for sending reject message
const sendMail = request.input('send_email', false);
// const validRecipientEmail = await this.checkEmailDomain('arno.kaimbacher@outlook.at');
const validationResult = await validate({
email: dataset.reviewer.email,
validateSMTP: false,
});
const validRecipientEmail: boolean = validationResult.valid;
await dataset.save();
let emailStatusMessage = '';
if (sendMail == true) {
if (dataset.reviewer.email && validRecipientEmail) {
try {
await mail.send((message) => {
message.to(dataset.reviewer.email).subject('Dataset Rejection Notification').html(`
<p>Dear ${dataset.reviewer.login},</p>
<p>Your dataset with ID ${dataset.id} has been rejected.</p>
<p>Reason for rejection: ${rejectEditorNote}</p>
<p>Best regards,<br>Your Tethys editor: ${authUser.login}</p>
`);
});
emailStatusMessage = ` A rejection email was successfully sent to ${dataset.reviewer.email}.`;
} catch (error) {
logger.error(error);
return response
.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
.toRoute('editor.dataset.list');
}
} else {
emailStatusMessage = ` However, the email could not be sent because the submitter's email address (${dataset.reviewer.email}) is not valid.`;
}
}
return response
.flash(
`You have successfully rejected dataset ${dataset.id} reviewed by ${dataset.reviewer.login}.${emailStatusMessage}`,
'message',
)
.toRoute('editor.dataset.list');
}
public async doiCreate({ request, inertia, auth, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.where('id', id)
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
.preload('titles')
.preload('descriptions')
// .preload('identifier')
@ -619,494 +485,61 @@ export default class DatasetsController {
});
}
public async doiStore({ request, response, auth }: HttpContext) {
public async doiStore({ request, response }: HttpContext) {
const dataId = request.param('publish_id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Load dataset with minimal required relationships
const dataset = await Dataset.query()
.where('editor_id', user.id) // Ensure the user is the editor of the dataset
// .preload('xmlCache')
.where('publish_id', dataId)
.firstOrFail();
const prefix = process.env.DATACITE_PREFIX || '';
const base_domain = process.env.BASE_DOMAIN || '';
// Generate DOI metadata XML
const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;
// Prepare DOI registration data
const doiValue = `${prefix}/tethys.${dataset.publish_id}`; //'10.21388/tethys.213'
const landingPageUrl = `https://doi.${getDomain(base_domain)}/${prefix}/tethys.${dataset.publish_id}`; //https://doi.dev.tethys.at/10.21388/tethys.213
let prefix = '';
let base_domain = '';
// const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
prefix = process.env.DATACITE_PREFIX || '';
base_domain = process.env.BASE_DOMAIN || '';
// Register DOI with DataCite
// register DOI:
const doiValue = prefix + '/tethys.' + dataset.publish_id; //'10.21388/tethys.213'
const landingPageUrl = 'https://doi.' + getDomain(base_domain) + '/' + prefix + '/tethys.' + dataset.publish_id; //https://doi.dev.tethys.at/10.21388/tethys.213
const doiClient = new DoiClient();
const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);
if (dataciteResponse?.status !== 201) {
if (dataciteResponse?.status === 201) {
// if response OK 201; save the Identifier value into db
const doiIdentifier = new DatasetIdentifier();
doiIdentifier.value = doiValue;
doiIdentifier.dataset_id = dataset.id;
doiIdentifier.type = 'doi';
doiIdentifier.status = 'findable';
// save modified date of datset for re-caching model in db an update the search index
dataset.server_date_modified = DateTime.now();
// save updated dataset to db an index to OpenSearch
try {
await dataset.related('identifier').save(doiIdentifier);
const index_name = 'tethys-records';
await Index.indexDocument(dataset, index_name);
} catch (error) {
logger.error(`${__filename}: Indexing document ${dataset.id} failed: ${error.message}`);
// Log the error or handle it as needed
throw new HttpException(error.message);
}
return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
} else {
const message = `Unexpected DataCite MDS response code ${dataciteResponse?.status}`;
// Log the error or handle it as needed
throw new DoiClientException(dataciteResponse?.status, message);
}
// DOI registration successful - persist and index
try {
// Save identifier
await this.persistDoiAndIndex(dataset, doiValue);
return response.toRoute('editor.dataset.list').flash('message', 'You have successfully created a DOI for the dataset!');
} catch (error) {
logger.error(`${__filename}: Failed to persist DOI and index dataset ${dataset.id}: ${error.message}`);
throw new HttpException(error.message);
}
// return response.toRoute('editor.dataset.list').flash('message', xmlMeta);
}
/**
 * Persist a freshly registered DOI for a dataset and refresh its search index entry.
 *
 * Steps: create the identifier row, bump the dataset's modification timestamp,
 * drop the stale XML cache, then re-read the dataset and push it to OpenSearch.
 */
private async persistDoiAndIndex(dataset: Dataset, doiValue: string): Promise<void> {
    // Build the DOI identifier record for this dataset.
    const identifier = new DatasetIdentifier();
    identifier.value = doiValue;
    identifier.dataset_id = dataset.id;
    identifier.type = 'doi';
    identifier.status = 'findable';

    // Persist the identifier through the relationship (performs the insert).
    await dataset.related('identifier').save(identifier);

    // Record that the dataset changed so consumers see a fresh modification date.
    dataset.server_date_modified = DateTime.now();
    await dataset.save();

    // The cached XML no longer reflects the dataset — invalidate it.
    await this.invalidateDatasetCache(dataset);

    // Re-read the dataset with its identifier and (now absent) cache so indexing
    // works from fresh state rather than the stale in-memory model.
    const reloaded = await Dataset.query().where('id', dataset.id).preload('identifier').preload('xmlCache').firstOrFail();

    // Push the up-to-date document into the search index.
    const indexName = process.env.OPENSEARCH_INDEX || 'tethys-records';
    await Index.indexDocument(reloaded, indexName);

    logger.info(`Successfully created DOI ${doiValue} and indexed dataset ${dataset.id}`);
}
/**
* Invalidate XML cache for dataset
* Ensures fresh cache generation on next access
*/
private async invalidateDatasetCache(dataset: Dataset): Promise<void> {
await dataset.load('xmlCache');
if (dataset.xmlCache) {
await dataset.xmlCache.delete();
logger.debug(`Invalidated XML cache for dataset ${dataset.id}`);
}
}
public async show({}: HttpContext) {}
/**
 * Render the editor's dataset-edit form.
 *
 * Loads the dataset (scoped to the logged-in user's editor_id, so a non-editor
 * gets a 404 via firstOrFail), verifies it is in an editable server state, and
 * passes the dataset plus all lookup lists (title/description/contributor types,
 * languages, projects, licenses, years, …) to the Inertia 'Editor/Dataset/Edit' page.
 */
public async edit({ request, inertia, response, auth }: HttpContext) {
const id = request.param('id');
// Check if user is authenticated
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Prefilter by both id AND editor_id to ensure user has permission to edit
const datasetQuery = Dataset.query().where('id', id).where('editor_id', user.id);
datasetQuery
.preload('titles', (query) => query.orderBy('id', 'asc'))
.preload('descriptions', (query) => query.orderBy('id', 'asc'))
.preload('coverage')
.preload('licenses')
.preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
// .preload('subjects')
.preload('subjects', (builder) => {
builder.orderBy('id', 'asc').withCount('datasets');
})
.preload('references')
.preload('files', (query) => {
query.orderBy('sort_order', 'asc'); // Sort by sort_order column
});
// This will throw 404 if editor_id does not match logged in user
const dataset = await datasetQuery.firstOrFail();
// Only datasets in these workflow states may be edited by an editor.
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be edited. Datset has server state ${dataset.server_state}.`,
'warning',
)
.toRoute('editor.dataset.list');
}
// NOTE(review): the filter destructures the entry KEY into a variable named
// 'value', so this excludes the enum KEY 'Main' — confirm keys and display
// values of TitleTypes do not diverge.
const titleTypes = Object.entries(TitleTypes)
.filter(([value]) => value !== 'Main')
.map(([key, value]) => ({ value: key, label: value }));
// Same key-vs-value caveat as titleTypes: this excludes the enum KEY 'Abstract'.
const descriptionTypes = Object.entries(DescriptionTypes)
.filter(([value]) => value !== 'Abstract')
.map(([key, value]) => ({ value: key, label: value }));
const languages = await Language.query().where('active', true).pluck('part1', 'part1');
// const contributorTypes = Config.get('enums.contributor_types');
const contributorTypes = Object.entries(ContributorTypes).map(([key, value]) => ({ value: key, label: value }));
// const nameTypes = Config.get('enums.name_types');
const nameTypes = Object.entries(PersonNameTypes).map(([key, value]) => ({ value: key, label: value }));
// const messages = await Database.table('messages')
// .pluck('help_text', 'metadata_element');
const projects = await Project.query().pluck('label', 'id');
// Selectable publication years: 1990 through the current year, inclusive.
const currentDate = new Date();
const currentYear = currentDate.getFullYear();
const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);
// NOTE(review): 'active' is compared against the STRING 'true' here, but against
// boolean true for languages above — verify the licenses.active column type.
const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
// const userHasRoles = user.roles;
// const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
// const checkeds = dataset.licenses.first().id;
// const doctypes = {
// analysisdata: { label: 'Analysis', value: 'analysisdata' },
// measurementdata: { label: 'Measurements', value: 'measurementdata' },
// monitoring: 'Monitoring',
// remotesensing: 'Remote Sensing',
// gis: 'GIS',
// models: 'Models',
// mixedtype: 'Mixed Type',
// };
return inertia.render('Editor/Dataset/Edit', {
dataset,
titletypes: titleTypes,
descriptiontypes: descriptionTypes,
contributorTypes,
nameTypes,
languages,
// messages,
projects,
licenses,
// datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
// checkeds,
years,
// languages,
subjectTypes: SubjectTypes,
referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
doctypes: DatasetTypes,
});
}
public async update({ request, response, session, auth }: HttpContext) {
// Get the dataset id from the route parameter
const datasetId = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Retrieve the dataset and load its existing files
const dataset = await Dataset.query().where('id', datasetId).where('editor_id', user.id).firstOrFail();
await dataset.load('files');
let trx: TransactionClientContract | null = null;
try {
await request.validateUsing(updateEditorDatasetValidator);
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
// const dataset = await Dataset.findOrFail(datasetId);
// save the licenses
const licenses: number[] = request.input('licenses', []);
// await dataset.useTransaction(trx).related('licenses').sync(licenses);
await dataset.useTransaction(trx).related('licenses').sync(licenses);
// save authors and contributors
await dataset.useTransaction(trx).related('authors').sync([]);
await dataset.useTransaction(trx).related('contributors').sync([]);
await savePersons(dataset, request.input('authors', []), 'author', trx);
await savePersons(dataset, request.input('contributors', []), 'contributor', trx);
//save the titles:
const titles = request.input('titles', []);
// const savedTitles:Array<Title> = [];
for (const titleData of titles) {
if (titleData.id) {
const title = await Title.findOrFail(titleData.id);
title.value = titleData.value;
title.language = titleData.language;
title.type = titleData.type;
if (title.$isDirty) {
await title.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const title = new Title();
title.fill(titleData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('titles').save(title);
}
}
// save the abstracts
const descriptions = request.input('descriptions', []);
// const savedTitles:Array<Title> = [];
for (const descriptionData of descriptions) {
if (descriptionData.id) {
const description = await Description.findOrFail(descriptionData.id);
description.value = descriptionData.value;
description.language = descriptionData.language;
description.type = descriptionData.type;
if (description.$isDirty) {
await description.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const description = new Description();
description.fill(descriptionData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('descriptions').save(description);
}
}
// Process all subjects/keywords from the request
const subjects = request.input('subjects');
for (const subjectData of subjects) {
// Case 1: Subject already exists in the database (has an ID)
if (subjectData.id) {
// Retrieve the existing subject
const existingSubject = await Subject.findOrFail(subjectData.id);
// Update subject properties from the request data
existingSubject.value = subjectData.value;
existingSubject.type = subjectData.type;
existingSubject.external_key = subjectData.external_key;
// Only save if there are actual changes
if (existingSubject.$isDirty) {
await existingSubject.save();
}
// Note: The relationship between dataset and subject is already established,
// so we don't need to attach it again
}
// Case 2: New subject being added (no ID)
else {
// Check if a subject with the same value and type already exists in the database
const subject = await Subject.firstOrNew({ value: subjectData.value, type: subjectData.type }, subjectData);
if (subject.$isNew === true) {
// If it's a completely new subject, create and associate it with the dataset
await dataset.useTransaction(trx).related('subjects').save(subject);
} else {
// If the subject already exists, just create the relationship
await dataset.useTransaction(trx).related('subjects').attach([subject.id]);
}
}
}
const subjectsToDelete = request.input('subjectsToDelete', []);
for (const subjectData of subjectsToDelete) {
if (subjectData.id) {
// const subject = await Subject.findOrFail(subjectData.id);
const subject = await Subject.query()
.where('id', subjectData.id)
.preload('datasets', (builder) => {
builder.orderBy('id', 'asc');
})
.withCount('datasets')
.firstOrFail();
// Check if the subject is used by multiple datasets
if (subject.$extras.datasets_count > 1) {
// If used by multiple datasets, just detach it from the current dataset
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
} else {
// If only used by this dataset, delete the subject completely
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
await subject.useTransaction(trx).delete();
}
}
}
// Process references
const references = request.input('references', []);
// First, get existing references to determine which ones to update vs. create
const existingReferences = await dataset.related('references').query();
const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));
for (const referenceData of references) {
if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
// Update existing reference
const reference = existingReferencesMap.get(referenceData.id);
if (reference) {
reference.merge(referenceData);
if (reference.$isDirty) {
await reference.useTransaction(trx).save();
}
}
} else {
// Create new reference
const dataReference = new DatasetReference();
dataReference.fill(referenceData);
await dataset.useTransaction(trx).related('references').save(dataReference);
}
}
// Handle references to delete if provided
const referencesToDelete = request.input('referencesToDelete', []);
for (const referenceData of referencesToDelete) {
if (referenceData.id) {
const reference = await DatasetReference.findOrFail(referenceData.id);
await reference.useTransaction(trx).delete();
}
}
// save coverage
const coverageData = request.input('coverage');
if (coverageData) {
if (coverageData.id) {
const coverage = await Coverage.findOrFail(coverageData.id);
coverage.merge(coverageData);
if (coverage.$isDirty) {
await coverage.useTransaction(trx).save();
}
}
}
const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
// dataset.type = request.input('type');
dataset.merge(input);
dataset.server_date_modified = DateTime.now();
// let test: boolean = dataset.$isDirty;
await dataset.useTransaction(trx).save();
await trx.commit();
// console.log('Dataset has been updated successfully');
session.flash('message', 'Dataset has been updated successfully');
// return response.redirect().toRoute('user.index');
return response.redirect().toRoute('editor.dataset.edit', [dataset.id]);
} catch (error) {
if (trx !== null) {
await trx.rollback();
}
console.error('Failed to update dataset and related models:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
}
public async categorize({ inertia, request, response, auth }: HttpContext) {
const id = request.param('id');
// Check if user is authenticated
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Preload dataset and its "collections" relation
const dataset = await Dataset.query().where('id', id).where('editor_id', user.id).preload('collections').firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be edited. Datset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('editor.dataset.list');
}
const collectionRoles = await CollectionRole.query()
.whereIn('name', ['ddc', 'ccs'])
.preload('collections', (coll: Collection) => {
// preloa only top level collection with noparent_id
coll.whereNull('parent_id').orderBy('number', 'asc');
})
.exec();
return inertia.render('Editor/Dataset/Category', {
collectionRoles: collectionRoles,
dataset: dataset,
relatedCollections: dataset.collections,
});
}
public async categorizeUpdate({ request, response, session, auth }: HttpContext) {
// Get the dataset id from the route parameter
const id = request.param('id');
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Retrieve the dataset and load its existing files
const dataset = await Dataset.query().preload('files').where('id', id).where('editor_id', user.id).firstOrFail();
const validStates = ['editor_accepted', 'rejected_reviewer'];
if (!validStates.includes(dataset.server_state)) {
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be categorized. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('editor.dataset.list');
}
let trx: TransactionClientContract | null = null;
try {
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
// Retrieve the selected collections from the request.
// This should be an array of collection ids.
const collections: number[] = request.input('collections', []);
// Synchronize the dataset collections using the transaction.
await dataset.useTransaction(trx).related('collections').sync(collections);
// Commit the transaction.await trx.commit()
await trx.commit();
// Redirect with a success flash message.
// return response.flash('success', 'Dataset collections updated successfully!').redirect().toRoute('dataset.list');
session.flash('message', 'Dataset collections updated successfully!');
return response.redirect().toRoute('editor.dataset.list');
} catch (error) {
if (trx !== null) {
await trx.rollback();
}
console.error('Failed tocatgorize dataset collections:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
}
    // Placeholder for the edit action; intentionally empty (not implemented yet).
    public async edit({}: HttpContext) {}
// public async update({}: HttpContextContract) {}
public async updateOpensearch({ response }: HttpContext) {
public async update({ response }: HttpContext) {
const id = 273; //request.param('id');
const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();
// add xml elements
@ -1222,30 +655,6 @@ export default class DatasetsController {
}
}
public async download({ params, response }: HttpContext) {
const id = params.id;
// Find the file by ID
const file = await File.findOrFail(id);
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}
    // Placeholder for the destroy action; intentionally empty (not implemented yet).
    public async destroy({}: HttpContext) {}
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
@ -1255,18 +664,19 @@ export default class DatasetsController {
}
}
private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
private async getDatasetXmlDomNode(dataset: Dataset) {
const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
// Load existing cache if available
await dataset.load('xmlCache');
xmlModel.excludeEmptyFields();
xmlModel.caching = true;
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
xmlModel.xmlCache = dataset.xmlCache;
}
// return cache.getDomDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return domDocument;
}
}

View file

@ -15,7 +15,7 @@ import { OaiModelException, BadOaiModelException } from '#app/exceptions/OaiMode
import Dataset from '#models/dataset';
import Collection from '#models/collection';
import { getDomain, preg_match } from '#app/utils/utility-functions';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import XmlModel from '#app/Library/XmlModel';
import logger from '@adonisjs/core/services/logger';
import ResumptionToken from '#app/Library/Oai/ResumptionToken';
// import Config from '@ioc:Adonis/Core/Config';
@ -292,7 +292,7 @@ export default class OaiController {
this.xsltParameter['repIdentifier'] = repIdentifier;
const datasetNode = this.xml.root().ele('Datasets');
const paginationParams: PagingParameter = {
const paginationParams: PagingParameter ={
cursor: 0,
totalLength: 0,
start: maxRecords + 1,
@ -333,7 +333,7 @@ export default class OaiController {
}
private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) {
this.validateMetadataPrefix(oaiRequest, paginationParams);
this.validateMetadataPrefix(oaiRequest, paginationParams);
const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
'server_state',
this.deliveringDocumentStates,
@ -347,20 +347,16 @@ export default class OaiController {
finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
paginationParams: PagingParameter,
oaiRequest: Dictionary,
maxRecords: number,
maxRecords: number
) {
const totalResult = await finder
.clone()
.count('* as total')
.first()
.then((res) => res?.$extras.total);
paginationParams.totalLength = Number(totalResult);
paginationParams.totalLength = Number(totalResult);
const combinedRecords: Dataset[] = await finder
.select('publish_id')
.orderBy('publish_id')
.offset(0)
.limit(maxRecords * 2);
const combinedRecords: Dataset[] = await finder.select('publish_id').orderBy('publish_id').offset(0).limit(maxRecords*2);
paginationParams.activeWorkIds = combinedRecords.slice(0, 100).map((dat) => Number(dat.publish_id));
paginationParams.nextDocIds = combinedRecords.slice(100).map((dat) => Number(dat.publish_id));
@ -606,17 +602,19 @@ export default class OaiController {
}
private async getDatasetXmlDomNode(dataset: Dataset) {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
xmlModel.excludeEmptyFields();
xmlModel.caching = true;
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
xmlModel.xmlCache = dataset.xmlCache;
}
// return cache.toXmlDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
// return cache.getDomDocument();
const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return domDocument;
}
private addSpecInformation(domNode: XMLBuilder, information: string) {

View file

@ -9,7 +9,6 @@ import vine from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main';
import logger from '@adonisjs/core/services/logger';
import { validate } from 'deep-email-validator';
import File from '#models/file';
interface Dictionary {
[index: string]: string;
@ -39,21 +38,13 @@ export default class DatasetsController {
}
datasets.orderBy(attribute, sortOrder);
} else {
// datasets.orderBy('id', 'asc');
// Custom ordering to prioritize rejected_editor state
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_to_reviewer' THEN 0
ELSE 1
END ASC,
id ASC
`);
// users.orderBy('created_at', 'desc');
datasets.orderBy('id', 'asc');
}
// const users = await User.query().orderBy('login').paginate(page, limit);
const myDatasets = await datasets
// .where('server_state', 'approved')
.whereIn('server_state', ['approved', 'rejected_to_reviewer'])
.where('server_state', 'approved')
.where('reviewer_id', user.id)
.preload('titles')
@ -71,51 +62,7 @@ export default class DatasetsController {
});
}
    /**
     * Render the review page for a dataset (reviewer workflow).
     *
     * Loads the dataset with every relation the review page renders, rejects
     * datasets whose server_state is not reviewable, and passes permission
     * flags (review/reject) derived from the authenticated user's ACL.
     */
    public async review({ request, inertia, response, auth }: HttpContext) {
        const id = request.param('id');
        const datasetQuery = Dataset.query().where('id', id);
        // Preload all relations shown on the review page; explicit ordering
        // keeps the rendered lists stable between requests.
        datasetQuery
            .preload('titles', (query) => query.orderBy('id', 'asc'))
            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
            .preload('coverage')
            .preload('licenses')
            .preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
            .preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
            // .preload('subjects')
            .preload('subjects', (builder) => {
                builder.orderBy('id', 'asc').withCount('datasets');
            })
            .preload('references')
            .preload('project')
            .preload('files', (query) => {
                query.orderBy('sort_order', 'asc'); // Sort by sort_order column
            });
        const dataset = await datasetQuery.firstOrFail();

        // Only datasets in these server states may be reviewed.
        const validStates = ['approved', 'rejected_to_reviewer'];
        if (!validStates.includes(dataset.server_state)) {
            // session.flash('errors', 'Invalid server state!');
            return response
                .flash(
                    'warning',
                    `Invalid server state. Dataset with id ${id} cannot be reviewed. Datset has server state ${dataset.server_state}.`,
                )
                .redirect()
                .toRoute('reviewer.dataset.list');
        }
        return inertia.render('Reviewer/Dataset/Review', {
            dataset,
            // Permission flags consumed by the front end to show/hide actions.
            can: {
                review: await auth.user?.can(['dataset-review']),
                reject: await auth.user?.can(['dataset-review-reject']),
            },
        });
    }
public async review_old({ request, inertia, response, auth }: HttpContext) {
public async review({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
@ -211,10 +158,6 @@ export default class DatasetsController {
return inertia.render('Reviewer/Dataset/Review', {
dataset,
fields: fields,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
@ -223,7 +166,7 @@ export default class DatasetsController {
// const { id } = params;
const dataset = await Dataset.findOrFail(id);
const validStates = ['approved', 'rejected_to_reviewer'];
const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
@ -237,10 +180,6 @@ export default class DatasetsController {
}
dataset.server_state = 'reviewed';
// if editor has rejected to reviewer:
if (dataset.reject_editor_note != null) {
dataset.reject_editor_note = null;
}
try {
// await dataset.related('editor').associate(user); // speichert schon ab
@ -264,7 +203,7 @@ export default class DatasetsController {
})
.firstOrFail();
const validStates = ['approved', 'rejected_to_reviewer'];
const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
@ -311,12 +250,12 @@ export default class DatasetsController {
throw error;
}
const validStates = ['approved', 'rejected_to_reviewer'];
const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response
.flash(
.flash(
`Invalid server state. Dataset with id ${id} cannot be rejected. Datset has server state ${dataset.server_state}.`,
'warning',
)
@ -368,41 +307,4 @@ export default class DatasetsController {
.toRoute('reviewer.dataset.list')
.flash(`You have rejected dataset ${dataset.id}! to editor ${dataset.editor.login}`, 'message');
}
// public async download({ params, response }: HttpContext) {
// const id = params.id;
// // Find the file by ID
// const file = await File.findOrFail(id);
// // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
// const filePath = file.filePath;
// const fileExt = file.filePath.split('.').pop() || '';
// // Set the response headers and download the file
// response.header('Content-Type', file.mime_type || 'application/octet-stream');
// response.attachment(`${file.label}.${fileExt}`);
// return response.download(filePath);
// }
    /**
     * Stream a stored file to the client as a download attachment.
     * Responds 404 (via findOrFail) when no file with the given id exists.
     */
    public async download({ params, response }: HttpContext) {
        const id = params.id;
        // Find the file by ID
        const file = await File.findOrFail(id);
        // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
        const filePath = file.filePath;
        // Extension = last dot-separated segment of the stored path ('' fallback).
        const fileExt = file.filePath.split('.').pop() || '';
        // Check if label already includes the extension
        const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
        // Set the response headers and download the file
        response
            .header('Cache-Control', 'no-cache private')
            .header('Content-Description', 'File Transfer')
            .header('Content-Type', file.mime_type || 'application/octet-stream')
            // .header('Content-Disposition', 'inline; filename=' + fileName)
            .header('Content-Transfer-Encoding', 'binary')
            .header('Access-Control-Allow-Origin', '*')
            .header('Access-Control-Allow-Methods', 'GET');
        response.attachment(fileName);
        return response.download(filePath);
    }
}

View file

@ -29,30 +29,23 @@ import {
} from '#contracts/enums';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import DatasetReference from '#models/dataset_reference';
import { cuid } from '@adonisjs/core/helpers';
import File from '#models/file';
import ClamScan from 'clamscan';
// import { ValidationException } from '@adonisjs/validator';
// import Drive from '@ioc:Adonis/Core/Drive';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import path from 'path';
import { Exception } from '@adonisjs/core/exceptions';
import { MultipartFile } from '@adonisjs/core/types/bodyparser';
import * as crypto from 'crypto';
import { pipeline } from 'node:stream/promises';
import { createWriteStream } from 'node:fs';
import type { Multipart } from '@adonisjs/bodyparser';
import * as fs from 'fs';
import { parseBytesSize, getConfigFor, getTmpPath, formatBytes } from '#app/utils/utility-functions';
interface Dictionary {
[index: string]: string;
}
import vine, { SimpleMessagesProvider, errors } from '@vinejs/vine';
export default class DatasetController {
/**
* Bodyparser config
*/
// config: BodyParserConfig = config.get('bodyparser');
public async index({ auth, request, inertia }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User;
const page = request.input('page', 1);
@ -76,16 +69,8 @@ export default class DatasetController {
}
datasets.orderBy(attribute, sortOrder);
} else {
// datasets.orderBy('id', 'asc');
// Custom ordering to prioritize rejected_editor state
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_editor' THEN 0
WHEN server_state = 'rejected_reviewer' THEN 1
ELSE 2
END ASC,
id ASC
`);
// users.orderBy('created_at', 'desc');
datasets.orderBy('id', 'asc');
}
// const results = await Database
@ -105,7 +90,6 @@ export default class DatasetController {
'reviewed',
'rejected_editor',
'rejected_reviewer',
'rejected_to_reviewer',
])
.where('account_id', user.id)
.preload('titles')
@ -207,8 +191,7 @@ export default class DatasetController {
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(2)
.arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
.minLength(1),
descriptions: vine
.array(
vine.object({
@ -222,8 +205,7 @@ export default class DatasetController {
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(1),
.arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
.minLength(1),
authors: vine
.array(
vine.object({
@ -234,9 +216,8 @@ export default class DatasetController {
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.minLength(1)
@ -251,9 +232,8 @@ export default class DatasetController {
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
}),
)
@ -300,8 +280,7 @@ export default class DatasetController {
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(2)
.arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
.minLength(1),
descriptions: vine
.array(
vine.object({
@ -315,8 +294,7 @@ export default class DatasetController {
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(1),
.arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
.minLength(1),
authors: vine
.array(
vine.object({
@ -327,9 +305,8 @@ export default class DatasetController {
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.minLength(1)
@ -344,9 +321,8 @@ export default class DatasetController {
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
}),
)
@ -426,99 +402,21 @@ export default class DatasetController {
}
public async store({ auth, request, response, session }: HttpContext) {
// At the top of the store() method, declare an array to hold temporary file paths
const uploadedTmpFiles: string[] = [];
// Aggregated limit example (adjust as needed)
const multipartConfig = getConfigFor('multipart');
const aggregatedLimit = multipartConfig.limit ? parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
// const aggregatedLimit = 200 * 1024 * 1024;
let totalUploadedSize = 0;
// // Helper function to format bytes as human-readable text
// function formatBytes(bytes: number): string {
// if (bytes === 0) return '0 Bytes';
// const k = 1024;
// const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
// const i = Math.floor(Math.log(bytes) / Math.log(k));
// return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
// }
// const enabledExtensions = await this.getEnabledExtensions();
const multipart: Multipart = request.multipart;
multipart.onFile('files', { deferValidations: true }, async (part) => {
// Attach an individual file size accumulator if needed
let fileUploadedSize = 0;
// Simply accumulate the size in on('data') without performing the expensive check per chunk
part.on('data', (chunk) => {
// reporter(chunk);
// Increase counters using the chunk length
fileUploadedSize += chunk.length;
});
// After the file is completely read, update the global counter and check aggregated limit
part.on('end', () => {
totalUploadedSize += fileUploadedSize;
part.file.size = fileUploadedSize;
// Record the temporary file path
if (part.file.tmpPath) {
uploadedTmpFiles.push(part.file.tmpPath);
}
if (totalUploadedSize > aggregatedLimit) {
// Clean up all temporary files if aggregate limit is exceeded
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
const error = new errors.E_VALIDATION_ERROR({
'upload error': `Aggregated upload limit of ${formatBytes(aggregatedLimit)} exceeded. The total size of files being uploaded would exceed the limit.`,
});
request.multipart.abort(error);
}
});
part.on('error', (error) => {
// fileUploadError = error;
request.multipart.abort(error);
});
// await pipeline(part, createWriteStream(filePath));
// return { filePath };
// Process file with error handling
try {
// Extract extension from the client file name, e.g. "Tethys 5 - Ampflwang_dataset.zip"
const ext = path.extname(part.file.clientName).replace('.', '');
// Attach the extracted extension to the file object for later use
part.file.extname = ext;
// part.file.sortOrder = part.file.sortOrder;
const tmpPath = getTmpPath(multipartConfig);
(part.file as any).tmpPath = tmpPath;
const writeStream = createWriteStream(tmpPath);
await pipeline(part, writeStream);
} catch (error) {
request.multipart.abort(new errors.E_VALIDATION_ERROR({ 'upload error': error.message }));
}
});
// node ace make:validator CreateDataset
try {
await multipart.process();
// // Instead of letting an error abort the controller, check if any error occurred
// Step 2 - Validate request body against the schema
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
// await request.validate(CreateDatasetValidator);
await request.validateUsing(createDatasetValidator);
// console.log({ payload });
} catch (error) {
// This is where you'd expect to catch any errors.
session.flash('errors', error.messages);
return response.redirect().back();
// Step 3 - Handle errors
// return response.badRequest(error.messages);
throw error;
}
let trx: TransactionClientContract | null = null;
try {
await request.validateUsing(createDatasetValidator);
trx = await db.transaction();
const user = (await User.find(auth.user?.id)) as User;
@ -527,14 +425,6 @@ export default class DatasetController {
await trx.commit();
console.log('Dataset and related models created successfully');
} catch (error) {
// Clean up temporary files if validation or later steps fail
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
if (trx !== null) {
await trx.rollback();
}
@ -547,19 +437,14 @@ export default class DatasetController {
return response.redirect().toRoute('dataset.list');
// return response.redirect().back();
}
private async createDatasetAndAssociations(
user: User,
request: HttpContext['request'],
trx: TransactionClientContract,
// uploadedFiles: Array<MultipartFile>,
) {
private async createDatasetAndAssociations(user: User, request: HttpContext['request'], trx: TransactionClientContract) {
// Create a new instance of the Dataset model:
const dataset = new Dataset();
dataset.type = request.input('type');
dataset.creating_corporation = request.input('creating_corporation');
dataset.language = request.input('language');
dataset.embargo_date = request.input('embargo_date');
dataset.project_id = request.input('project_id');
//await dataset.related('user').associate(user); // speichert schon ab
// Dataset.$getRelation('user').boot();
// Dataset.$getRelation('user').setRelated(dataset, user);
@ -668,7 +553,7 @@ export default class DatasetController {
newFile.fileSize = file.size;
newFile.mimeType = mimeType;
newFile.label = file.clientName;
newFile.sortOrder = index + 1;
newFile.sortOrder = index;
newFile.visibleInFrontdoor = true;
newFile.visibleInOai = true;
// let path = coverImage.filePath;
@ -819,25 +704,16 @@ export default class DatasetController {
'files.array.minLength': 'At least {{ min }} file upload is required.',
'files.*.size': 'file size is to big',
'files.*.extnames': 'file extension is not supported',
'embargo_date.date.afterOrEqual': `Embargo date must be on or after ${dayjs().add(10, 'day').format('DD.MM.YYYY')}`,
};
// public async release({ params, view }) {
public async release({ request, inertia, response, auth }: HttpContext) {
public async release({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
// Check if user is authenticated
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.preload('user', (builder) => {
builder.select('id', 'login');
})
.where('account_id', user.id) // Only fetch if user owns it
.where('id', id)
.firstOrFail();
@ -858,20 +734,9 @@ export default class DatasetController {
});
}
public async releaseUpdate({ request, response, auth }: HttpContext) {
public async releaseUpdate({ request, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
// Check if user is authenticated
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
const dataset = await Dataset.query()
.preload('files')
.where('id', id)
.where('account_id', user.id) // Only fetch if user owns it
.firstOrFail();
const dataset = await Dataset.query().preload('files').where('id', id).firstOrFail();
const validStates = ['inprogress', 'rejected_editor'];
if (!validStates.includes(dataset.server_state)) {
@ -949,24 +814,16 @@ export default class DatasetController {
// throw new GeneralException(trans('exceptions.publish.release.update_error'));
}
public async edit({ request, inertia, response, auth }: HttpContext) {
public async edit({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const user = auth.user;
// Check if user is authenticated
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// Prefilter by both id AND account_id
const datasetQuery = Dataset.query().where('id', id).where('account_id', user.id); // Only fetch if user owns it
const datasetQuery = Dataset.query().where('id', id);
datasetQuery
.preload('titles', (query) => query.orderBy('id', 'asc'))
.preload('descriptions', (query) => query.orderBy('id', 'asc'))
.preload('coverage')
.preload('licenses')
.preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('authors')
.preload('contributors')
// .preload('subjects')
.preload('subjects', (builder) => {
builder.orderBy('id', 'asc').withCount('datasets');
@ -975,17 +832,17 @@ export default class DatasetController {
.preload('files', (query) => {
query.orderBy('sort_order', 'asc'); // Sort by sort_order column
});
// This will throw 404 if dataset doesn't exist OR user doesn't own it
const dataset = await datasetQuery.firstOrFail();
const dataset = await datasetQuery.firstOrFail();
const validStates = ['inprogress', 'rejected_editor'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be edited. Datset has server state ${dataset.server_state}.`,
'warning',
`Invalid server state. Dataset with id ${id} cannot be edited. Datset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('dataset.list');
}
@ -1015,6 +872,19 @@ export default class DatasetController {
const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);
const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
// const userHasRoles = user.roles;
// const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
// const checkeds = dataset.licenses.first().id;
const doctypes = {
analysisdata: { label: 'Analysis', value: 'analysisdata' },
measurementdata: { label: 'Measurements', value: 'measurementdata' },
monitoring: 'Monitoring',
remotesensing: 'Remote Sensing',
gis: 'GIS',
models: 'Models',
mixedtype: 'Mixed Type',
};
return inertia.render('Submitter/Dataset/Edit', {
dataset,
@ -1033,114 +903,25 @@ export default class DatasetController {
subjectTypes: SubjectTypes,
referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
doctypes: DatasetTypes,
can: {
edit: await auth.user?.can(['dataset-edit']),
delete: await auth.user?.can(['dataset-delete']),
},
doctypes,
});
}
public async update({ request, response, session, auth }: HttpContext) {
// Get the dataset id from the route parameter
const datasetId = request.param('id');
const user = auth.user;
// Check if user is authenticated
if (!user) {
return response.flash('You must be logged in to update a dataset.', 'error').redirect().toRoute('app.login.show');
public async update({ request, response, session }: HttpContext) {
try {
// await request.validate(UpdateDatasetValidator);
await request.validateUsing(updateDatasetValidator);
} catch (error) {
// - Handle errors
// return response.badRequest(error.messages);
throw error;
// return response.badRequest(error.messages);
}
// Prefilter by both id AND account_id
const dataset = await Dataset.query()
.where('id', datasetId)
.where('account_id', user.id) // Only fetch if user owns it
.firstOrFail();
// // Check if the authenticated user is the owner of the dataset
// if (dataset.account_id !== user.id) {
// return response
// .flash(`Unauthorized access. You are not the owner of dataset with id ${id}.`, 'error')
// .redirect()
// .toRoute('dataset.list');
// }
await dataset.load('files');
// Accumulate the size of the already related files
// const preExistingFileSize = dataset.files.reduce((acc, file) => acc + file.fileSize, 0);
let preExistingFileSize = 0;
for (const file of dataset.files) {
preExistingFileSize += Number(file.fileSize);
}
const uploadedTmpFiles: string[] = [];
// Only process multipart if the request has a multipart content type
const contentType = request.request.headers['content-type'] || '';
if (contentType.includes('multipart/form-data')) {
const multipart: Multipart = request.multipart;
// Aggregated limit example (adjust as needed)
const multipartConfig = getConfigFor('multipart');
const aggregatedLimit = multipartConfig.limit ? parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
// Initialize totalUploadedSize with the size of existing files
let totalUploadedSize = preExistingFileSize;
multipart.onFile('files', { deferValidations: true }, async (part) => {
let fileUploadedSize = 0;
part.on('data', (chunk) => {
fileUploadedSize += chunk.length;
});
part.on('end', () => {
totalUploadedSize += fileUploadedSize;
part.file.size = fileUploadedSize;
if (part.file.tmpPath) {
uploadedTmpFiles.push(part.file.tmpPath);
}
if (totalUploadedSize > aggregatedLimit) {
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
const error = new errors.E_VALIDATION_ERROR({
'upload error': `Aggregated upload limit of ${formatBytes(aggregatedLimit)} exceeded. The total size of files being uploaded would exceed the limit.`,
});
request.multipart.abort(error);
}
});
part.on('error', (error) => {
request.multipart.abort(error);
});
try {
const fileNameWithoutParams = part.file.clientName.split('?')[0];
const ext = path.extname(fileNameWithoutParams).replace('.', '');
part.file.extname = ext;
const tmpPath = getTmpPath(multipartConfig);
(part.file as any).tmpPath = tmpPath;
const writeStream = createWriteStream(tmpPath);
await pipeline(part, writeStream);
} catch (error) {
request.multipart.abort(new errors.E_VALIDATION_ERROR({ 'upload error': error.message }));
}
});
try {
await multipart.process();
} catch (error) {
session.flash('errors', error.messages);
return response.redirect().back();
}
}
// await request.validate(UpdateDatasetValidator);
const id = request.param('id');
let trx: TransactionClientContract | null = null;
try {
await request.validateUsing(updateDatasetValidator);
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
@ -1201,148 +982,22 @@ export default class DatasetController {
}
}
// ============================================
// IMPROVED SUBJECTS PROCESSING
// ============================================
const subjects = request.input('subjects', []);
const currentDatasetSubjectIds = new Set<number>();
for (const subjectData of subjects) {
let subjectToRelate: Subject;
// Case 1: Subject has an ID (existing subject being updated)
if (subjectData.id) {
const existingSubject = await Subject.findOrFail(subjectData.id);
// Check if the updated value conflicts with another existing subject
const duplicateSubject = await Subject.query()
.where('value', subjectData.value)
.where('type', subjectData.type)
.where('language', subjectData.language || 'en') // Default language if not provided
.where('id', '!=', subjectData.id) // Exclude the current subject
.first();
if (duplicateSubject) {
// A duplicate exists - use the existing duplicate instead
subjectToRelate = duplicateSubject;
// Check if the original subject should be deleted (if it's only used by this dataset)
const originalSubjectUsage = await Subject.query()
.where('id', existingSubject.id)
.withCount('datasets')
.firstOrFail();
if (originalSubjectUsage.$extras.datasets_count <= 1) {
// Only used by this dataset, safe to delete after detaching
await dataset.useTransaction(trx).related('subjects').detach([existingSubject.id]);
await existingSubject.useTransaction(trx).delete();
} else {
// Used by other datasets, just detach from this one
await dataset.useTransaction(trx).related('subjects').detach([existingSubject.id]);
}
} else {
// No duplicate found, update the existing subject
existingSubject.value = subjectData.value;
existingSubject.type = subjectData.type;
existingSubject.language = subjectData.language;
existingSubject.external_key = subjectData.external_key;
if (existingSubject.$isDirty) {
await existingSubject.useTransaction(trx).save();
}
subjectToRelate = existingSubject;
}
}
// Case 2: New subject being added (no ID)
else {
// Use firstOrNew to either find existing or create new subject
subjectToRelate = await Subject.firstOrNew(
{
value: subjectData.value,
type: subjectData.type,
language: subjectData.language || 'en',
},
{
value: subjectData.value,
type: subjectData.type,
language: subjectData.language || 'en',
external_key: subjectData.external_key,
},
);
if (subjectToRelate.$isNew) {
await subjectToRelate.useTransaction(trx).save();
}
}
// Ensure the relationship exists between dataset and subject
const relationshipExists = await dataset.related('subjects').query().where('subject_id', subjectToRelate.id).first();
if (!relationshipExists) {
await dataset.useTransaction(trx).related('subjects').attach([subjectToRelate.id]);
}
// Track which subjects should remain associated with this dataset
currentDatasetSubjectIds.add(subjectToRelate.id);
}
// Handle explicit deletions
const subjectsToDelete = request.input('subjectsToDelete', []);
for (const subjectData of subjectsToDelete) {
if (subjectData.id) {
// const subject = await Subject.findOrFail(subjectData.id);
const subject = await Subject.query()
.where('id', subjectData.id)
.preload('datasets', (builder) => {
builder.orderBy('id', 'asc');
})
.withCount('datasets')
.firstOrFail();
// Detach the subject from this dataset
await dataset.useTransaction(trx).related('subjects').detach([subject.id]);
// If this was the only dataset using this subject, delete it entirely
if (subject.$extras.datasets_count <= 1) {
await subject.useTransaction(trx).delete();
}
// Remove from current set if it was added earlier
currentDatasetSubjectIds.delete(subjectData.id);
}
}
// Process references
const references = request.input('references', []);
// First, get existing references to determine which ones to update vs. create
const existingReferences = await dataset.related('references').query();
const existingReferencesMap: Map<number, DatasetReference> = new Map(existingReferences.map((ref) => [ref.id, ref]));
for (const referenceData of references) {
if (existingReferencesMap.has(referenceData.id) && referenceData.id) {
// Update existing reference
const reference = existingReferencesMap.get(referenceData.id);
if (reference) {
reference.merge(referenceData);
if (reference.$isDirty) {
await reference.useTransaction(trx).save();
}
// await dataset.useTransaction(trx).related('subjects').sync([]);
const keywords = request.input('subjects');
for (const keywordData of keywords) {
if (keywordData.id) {
const subject = await Subject.findOrFail(keywordData.id);
// await dataset.useTransaction(trx).related('subjects').attach([keywordData.id]);
subject.value = keywordData.value;
subject.type = keywordData.type;
subject.external_key = keywordData.external_key;
if (subject.$isDirty) {
await subject.save();
}
} else {
// Create new reference
const dataReference = new DatasetReference();
dataReference.fill(referenceData);
await dataset.useTransaction(trx).related('references').save(dataReference);
}
}
// Handle references to delete if provided
const referencesToDelete = request.input('referencesToDelete', []);
for (const referenceData of referencesToDelete) {
if (referenceData.id) {
const reference = await DatasetReference.findOrFail(referenceData.id);
await reference.useTransaction(trx).delete();
const keyword = new Subject();
keyword.fill(keywordData);
await dataset.useTransaction(trx).related('subjects').save(keyword, false);
}
}
@ -1374,9 +1029,9 @@ export default class DatasetController {
// handle new uploaded files:
const uploadedFiles: MultipartFile[] = request.files('files');
if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
for (const [index, file] of uploadedFiles.entries()) {
for (const [index, fileData] of uploadedFiles.entries()) {
try {
await this.scanFileForViruses(file.tmpPath); //, 'gitea.lan', 3310);
await this.scanFileForViruses(fileData.tmpPath); //, 'gitea.lan', 3310);
// await this.scanFileForViruses("/tmp/testfile.txt");
} catch (error) {
// If the file is infected or there's an error scanning the file, throw a validation exception
@ -1384,29 +1039,29 @@ export default class DatasetController {
}
// move to disk:
const fileName = this.generateFilename(file.extname as string);
const fileName = `file-${cuid()}.${fileData.extname}`; //'file-ls0jyb8xbzqtrclufu2z2e0c.pdf'
const datasetFolder = `files/${dataset.id}`; // 'files/307'
const datasetFullPath = path.join(`${datasetFolder}`, fileName);
// await file.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
// await file.move(drive.makePath(datasetFolder), {
// await fileData.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
// await fileData.move(drive.makePath(datasetFolder), {
// name: fileName,
// overwrite: true, // overwrite in case of conflict
// });
await file.moveToDisk(datasetFullPath, 'local', {
await fileData.moveToDisk(datasetFullPath, 'local', {
name: fileName,
overwrite: true, // overwrite in case of conflict
disk: 'local',
});
//save to db:
const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(file.clientName);
const mimeType = file.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(fileData.clientName);
const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
const newFile = await dataset
.useTransaction(trx)
.related('files')
.create({
pathName: `${datasetFolder}/${fileName}`,
fileSize: file.size,
fileSize: fileData.size,
mimeType,
label: clientFileName,
sortOrder: sortOrder || index,
@ -1446,24 +1101,16 @@ export default class DatasetController {
await dataset.useTransaction(trx).save();
await trx.commit();
console.log('Dataset has been updated successfully');
console.log('Dataset and related models created successfully');
session.flash('message', 'Dataset has been updated successfully');
// return response.redirect().toRoute('user.index');
return response.redirect().toRoute('dataset.edit', [dataset.id]);
} catch (error) {
// Clean up temporary files if validation or later steps fail
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
if (trx !== null) {
await trx.rollback();
}
console.error('Failed to update dataset and related models:', error);
console.error('Failed to create dataset and related models:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
@ -1488,26 +1135,16 @@ export default class DatasetController {
}
}
public async delete({ request, inertia, response, session, auth }: HttpContext) {
public async delete({ request, inertia, response, session }: HttpContext) {
const id = request.param('id');
const user = auth.user;
// Check if user is authenticated
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
try {
// This will throw 404 if dataset doesn't exist OR user doesn't own it
const dataset = await Dataset.query()
.preload('user', (builder) => {
builder.select('id', 'login');
})
.where('id', id)
.where('account_id', user.id) // Only fetch if user owns it
.preload('files')
.firstOrFail();
const validStates = ['inprogress', 'rejected_editor'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
@ -1532,27 +1169,9 @@ export default class DatasetController {
}
}
public async deleteUpdate({ params, session, response, auth }: HttpContext) {
public async deleteUpdate({ params, session, response }: HttpContext) {
try {
const user = auth.user;
if (!user) {
return response.flash('You must be logged in to edit a dataset.', 'error').redirect().toRoute('app.login.show');
}
// This will throw 404 if dataset doesn't exist OR user doesn't own it
const dataset = await Dataset.query()
.where('id', params.id)
.where('account_id', user.id) // Only fetch if user owns it
.preload('files')
.firstOrFail();
// // Check if the authenticated user is the owner of the dataset
// if (dataset.account_id !== user.id) {
// return response
// .flash(`Unauthorized access. You are not the owner of dataset with id ${params.id}.`, 'error')
// .redirect()
// .toRoute('dataset.list');
// }
const dataset = await Dataset.query().where('id', params.id).preload('files').firstOrFail();
const validStates = ['inprogress', 'rejected_editor'];
if (validStates.includes(dataset.server_state)) {
@ -1617,7 +1236,6 @@ export default class DatasetController {
}
const collectionRoles = await CollectionRole.query()
.whereIn('name', ['ddc', 'ccs'])
.preload('collections', (coll: Collection) => {
// preload only top-level collections (parent_id is null)
coll.whereNull('parent_id').orderBy('number', 'asc');
@ -1657,7 +1275,7 @@ export default class DatasetController {
// This should be an array of collection ids.
const collections: number[] = request.input('collections', []);
// Synchronize the dataset collections using the transaction.
// Synchronize the dataset collections using the transaction.
await dataset.useTransaction(trx).related('collections').sync(collections);
// Commit the transaction.await trx.commit()

View file

@ -1,231 +0,0 @@
import DocumentXmlCache from '#models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import Dataset from '#models/dataset';
import Strategy from './Strategy.js';
import { builder } from 'xmlbuilder2';
import logger from '@adonisjs/core/services/logger';
/**
 * Configuration for XML serialization
 *
 * Bundles everything the serialization Strategy needs: the model, field
 * filtering rules, and the base URI used when emitting xlink:ref elements.
 *
 * @interface XmlSerializationConfig
 */
export interface XmlSerializationConfig {
    /** The dataset model to serialize */
    model: Dataset;
    /** DOM representation (if available) */
    dom?: XMLBuilder;
    /** Fields to exclude from serialization */
    excludeFields: Array<string>;
    /** Whether to exclude empty fields */
    excludeEmpty: boolean;
    /** Base URI for xlink:ref elements */
    baseUri: string;
}
/**
 * Options for controlling serialization behavior
 *
 * All fields are optional; see the DatasetXmlSerializer constructor for the
 * defaults applied when a field is omitted (caching off, empty fields kept,
 * empty base URI, no excluded fields).
 */
export interface SerializationOptions {
    /** Enable XML caching */
    enableCaching?: boolean;
    /** Exclude empty fields from output */
    excludeEmptyFields?: boolean;
    /** Custom base URI */
    baseUri?: string;
    /** Fields to exclude */
    excludeFields?: string[];
}
/**
* DatasetXmlSerializer
*
* Handles XML serialization of Dataset models with intelligent caching.
* Generates XML representations and manages cache lifecycle to optimize performance.
*
* @example
* ```typescript
* const serializer = new DatasetXmlSerializer(dataset);
* serializer.enableCaching();
* serializer.excludeEmptyFields();
*
* const xmlDocument = await serializer.toXmlDocument();
* ```
*/
export default class DatasetXmlSerializer {
    // Serialization settings mirrored into the Strategy (model, excluded fields, base URI)
    private readonly config: XmlSerializationConfig;
    // Strategy that performs the actual DOM generation for the dataset
    private readonly strategy: Strategy;
    // Cache row for generated XML; null until injected via setCache() or created by persistToCache()
    private cache: DocumentXmlCache | null = null;
    // Whether freshly generated XML is written back to the database cache
    private cachingEnabled = false;

    /**
     * @param dataset The dataset model to serialize
     * @param options Serialization behavior; omitted fields default to
     *                caching off, empty fields kept, empty base URI, no excluded fields
     */
    constructor(dataset: Dataset, options: SerializationOptions = {}) {
        this.config = {
            model: dataset,
            excludeEmpty: options.excludeEmptyFields ?? false,
            baseUri: options.baseUri ?? '',
            excludeFields: options.excludeFields ?? [],
        };
        // The Strategy receives the same settings as the local config
        this.strategy = new Strategy({
            excludeEmpty: options.excludeEmptyFields ?? false,
            baseUri: options.baseUri ?? '',
            excludeFields: options.excludeFields ?? [],
            model: dataset,
        });
        if (options.enableCaching) {
            this.cachingEnabled = true;
        }
    }
    /**
     * Enable caching for XML generation
     * When enabled, generated XML is stored in database for faster retrieval
     */
    public enableCaching(): this {
        this.cachingEnabled = true;
        return this;
    }
    /**
     * Disable caching for XML generation
     */
    public disableCaching(): this {
        this.cachingEnabled = false;
        return this;
    }
    // Replace the model used for serialization.
    // NOTE(review): only this.config is updated — the Strategy built in the constructor
    // keeps the original model reference; confirm this is intended before relying on it.
    set model(model: Dataset) {
        this.config.model = model;
    }
    /**
     * Configure to exclude empty fields from XML output
     */
    public excludeEmptyFields(): this {
        this.config.excludeEmpty = true;
        return this;
    }
    /**
     * Set the cache instance directly (useful when preloading)
     * @param cache - The DocumentXmlCache instance
     */
    public setCache(cache: DocumentXmlCache): this {
        this.cache = cache;
        return this;
    }
    /**
     * Get the current cache instance
     */
    public getCache(): DocumentXmlCache | null {
        return this.cache;
    }
    /**
     * Get DOM document with intelligent caching
     * Returns cached version if valid, otherwise generates new document
     *
     * @returns The dataset XML node, or null if fresh generation fails
     */
    public async toXmlDocument(): Promise<XMLBuilder | null> {
        const dataset = this.config.model;
        // Try to get from cache first
        let cachedDocument: XMLBuilder | null = await this.retrieveFromCache();
        if (cachedDocument) {
            logger.debug(`Using cached XML for dataset ${dataset.id}`);
            return cachedDocument;
        }
        // Generate fresh document
        logger.debug(`[DatasetXmlSerializer] Cache miss - generating fresh XML for dataset ${dataset.id}`);
        const freshDocument = await this.strategy.createDomDocument();
        if (!freshDocument) {
            logger.error(`[DatasetXmlSerializer] Failed to generate XML for dataset ${dataset.id}`);
            return null;
        }
        // Cache if caching is enabled (best-effort; failures are logged, not thrown)
        if (this.cachingEnabled) {
            await this.persistToCache(freshDocument, dataset);
        }
        // Extract the dataset-specific node
        return this.extractDatasetNode(freshDocument);
    }
    /**
     * Generate XML string representation
     * Convenience method that converts XMLBuilder to string
     *
     * @returns Serialized XML without pretty-printing, or null when no document could be produced
     */
    public async toXmlString(): Promise<string | null> {
        const document = await this.toXmlDocument();
        return document ? document.end({ prettyPrint: false }) : null;
    }
    /**
     * Persist generated XML document to cache
     * Non-blocking - failures are logged but don't interrupt the flow
     *
     * @param domDocument The freshly generated full document to store
     * @param dataset The dataset the document belongs to (provides id and modification date)
     */
    private async persistToCache(domDocument: XMLBuilder, dataset: Dataset): Promise<void> {
        try {
            // Reuse an injected cache row if present, otherwise create a new one
            this.cache = this.cache || new DocumentXmlCache();
            this.cache.document_id = dataset.id;
            this.cache.xml_version = 1;
            this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
            this.cache.xml_data = domDocument.end();
            await this.cache.save();
            logger.debug(`Cached XML for dataset ${dataset.id}`);
        } catch (error) {
            logger.error(`Failed to cache XML for dataset ${dataset.id}: ${error.message}`);
            // Don't throw - caching failure shouldn't break the flow
        }
    }
    /**
     * Extract the Rdr_Dataset node from full document
     *
     * Returns a new standalone document rooted at the Rdr_Dataset node when found;
     * otherwise falls back to returning the full document unchanged.
     * NOTE(review): despite the `| null` return type, this method never returns null.
     */
    private extractDatasetNode(domDocument: XMLBuilder): XMLBuilder | null {
        const node = domDocument.find((n) => n.node.nodeName === 'Rdr_Dataset', false, true)?.node;
        if (node) {
            return builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
        }
        return domDocument;
    }
    /**
     * Attempt to retrieve valid cached XML document
     * Returns null if cache doesn't exist or is stale
     */
    private async retrieveFromCache(): Promise<XMLBuilder | null> {
        const dataset: Dataset = this.config.model;
        // No cache row injected/created yet — nothing to read
        if (!this.cache) {
            return null;
        }
        // Check if cache is still valid against the dataset's modification date
        const actuallyCached = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
        if (!actuallyCached) {
            logger.debug(`Cache invalid for dataset ${dataset.id}`);
            return null;
        }
        // Cache is current — return the cached DOM document
        try {
            // NOTE(review): this inner null check is redundant (guarded above) but kept as-is
            if (this.cache) {
                return this.cache.getDomDocument();
            } else {
                return null;
            }
        } catch (error) {
            logger.error(`Failed to retrieve cached document for dataset ${dataset.id}: ${error.message}`);
            return null;
        }
    }
}

View file

@ -1,3 +1,6 @@
// import { Client } from 'guzzle';
// import { Log } from '@adonisjs/core/build/standalone';
// import { DoiInterface } from './interfaces/DoiInterface';
import DoiClientContract from '#app/Library/Doi/DoiClientContract';
import DoiClientException from '#app/exceptions/DoiClientException';
import { StatusCodes } from 'http-status-codes';
@ -9,14 +12,14 @@ export class DoiClient implements DoiClientContract {
public username: string;
public password: string;
public serviceUrl: string;
public apiUrl: string;
constructor() {
// const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
this.username = process.env.DATACITE_USERNAME || '';
this.password = process.env.DATACITE_PASSWORD || '';
this.serviceUrl = process.env.DATACITE_SERVICE_URL || '';
this.apiUrl = process.env.DATACITE_API_URL || 'https://api.datacite.org';
// this.prefix = process.env.DATACITE_PREFIX || '';
// this.base_domain = process.env.BASE_DOMAIN || '';
if (this.username === '' || this.password === '' || this.serviceUrl === '') {
const message = 'issing configuration settings to properly initialize DOI client';
@ -87,240 +90,4 @@ export class DoiClient implements DoiClientContract {
throw new DoiClientException(error.response.status, error.response.data);
}
}
/**
 * Retrieves DOI information from DataCite REST API
 *
 * On any REST failure other than a 404, falls back to the MDS API
 * (see getDoiInfoFromMds). A 404 is treated as "DOI does not exist".
 *
 * @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
 * @returns Promise with DOI information or null if not found
 */
public async getDoiInfo(doiValue: string): Promise<any | null> {
    try {
        // Use configurable DataCite REST API URL
        const dataciteApiUrl = `${this.apiUrl}/dois/${doiValue}`;
        const response = await axios.get(dataciteApiUrl, {
            headers: {
                Accept: 'application/vnd.api+json',
            },
        });
        if (response.status === 200 && response.data.data) {
            // Flatten the JSON:API attributes into a simple result object;
            // the full attributes payload is kept under `metadata`
            return {
                created: response.data.data.attributes.created,
                registered: response.data.data.attributes.registered,
                updated: response.data.data.attributes.updated,
                published: response.data.data.attributes.published,
                state: response.data.data.attributes.state,
                url: response.data.data.attributes.url,
                metadata: response.data.data.attributes,
            };
        }
    } catch (error) {
        if (error.response?.status === 404) {
            logger.debug(`DOI ${doiValue} not found in DataCite`);
            return null;
        }
        logger.debug(`DataCite REST API failed for ${doiValue}: ${error.message}`);
        // Fallback to MDS API
        return await this.getDoiInfoFromMds(doiValue);
    }
    // Reached when the response was 200-but-empty or a non-200 without throwing
    return null;
}
/**
 * Fallback method to get DOI info from MDS API
 *
 * Resolves the registered URL via GET /doi/{doi}; metadata retrieval is
 * best-effort. MDS provides no timestamps, so the current time is used
 * as a stand-in for `created`/`registered` (marked with source: 'mds').
 *
 * @param doiValue The DOI identifier
 * @returns Promise with basic DOI information or null
 */
private async getDoiInfoFromMds(doiValue: string): Promise<any | null> {
    try {
        const auth = {
            username: this.username,
            password: this.password,
        };
        // Get DOI URL
        const doiResponse = await axios.get(`${this.serviceUrl}/doi/${doiValue}`, { auth });
        if (doiResponse.status === 200) {
            // Get metadata if available
            try {
                const metadataResponse = await axios.get(`${this.serviceUrl}/metadata/${doiValue}`, {
                    auth,
                    headers: {
                        Accept: 'application/xml',
                    },
                });
                return {
                    url: doiResponse.data.trim(),
                    metadata: metadataResponse.data,
                    created: new Date().toISOString(), // MDS doesn't provide creation dates
                    registered: new Date().toISOString(), // Use current time as fallback
                    source: 'mds',
                };
            } catch (metadataError) {
                // Return basic info even if metadata fetch fails
                return {
                    url: doiResponse.data.trim(),
                    created: new Date().toISOString(),
                    registered: new Date().toISOString(),
                    source: 'mds',
                };
            }
        }
    } catch (error) {
        if (error.response?.status === 404) {
            logger.debug(`DOI ${doiValue} not found in DataCite MDS`);
            return null;
        }
        // Any other failure is logged and treated the same as "not found"
        logger.debug(`DataCite MDS API failed for ${doiValue}: ${error.message}`);
    }
    return null;
}
/**
* Checks if a DOI exists in DataCite
*
* @param doiValue The DOI identifier
* @returns Promise<boolean> True if DOI exists
*/
public async doiExists(doiValue: string): Promise<boolean> {
const doiInfo = await this.getDoiInfo(doiValue);
return doiInfo !== null;
}
/**
* Gets the last modification date of a DOI
*
* @param doiValue The DOI identifier
* @returns Promise<Date | null> Last modification date or creation date if never updated, null if not found
*/
public async getDoiLastModified(doiValue: string): Promise<Date | null> {
const doiInfo = await this.getDoiInfo(doiValue);
if (doiInfo) {
// Use updated date if available, otherwise fall back to created/registered date
const dateToUse = doiInfo.updated || doiInfo.registered || doiInfo.created;
if (dateToUse) {
logger.debug(
`DOI ${doiValue}: Using ${doiInfo.updated ? 'updated' : doiInfo.registered ? 'registered' : 'created'} date: ${dateToUse}`,
);
return new Date(dateToUse);
}
}
return null;
}
/**
 * Makes a DOI unfindable (registered but not discoverable)
 * Note: DOIs cannot be deleted, only made unfindable
 * await doiClient.makeDoiUnfindable('10.21388/tethys.231');
 *
 * @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
 * @returns Promise<AxiosResponse<any>> The http response
 * @throws DoiClientException when the DOI does not exist or DataCite responds unexpectedly
 */
public async makeDoiUnfindable(doiValue: string): Promise<AxiosResponse<any>> {
    const auth = {
        username: this.username,
        password: this.password,
    };
    try {
        // First, check if DOI exists
        const exists = await this.doiExists(doiValue);
        if (!exists) {
            throw new DoiClientException(404, `DOI ${doiValue} not found`);
        }
        // Delete the DOI URL mapping to make it unfindable
        // This removes the URL but keeps the metadata registered
        const response = await axios.delete(`${this.serviceUrl}/doi/${doiValue}`, { auth });
        // Response Codes for DELETE /doi/{doi}
        // 200 OK: operation successful
        // 401 Unauthorized: no login
        // 403 Forbidden: login problem, quota exceeded
        // 404 Not Found: DOI does not exist
        if (response.status !== 200) {
            const message = `Unexpected DataCite MDS response code ${response.status}`;
            logger.error(message);
            throw new DoiClientException(response.status, message);
        }
        logger.info(`DOI ${doiValue} successfully made unfindable`);
        return response;
    } catch (error) {
        logger.error(`Failed to make DOI ${doiValue} unfindable: ${error.message}`);
        // Re-throw our own exceptions untouched; wrap anything else (e.g. raw axios errors)
        if (error instanceof DoiClientException) {
            throw error;
        }
        throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
    }
}
/**
 * Makes a DOI findable again by re-registering the URL
 * await doiClient.makeDoiFindable(
 *   '10.21388/tethys.231',
 *   'https://doi.dev.tethys.at/10.21388/tethys.231'
 * );
 *
 * @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
 * @param landingPageUrl The landing page URL
 * @returns Promise<AxiosResponse<any>> The http response
 * @throws DoiClientException on any non-201 response or request failure
 */
public async makeDoiFindable(doiValue: string, landingPageUrl: string): Promise<AxiosResponse<any>> {
    const auth = {
        username: this.username,
        password: this.password,
    };
    try {
        // Re-register the DOI with its URL to make it findable again.
        // MDS expects a plain-text body of exactly two lines: the DOI and the URL.
        const response = await axios.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, { auth });
        // Response Codes for PUT /doi/{doi}
        // 201 Created: operation successful
        // 400 Bad Request: request body must be exactly two lines: DOI and URL
        // 401 Unauthorized: no login
        // 403 Forbidden: login problem, quota exceeded
        // 412 Precondition failed: metadata must be uploaded first
        if (response.status !== 201) {
            const message = `Unexpected DataCite MDS response code ${response.status}`;
            logger.error(message);
            throw new DoiClientException(response.status, message);
        }
        logger.info(`DOI ${doiValue} successfully made findable again`);
        return response;
    } catch (error) {
        logger.error(`Failed to make DOI ${doiValue} findable: ${error.message}`);
        // Re-throw our own exceptions untouched; wrap anything else (e.g. raw axios errors)
        if (error instanceof DoiClientException) {
            throw error;
        }
        throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
    }
}
/**
 * Gets the current state of a DOI (draft, registered, findable)
 * const state = await doiClient.getDoiState('10.21388/tethys.231');
 * console.log(`Current state: ${state}`); // 'findable'
 *
 * @param doiValue The DOI identifier
 * @returns Promise<string | null> The DOI state or null if not found
 */
public async getDoiState(doiValue: string): Promise<string | null> {
    const info = await this.getDoiInfo(doiValue);
    if (info && info.state) {
        return info.state;
    }
    return null;
}
}

View file

@ -2,7 +2,7 @@ import Dataset from '#models/dataset';
import { Client } from '@opensearch-project/opensearch';
import { create } from 'xmlbuilder2';
import SaxonJS from 'saxon-js';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import XmlModel from '#app/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import logger from '@adonisjs/core/services/logger';
import { readFileSync } from 'fs';
@ -72,42 +72,31 @@ export default {
}
},
/**
* Index a dataset document to OpenSearch/Elasticsearch
*/
async indexDocument(dataset: Dataset, index_name: string): Promise<void> {
try {
// Load XSLT transformation file
const xsltProc = readFileSync('public/assets2/solr.sef.json');
const proc = readFileSync('public/assets2/solr.sef.json');
const doc: string = await this.getTransformedString(dataset, proc);
// Transform dataset to JSON document
const jsonDoc: string = await this.getTransformedString(dataset, xsltProc);
const document = JSON.parse(jsonDoc);
// Index document to OpenSearch with document json body
let document = JSON.parse(doc);
await this.client.index({
id: dataset.publish_id?.toString(),
index: index_name,
body: document,
refresh: true, // make immediately searchable
refresh: true,
});
logger.info(`Dataset ${dataset.publish_id} successfully indexed to ${index_name}`);
logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
} catch (error) {
logger.error(`Failed to index dataset ${dataset.publish_id}: ${error.message}`);
throw error; // Re-throw to allow caller to handle
logger.error(`An error occurred while indexing datsaet with publish_id ${dataset.publish_id}.`);
}
},
/**
* Transform dataset XML to JSON using XSLT
*/
async getTransformedString(dataset: Dataset, proc: Buffer): Promise<string> {
// Generate XML string from dataset
const xmlString = await this.generateDatasetXml(dataset);
let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
const xmlString = xml.end({ prettyPrint: false });
try {
// Apply XSLT transformation
const result = await SaxonJS.transform({
stylesheetText: proc,
destination: 'serialized',
@ -119,18 +108,6 @@ export default {
return '';
}
},
/**
* Generate XML string from dataset model
*/
async generateDatasetXml(dataset: Dataset): Promise<string> {
const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
return xml.end({ prettyPrint: false });
},
};
/**
* Return the default global focus trap stack
@ -138,49 +115,74 @@ export default {
* @return {import('focus-trap').FocusTrap[]}
*/
/**
* Create complete XML record for dataset
* Handles caching and metadata enrichment
*/
// export const indexDocument = async (dataset: Dataset, index_name: string, proc: Buffer): Promise<void> => {
// try {
// const doc = await getJsonString(dataset, proc);
// let document = JSON.parse(doc);
// await client.index({
// id: dataset.publish_id?.toString(),
// index: index_name,
// body: document,
// refresh: true,
// });
// Logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
// } catch (error) {
// Logger.error(`An error occurred while indexing datsaet with publish_id ${dataset.publish_id}.`);
// }
// };
// const getJsonString = async (dataset, proc): Promise<string> => {
// let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
// const datasetNode = xml.root().ele('Dataset');
// await createXmlRecord(dataset, datasetNode);
// const xmlString = xml.end({ prettyPrint: false });
// try {
// const result = await transform({
// stylesheetText: proc,
// destination: 'serialized',
// sourceText: xmlString,
// });
// return result.principalResult;
// } catch (error) {
// Logger.error(`An error occurred while creating the user, error: ${error.message},`);
// return '';
// }
// };
const createXmlRecord = async (dataset: Dataset, datasetNode: XMLBuilder): Promise<void> => {
const domNode = await getDatasetXmlDomNode(dataset);
if (!domNode) {
throw new Error(`Failed to generate XML DOM node for dataset ${dataset.id}`);
}
// Enrich with landing page URL
if (dataset.publish_id) {
addLandingPageAttribute(domNode, dataset.publish_id.toString());
}
// Add data type specification
addSpecInformation(domNode, `data-type:${dataset.type}`);
// Add collection information
if (dataset.collections) {
for (const coll of dataset.collections) {
const collRole = coll.collectionRole;
addSpecInformation(domNode, `${collRole.oai_name}:${coll.number}`);
if (domNode) {
// add frontdoor url and data-type
dataset.publish_id && addLandingPageAttribute(domNode, dataset.publish_id.toString());
addSpecInformation(domNode, 'data-type:' + dataset.type);
if (dataset.collections) {
for (const coll of dataset.collections) {
const collRole = coll.collectionRole;
addSpecInformation(domNode, collRole.oai_name + ':' + coll.number);
}
}
}
datasetNode.import(domNode);
datasetNode.import(domNode);
}
};
const getDatasetXmlDomNode = async (dataset: Dataset): Promise<XMLBuilder | null> => {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
// Load cache relationship if not already loaded
xmlModel.excludeEmptyFields();
xmlModel.caching = true;
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
await dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
xmlModel.xmlCache = dataset.xmlCache;
}
// Generate or retrieve cached DOM document
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
// return cache.getDomDocument();
const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return domDocument;
};
const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
@ -190,6 +192,6 @@ const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
domNode.att('landingpage', url);
};
const addSpecInformation = (domNode: XMLBuilder, information: string) => {
const addSpecInformation= (domNode: XMLBuilder, information: string) => {
domNode.ele('SetSpec').att('Value', information);
};
};

129
app/Library/XmlModel.ts Normal file
View file

@ -0,0 +1,129 @@
import DocumentXmlCache from '#models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import Dataset from '#models/dataset';
import Strategy from './Strategy.js';
import { DateTime } from 'luxon';
import { builder } from 'xmlbuilder2';
/**
 * Configuration shared between XmlModel and its serialization Strategy.
 *
 * @interface Conf
 * @member {Dataset} model Holds the current dataset model to serialize.
 * @member {XMLBuilder} dom Optional pre-built DOM representation.
 * @member {Array<string>} excludeFields List of fields to skip on serialization.
 * @member {boolean} excludeEmpty True, if empty fields get excluded from serialization.
 * @member {string} baseUri Base URI for xlink:ref elements.
 */
export interface Conf {
    model: Dataset;
    dom?: XMLBuilder;
    excludeFields: Array<string>;
    excludeEmpty: boolean;
    baseUri: string;
}
/**
 * Serializes a Dataset model into an XML DOM document, transparently
 * reading from and (optionally) writing back to the per-document XML cache.
 */
export default class XmlModel {
    // Serialization settings for this model instance.
    private config: Conf;
    // private strategy = null;
    // Cache row backing this model's XML, if one has been attached or created.
    private cache: DocumentXmlCache | null = null;
    // When true, freshly generated XML is persisted to the cache table.
    private _caching = false;
    // Strategy performing the actual model -> DOM conversion.
    private strategy: Strategy;

    constructor(dataset: Dataset) {
        // $this->strategy = new Strategy();// Opus_Model_Xml_Version1;
        // $this->config = new Conf();
        // $this->strategy->setup($this->config);
        // NOTE(review): this.config and the Strategy receive separate Conf
        // objects with different excludeEmpty values (false vs. true) — confirm intended.
        this.config = {
            excludeEmpty: false,
            baseUri: '',
            excludeFields: [],
            model: dataset,
        };
        this.strategy = new Strategy({
            excludeEmpty: true,
            baseUri: '',
            excludeFields: [],
            model: dataset,
        });
    }

    // Replaces the dataset to serialize (does not update the Strategy's own copy).
    set model(model: Dataset) {
        this.config.model = model;
    }

    // Switches this instance's config to skip empty fields during serialization.
    public excludeEmptyFields(): void {
        this.config.excludeEmpty = true;
    }

    // Cache row currently attached to this model (null when none).
    get xmlCache(): DocumentXmlCache | null {
        return this.cache;
    }

    set xmlCache(cache: DocumentXmlCache) {
        this.cache = cache;
    }

    // Whether generated XML should be written back to the cache.
    get caching(): boolean {
        return this._caching;
    }

    set caching(caching: boolean) {
        this._caching = caching;
    }

    /**
     * Returns the DOM document for the configured dataset.
     *
     * Prefers a valid cache entry; otherwise generates the DOM via the
     * strategy, optionally persists it to the cache, and promotes the
     * 'Rdr_Dataset' element to the document root when present.
     *
     * @returns The DOM document, or null when generation yields nothing.
     */
    public async getDomDocument(): Promise<XMLBuilder | null> {
        const dataset = this.config.model;
        let domDocument: XMLBuilder | null = await this.getDomDocumentFromXmlCache();
        if (domDocument == null) {
            domDocument = await this.strategy.createDomDocument();
            // domDocument = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
            if (this._caching) {
                // caching is desired: persist the fresh XML keyed by document id
                // and the dataset's last-modified timestamp.
                this.cache = this.cache || new DocumentXmlCache();
                this.cache.document_id = dataset.id;
                this.cache.xml_version = 1; // (int)$this->strategy->getVersion();
                this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
                this.cache.xml_data = domDocument.end();
                await this.cache.save();
            }
            // Search the tree for the 'Rdr_Dataset' element and, if found,
            // rebuild the document with that element as its root.
            const node = domDocument.find(
                (n) => {
                    const test = n.node.nodeName == 'Rdr_Dataset';
                    return test;
                },
                false,
                true,
            )?.node;
            if (node != undefined) {
                domDocument = builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
            }
        }
        return domDocument;
    }

    /**
     * Attempts to load the DOM document from the attached cache entry.
     *
     * @returns The cached DOM, or null when no cache is attached, the entry
     *          is stale (per DocumentXmlCache.hasValidEntry), or reading fails.
     */
    private async getDomDocumentFromXmlCache(): Promise<XMLBuilder | null> {
        const dataset: Dataset = this.config.model;
        if (!this.cache) {
            return null;
        }
        //.toFormat('YYYY-MM-DD HH:mm:ss');
        let date: DateTime = dataset.server_date_modified;
        const actuallyCached: boolean = await DocumentXmlCache.hasValidEntry(dataset.id, date);
        if (!actuallyCached) {
            return null;
        }
        //cache is actual return it for oai:
        try {
            if (this.cache) {
                return this.cache.getDomDocument();
            } else {
                return null;
            }
        } catch (error) {
            // Swallow read/parse errors; the caller regenerates the DOM instead.
            return null;
        }
    }
}

View file

@ -1,54 +0,0 @@
// app/controllers/projects_controller.ts
import Project from '#models/project';
import type { HttpContext } from '@adonisjs/core/http';
import { createProjectValidator, updateProjectValidator } from '#validators/project';
export default class ProjectsController {
    /**
     * GET /settings/projects — list all projects together with the caller's permissions.
     */
    public async index({ inertia, auth }: HttpContext) {
        const allProjects = await Project.all();
        return inertia.render('Admin/Project/Index', {
            projects: allProjects,
            can: {
                edit: await auth.user?.can(['settings']),
                create: await auth.user?.can(['settings']),
            },
        });
    }

    /**
     * GET /settings/projects/create — show the creation form.
     */
    public async create({ inertia }: HttpContext) {
        return inertia.render('Admin/Project/Create');
    }

    /**
     * POST /settings/projects — validate and persist a new project, then redirect to the list.
     */
    public async store({ request, response, session }: HttpContext) {
        const payload = await request.validateUsing(createProjectValidator);
        await Project.create(payload);
        session.flash('success', 'Project created successfully');
        return response.redirect().toRoute('settings.project.index');
    }

    /**
     * GET /settings/projects/:id/edit — show the edit form for a single project.
     */
    public async edit({ params, inertia }: HttpContext) {
        const project = await Project.findOrFail(params.id);
        return inertia.render('Admin/Project/Edit', { project });
    }

    /**
     * PUT /settings/projects/:id — validate and apply changes, then redirect to the list.
     */
    public async update({ params, request, response, session }: HttpContext) {
        const project = await Project.findOrFail(params.id);
        const payload = await request.validateUsing(updateProjectValidator);
        await project.merge(payload).save();
        session.flash('success', 'Project updated successfully');
        return response.redirect().toRoute('settings.project.index');
    }
}

View file

@ -1,43 +0,0 @@
// import { Exception } from '@adonisjs/core/exceptions'
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http';
export default class DbHandlerException extends ExceptionHandler {
    /**
     * Maps PostgreSQL connection-refused failures to a 503 JSON payload;
     * every other error falls through to the default handler.
     */
    async handle(error: any, ctx: HttpContext) {
        // Node wraps multi-address connect failures in an AggregateError.
        const isAggregate = error.type === 'AggregateError' && Boolean(error.aggregateErrors);
        if (isAggregate) {
            const refusedPostgres = error.aggregateErrors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
            if (refusedPostgres) {
                return ctx.response.status(503).json({
                    status: 'error',
                    message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
                    details: {
                        code: error.code,
                        type: error.type,
                        ports: error.aggregateErrors.map((err: any) => ({
                            port: err.port,
                            address: err.address,
                        })),
                    },
                });
            }
        }
        // Plain single-address connection refusal.
        if (error.code === 'ECONNREFUSED') {
            return ctx.response.status(503).json({
                status: 'error',
                message: 'Database connection failed. Please ensure PostgreSQL is running.',
                code: error.code,
            });
        }
        return super.handle(error, ctx);
    }
    static status = 500;
}

View file

@ -46,7 +46,6 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// return view.render('./errors/server-error', { error });
// },
// };
protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
'404': (error, { inertia }) => {
return inertia.render('Errors/ServerError', {
@ -59,47 +58,9 @@ export default class HttpExceptionHandler extends ExceptionHandler {
return inertia.render('Errors/ServerError', {
error: error.message,
code: error.status,
});
},
// '500': (error, { inertia }) => {
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// },
'500..599': (error, { inertia }) => {
if (error.code === 'ECONNREFUSED') {
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
return inertia.render('Errors/postgres_error', {
status: 'error',
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
details: {
code: error.code,
type: error.status,
ports: error.errors.map((err: any) => ({
port: err.port,
address: err.address,
})),
},
});
}
} else {
return inertia.render('Errors/ServerError', {
error: error.message,
code: error.status,
});
}
});
},
'500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
};
// constructor() {
@ -107,7 +68,7 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// }
public async handle(error: any, ctx: HttpContext) {
const { response, request, session, inertia } = ctx;
const { response, request, session } = ctx;
/**
* Handle failed authentication attempt
@ -121,47 +82,6 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// return response.redirect('/dashboard');
// }
// Handle Axios errors
if (error.code === 'ECONNREFUSED') {
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
}
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
// https://github.com/inertiajs/inertia-laravel/issues/56
// let test = response.getStatus(); //200
// let header = request.header('X-Inertia'); // true
@ -178,21 +98,12 @@ export default class HttpExceptionHandler extends ExceptionHandler {
// ->toResponse($request)
// ->setStatusCode($response->status());
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
// Dynamically change the error templates based on the absence of X-Inertia header
// if (!ctx.request.header('X-Inertia')) {
// this.statusPages = {
// '401..403': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// '404': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// '500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// '401..403': (error, { view }) => view.render('./errors/unauthorized', { error }),
// '404': (error, { view }) => view.render('./errors/not-found', { error }),
// '500..599': (error, { view }) => view.render('./errors/server-error', { error }),
// };
// }

View file

@ -4,8 +4,7 @@ import { builder, create } from 'xmlbuilder2';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import db from '@adonisjs/lucid/services/db';
import { DateTime } from 'luxon';
import type { BelongsTo } from '@adonisjs/lucid/types/relations';
import logger from '@adonisjs/core/services/logger';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class DocumentXmlCache extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -67,38 +66,33 @@ export default class DocumentXmlCache extends BaseModel {
}
/**
* Check if a valid (non-stale) cache entry exists
* Cache is valid only if it was created AFTER the dataset's last modification
* Check if a dataset in a specific xml version is already cached or not.
*
* @param datasetId - The dataset ID to check
* @param datasetServerDateModified - The dataset's last modification timestamp
* @returns true if valid cache exists, false otherwise
* @param mixed datasetId
* @param mixed serverDateModified
* @returns {Promise<boolean>} Returns true on cached hit else false.
*/
// public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
// // const formattedDate = dayjs(datasetServerDateModified).format('YYYY-MM-DD HH:mm:ss');
// const query = Database.from(this.table)
// .where('document_id', datasetId)
// .where('server_date_modified', '2023-08-17 16:51:03')
// .first();
// const row = await query;
// return !!row;
// }
// Assuming 'DocumentXmlCache' has a table with a 'server_date_modified' column in your database
public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
const serverDateModifiedString: string = datasetServerDateModified.toFormat('yyyy-MM-dd HH:mm:ss'); // Convert DateTime to ISO string
const row = await db
.from(this.table)
const query = db.from(this.table)
.where('document_id', datasetId)
.where('server_date_modified', '>', serverDateModifiedString) // Check if server_date_modified is newer or equal
.where('server_date_modified', '>=', serverDateModifiedString) // Check if server_date_modified is newer or equal
.first();
const isValid = !!row;
if (isValid) {
logger.debug(`Valid cache found for dataset ${datasetId}`);
} else {
logger.debug(`No valid cache for dataset ${datasetId} (dataset modified: ${serverDateModifiedString})`);
}
return isValid;
}
/**
* Invalidate (delete) cache entry
*/
public async invalidate(): Promise<void> {
await this.delete();
logger.debug(`Invalidated cache for document ${this.document_id}`);
const row = await query;
return !!row;
}
}

View file

@ -3,7 +3,7 @@ import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { ManyToMany } from '@adonisjs/lucid/types/relations';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
export default class Person extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -30,7 +30,7 @@ export default class Person extends BaseModel {
@column({})
public lastName: string;
@column({ columnName: 'identifier_orcid' })
@column({})
public identifierOrcid: string;
@column({})
@ -64,8 +64,9 @@ export default class Person extends BaseModel {
// return '2023-03-21 08:45:00';
// }
@computed({
serializeAs: 'dataset_count',
serializeAs: 'dataset_count',
})
public get datasetCount() {
const stock = this.$extras.datasets_count; //my pivot column name was "stock"
@ -78,16 +79,6 @@ export default class Person extends BaseModel {
return contributor_type;
}
@computed({ serializeAs: 'allow_email_contact' })
public get allowEmailContact() {
// If the datasets relation is missing or empty, return false instead of null.
if (!this.datasets || this.datasets.length === 0) {
return false;
}
// Otherwise return the pivot attribute from the first related dataset.
return this.datasets[0].$extras?.pivot_allow_email_contact;
}
@manyToMany(() => Dataset, {
pivotForeignKey: 'person_id',
pivotRelatedForeignKey: 'document_id',
@ -95,34 +86,4 @@ export default class Person extends BaseModel {
pivotColumns: ['role', 'sort_order', 'allow_email_contact'],
})
public datasets: ManyToMany<typeof Dataset>;
// public toJSON() {
// const json = super.toJSON();
// // Check if this person is loaded through a pivot relationship with sensitive roles
// const pivotRole = this.$extras?.pivot_role;
// if (pivotRole === 'author' || pivotRole === 'contributor') {
// // Remove sensitive information for public-facing roles
// delete json.email;
// // delete json.identifierOrcid;
// }
// return json;
// }
// @afterFind()
// public static async afterFindHook(person: Person) {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// }
// @afterFetch()
// public static async afterFetchHook(persons: Person[]) {
// persons.forEach(person => {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// });
// }
}

View file

@ -1,57 +0,0 @@
/**
 * Qs module config
 *
 * Options forwarded to the `qs` package when parsing URL-encoded
 * form bodies and query strings.
 */
type QueryStringConfig = {
  depth?: number
  allowPrototypes?: boolean
  plainObjects?: boolean
  parameterLimit?: number
  arrayLimit?: number
  ignoreQueryPrefix?: boolean
  delimiter?: RegExp | string
  allowDots?: boolean
  charset?: 'utf-8' | 'iso-8859-1' | undefined
  charsetSentinel?: boolean
  interpretNumericEntities?: boolean
  parseArrays?: boolean
  comma?: boolean
}
/**
 * Base config used by all types
 */
type BodyParserBaseConfig = {
  encoding: string // e.g. 'utf-8'
  limit: string | number // maximum body size, e.g. '1mb' or a byte count
  types: string[] // content-types handled by this parser
}
/**
 * Body parser config for parsing JSON requests
 */
export type BodyParserJSONConfig = BodyParserBaseConfig & {
  strict: boolean // when true, only accept arrays/objects at the top level
  convertEmptyStringsToNull: boolean
}
/**
 * Parser config for parsing form data (application/x-www-form-urlencoded)
 */
export type BodyParserFormConfig = BodyParserBaseConfig & {
  queryString: QueryStringConfig // options passed through to the qs parser
  convertEmptyStringsToNull: boolean
}
/**
 * Parser config for parsing raw body (untouched)
 */
export type BodyParserRawConfig = BodyParserBaseConfig
/**
 * Body parser config for all supported form types
 */
export type BodyParserConfig = {
  allowedMethods: string[] // presumably the HTTP methods whose bodies get parsed — confirm against usage
  json: BodyParserJSONConfig
  form: BodyParserFormConfig
  raw: BodyParserRawConfig
  // NOTE(review): BodyParserMultipartConfig is not declared in this chunk — confirm it is defined/imported elsewhere
  multipart: BodyParserMultipartConfig
}

View file

@ -89,11 +89,24 @@ export default class User extends compose(BaseModel, AuthFinder) {
@column({})
public avatar: string;
// @hasOne(() => TotpSecret, {
// foreignKey: 'user_id',
// })
// public totp_secret: HasOne<typeof TotpSecret>;
// @beforeSave()
// public static async hashPassword(user: User) {
// if (user.$dirty.password) {
// user.password = await hash.use('laravel').make(user.password);
// }
// }
public get isTwoFactorEnabled(): boolean {
return Boolean(this?.twoFactorSecret && this.state == TotpState.STATE_ENABLED);
// return Boolean(this.totp_secret?.twoFactorSecret);
}
@manyToMany(() => Role, {
pivotForeignKey: 'account_id',
pivotRelatedForeignKey: 'role_id',
@ -129,9 +142,7 @@ export default class User extends compose(BaseModel, AuthFinder) {
@beforeFind()
@beforeFetch()
public static preloadRoles(user: User) {
user.preload('roles', (builder) => {
builder.select(['id', 'name', 'display_name', 'description']);
});
user.preload('roles')
}
public async getBackupCodes(this: User): Promise<BackupCode[]> {

View file

@ -1,16 +1,3 @@
import { join, isAbsolute } from 'node:path';
import type { BodyParserConfig } from '#models/types';
import { createId } from '@paralleldrive/cuid2';
import { tmpdir } from 'node:os';
import config from '@adonisjs/core/services/config';
import Dataset from '#models/dataset';
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
import Person from '#models/person';
// Simple string-keyed map used when collecting pivot attributes.
interface Dictionary {
    [index: string]: string;
}
/**
 * Adds two numbers and returns their sum.
 */
export function sum(a: number, b: number): number {
    const total = a + b;
    return total;
}
@ -37,88 +24,3 @@ export function preg_match(regex: RegExp, str: string) {
const result: boolean = regex.test(str);
return result;
}
/**
 * Returns the tmp path for storing the files temporarly.
 *
 * Uses the config's tmpFileName factory when provided (resolving relative
 * names against the OS temp dir); otherwise generates a random cuid name.
 */
export function getTmpPath(config: BodyParserConfig['multipart']): string {
    if (typeof config.tmpFileName !== 'function') {
        return join(tmpdir(), createId());
    }
    const candidate = config.tmpFileName();
    return isAbsolute(candidate) ? candidate : join(tmpdir(), candidate);
}
/**
 * Returns the bodyparser sub-config for a given parser type
 * (e.g. 'json', 'form', 'raw', 'multipart').
 */
export function getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
    const parserConfig: BodyParserConfig = config.get('bodyparser');
    return parserConfig[type];
}
/**
 * Parses a human-readable byte size such as '512kb' or '10 MB' into a byte count.
 *
 * The unit is case-insensitive and may be separated from the number by
 * whitespace (previously-rejected inputs like '10 kb' now parse; all
 * previously-accepted inputs behave identically).
 *
 * @param size e.g. '512kb', '1MB', '2gb'
 * @returns the size in bytes
 * @throws Error when the string does not match `<digits><kb|mb|gb|tb>`
 */
export function parseBytesSize(size: string): number {
    // Binary multipliers for each supported unit.
    const units: Record<string, number> = {
        kb: 1024,
        mb: 1024 * 1024,
        gb: 1024 * 1024 * 1024,
        tb: 1024 * 1024 * 1024 * 1024,
    };
    const match = size.match(/^(\d+)\s*(kb|mb|gb|tb)$/i); // Regex to match size format
    if (!match) {
        throw new Error('Invalid size format');
    }
    const [, value, unit] = match;
    // Radix 10 is explicit; the regex already guarantees decimal digits.
    return parseInt(value, 10) * units[unit.toLowerCase()];
}
// Helper function to format bytes as human-readable text.
// Fix: the unit index is now clamped — previously bytes in (0, 1) produced a
// negative index and bytes >= 1024^5 ran past the sizes table, both yielding
// an 'undefined' unit suffix.
export function formatBytes(bytes: number): string {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    // Clamp to the available units instead of indexing out of range.
    const exponent = Math.floor(Math.log(bytes) / Math.log(k));
    const i = Math.min(Math.max(exponent, 0), sizes.length - 1);
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
/**
 * Attaches (or creates and attaches) the given persons to a dataset inside
 * the supplied transaction, assigning the role and a 1-based sort order.
 */
export async function savePersons(dataset: Dataset, persons: any[], role: string, trx: TransactionClientContract) {
    let position = 0;
    for (const person of persons) {
        position += 1;
        // Defaults first; any 'pivot_' prefixed attributes on the person
        // object override them via the spread.
        const pivotData = {
            role: role,
            sort_order: position,
            allow_email_contact: false,
            ...extractPivotAttributes(person),
        };
        const relation = dataset.useTransaction(trx).related('persons');
        if (person.id === undefined) {
            // Unknown person: create a fresh row and attach with pivot data.
            const newPerson = new Person();
            newPerson.fill(person);
            await relation.save(newPerson, false, pivotData);
        } else {
            // Existing person: just attach with pivot data.
            await relation.attach({
                [person.id]: pivotData,
            });
        }
    }
}
// Helper function to extract pivot attributes from a person object:
// collects every 'pivot_' prefixed property, stripping the prefix from the key.
function extractPivotAttributes(person: any) {
    const result: { [index: string]: string } = {};
    for (const key in person) {
        if (!key.startsWith('pivot_')) {
            continue;
        }
        result[key.replace('pivot_', '')] = person[key];
    }
    return result;
}

View file

@ -40,8 +40,7 @@ export const createDatasetValidator = vine.compile(
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(2)
.arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
.minLength(1),
descriptions: vine
.array(
vine.object({
@ -55,8 +54,7 @@ export const createDatasetValidator = vine.compile(
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(1),
.arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
.minLength(1),
authors: vine
.array(
vine.object({
@ -67,9 +65,8 @@ export const createDatasetValidator = vine.compile(
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.minLength(1)
@ -84,10 +81,9 @@ export const createDatasetValidator = vine.compile(
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.distinct('email')
@ -191,8 +187,7 @@ export const updateDatasetValidator = vine.compile(
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
// .minLength(2)
.arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
.minLength(1),
descriptions: vine
.array(
vine.object({
@ -206,7 +201,7 @@ export const updateDatasetValidator = vine.compile(
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
.arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
.minLength(1),
authors: vine
.array(
vine.object({
@ -217,9 +212,8 @@ export const updateDatasetValidator = vine.compile(
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
}),
)
.minLength(1)
@ -234,9 +228,8 @@ export const updateDatasetValidator = vine.compile(
.email()
.normalizeEmail()
.isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
}),
)
@ -310,149 +303,21 @@ export const updateDatasetValidator = vine.compile(
.fileScan({ removeInfected: true }),
)
.dependentArrayMinLength({ dependentArray: 'fileInputs', min: 1 }),
fileInputs: vine
.array(
vine.object({
label: vine.string().trim().maxLength(100),
}),
)
.optional(),
fileInputs: vine.array(
vine.object({
label: vine.string().trim().maxLength(100),
//extnames: extensions,
}),
),
}),
);
/**
 * Validation schema for the editor's dataset-update form.
 * Compiled once at module load; wizard steps are marked by the
 * "first/second/third step" comments below.
 */
export const updateEditorDatasetValidator = vine.compile(
    vine.object({
        // first step
        // main metadata language, alphanumeric code only
        language: vine
            .string()
            .trim()
            .regex(/^[a-zA-Z0-9]+$/),
        licenses: vine.array(vine.number()).minLength(1), // define at least one license for the new dataset
        // rights checkbox must have been accepted (posts the literal string 'true')
        rights: vine.string().in(['true']),
        // second step
        type: vine.string().trim().minLength(3).maxLength(255),
        creating_corporation: vine.string().trim().minLength(3).maxLength(255),
        // titles: requires both a 'main' and a 'translated' entry (custom rule below)
        titles: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255),
                    type: vine.enum(Object.values(TitleTypes)),
                    // translated titles must not reuse the dataset's main language
                    language: vine
                        .string()
                        .trim()
                        .minLength(2)
                        .maxLength(255)
                        .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
                }),
            )
            // .minLength(2)
            .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
        // descriptions: requires both an 'abstract' and a 'translated' entry
        descriptions: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(2500),
                    type: vine.enum(Object.values(DescriptionTypes)),
                    language: vine
                        .string()
                        .trim()
                        .minLength(2)
                        .maxLength(255)
                        .translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
                }),
            )
            .arrayContainsTypes({ typeA: 'abstract', typeB: 'translated' }),
        // at least one author, unique by e-mail
        authors: vine
            .array(
                vine.object({
                    email: vine
                        .string()
                        .trim()
                        .maxLength(255)
                        .email()
                        .normalizeEmail()
                        .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
                    // first name only mandatory for personal (non-corporate) names
                    first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
                    last_name: vine.string().trim().minLength(3).maxLength(255),
                    identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
                }),
            )
            .minLength(1)
            .distinct('email'),
        // contributors are optional but, when given, unique by e-mail
        contributors: vine
            .array(
                vine.object({
                    email: vine
                        .string()
                        .trim()
                        .maxLength(255)
                        .email()
                        .normalizeEmail()
                        .isUniquePerson({ table: 'persons', column: 'email', idField: 'id' }),
                    first_name: vine.string().trim().minLength(3).maxLength(255).optional().requiredWhen('name_type', '=', 'Personal'),
                    last_name: vine.string().trim().minLength(3).maxLength(255),
                    identifier_orcid: vine.string().trim().maxLength(255).orcid().optional(),
                    pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
                }),
            )
            .distinct('email')
            .optional(),
        // third step
        project_id: vine.number().optional(),
        // embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
        // embargo, when set, must lie at least 10 days in the future
        embargo_date: vine
            .date({
                formats: ['YYYY-MM-DD'],
            })
            .afterOrEqual((_field) => {
                return dayjs().add(10, 'day').format('YYYY-MM-DD');
            })
            .optional(),
        // geographic/temporal coverage; min/max pairs must be given together
        coverage: vine.object({
            x_min: vine.number(),
            x_max: vine.number(),
            y_min: vine.number(),
            y_max: vine.number(),
            elevation_absolut: vine.number().positive().optional(),
            elevation_min: vine.number().positive().optional().requiredIfExists('elevation_max'),
            elevation_max: vine.number().positive().optional().requiredIfExists('elevation_min'),
            // type: vine.enum(Object.values(DescriptionTypes)),
            depth_absolut: vine.number().negative().optional(),
            depth_min: vine.number().negative().optional().requiredIfExists('depth_max'),
            depth_max: vine.number().negative().optional().requiredIfExists('depth_min'),
            // NOTE(review): 'time_abolute' looks like a typo for 'time_absolute' — renaming
            // would change the accepted payload; confirm with the frontend before fixing.
            time_abolute: vine.date({ formats: { utc: true } }).optional(),
            time_min: vine
                .date({ formats: { utc: true } })
                .beforeField('time_max')
                .optional()
                .requiredIfExists('time_max'),
            time_max: vine
                .date({ formats: { utc: true } })
                .afterField('time_min')
                .optional()
                .requiredIfExists('time_min'),
        }),
        // optional related identifiers; value format checked against its type
        references: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255).validateReference({ typeField: 'type' }),
                    type: vine.enum(Object.values(ReferenceIdentifierTypes)),
                    relation: vine.enum(Object.values(RelationTypes)),
                    label: vine.string().trim().minLength(2).maxLength(255),
                }),
            )
            .optional(),
        // at least three distinct keywords
        subjects: vine
            .array(
                vine.object({
                    value: vine.string().trim().minLength(3).maxLength(255),
                    // pivot_contributor_type: vine.enum(Object.keys(ContributorTypes)),
                    language: vine.string().trim().minLength(2).maxLength(255),
                }),
            )
            .minLength(3)
            .distinct('value'),
    }),
);
// files: schema.array([rules.minLength(1)]).members(
// schema.file({
// size: '512mb',
// extnames: ['jpg', 'gif', 'png', 'tif', 'pdf', 'zip', 'fgb', 'nc', 'qml', 'ovr', 'gpkg', 'gml', 'gpx', 'kml', 'kmz', 'json'],
// }),
// ),
let messagesProvider = new SimpleMessagesProvider({
'minLength': '{{ field }} must be at least {{ min }} characters long',
@ -504,10 +369,8 @@ let messagesProvider = new SimpleMessagesProvider({
'files.array.minLength': 'At least {{ min }} file upload is required.',
'files.*.size': 'file size is to big',
'files.*.extnames': 'file extension is not supported',
'embargo_date.date.afterOrEqual': `Embargo date must be on or after ${dayjs().add(10, 'day').format('DD.MM.YYYY')}`,
});
createDatasetValidator.messagesProvider = messagesProvider;
updateDatasetValidator.messagesProvider = messagesProvider;
updateEditorDatasetValidator.messagesProvider = messagesProvider;
// export default createDatasetValidator;

View file

@ -1,28 +0,0 @@
// app/validators/project.ts
import vine from '@vinejs/vine';
/** Validates the payload for creating a new project. */
export const createProjectValidator = vine.compile(
    vine.object({
        // short machine-friendly slug: lowercase letters, digits and hyphens
        label: vine
            .string()
            .trim()
            .minLength(1)
            .maxLength(50)
            .regex(/^[a-z0-9-]+$/),
        // human-readable name; allows German umlauts, whitespace and hyphens
        name: vine
            .string()
            .trim()
            .minLength(3)
            .maxLength(255)
            .regex(/^[a-zA-Z0-9äöüßÄÖÜ\s-]+$/),
        // optional free-text description
        description: vine.string().trim().maxLength(255).minLength(5).optional(),
    }),
);
/** Validates the payload for updating an existing project. */
export const updateProjectValidator = vine.compile(
    vine.object({
        // label is NOT included since it's readonly
        // human-readable name; allows German umlauts, whitespace and hyphens
        name: vine
            .string()
            .trim()
            .minLength(3)
            .maxLength(255)
            .regex(/^[a-zA-Z0-9äöüßÄÖÜ\s-]+$/),
        // optional free-text description
        description: vine.string().trim().maxLength(255).minLength(5).optional(),
    }),
);

View file

@ -8,20 +8,20 @@ export const createRoleValidator = vine.compile(
vine.object({
name: vine
.string()
.isUnique({ table: 'roles', column: 'name' })
.trim()
.minLength(3)
.maxLength(255)
.isUnique({ table: 'roles', column: 'name' })
.regex(/^[a-zA-Z0-9]+$/), // Must be alphanumeric
.regex(/^[a-zA-Z0-9]+$/), //Must be alphanumeric with hyphens or underscores
display_name: vine
.string()
.isUnique({ table: 'roles', column: 'display_name' })
.trim()
.minLength(3)
.maxLength(255)
.isUnique({ table: 'roles', column: 'display_name' })
.regex(/^[a-zA-Z0-9]+$/),
description: vine.string().trim().escape().minLength(3).maxLength(255).optional(),
permissions: vine.array(vine.number()).minLength(1), // At least one permission required
permissions: vine.array(vine.number()).minLength(1), // define at least one permission for the new role
}),
);
@ -29,28 +29,21 @@ export const updateRoleValidator = vine.withMetaData<{ roleId: number }>().compi
vine.object({
name: vine
.string()
.trim()
.minLength(3)
.maxLength(255)
// .unique(async (db, value, field) => {
// const result = await db.from('roles').select('id').whereNot('id', field.meta.roleId).where('name', value).first();
// return result.length ? false : true;
// })
.isUnique({
table: 'roles',
column: 'name',
whereNot: (field) => field.meta.roleId,
})
.regex(/^[a-zA-Z0-9]+$/),
display_name: vine
.string()
.trim()
.minLength(3)
.maxLength(255)
.isUnique({
table: 'roles',
column: 'display_name',
whereNot: (field) => field.meta.roleId,
})
.regex(/^[a-zA-Z0-9]+$/),
.maxLength(255),
description: vine.string().trim().escape().minLength(3).maxLength(255).optional(),
permissions: vine.array(vine.number()).minLength(1), // At least one permission required
permissions: vine.array(vine.number()).minLength(1), // define at least one permission for the new role
}),
);

View file

@ -16,7 +16,7 @@ export const createUserValidator = vine.compile(
first_name: vine.string().trim().minLength(3).maxLength(255),
last_name: vine.string().trim().minLength(3).maxLength(255),
email: vine.string().maxLength(255).email().normalizeEmail().isUnique({ table: 'accounts', column: 'email' }),
new_password: vine.string().confirmed({ confirmationField: 'password_confirmation' }).trim().minLength(3).maxLength(60),
password: vine.string().confirmed().trim().minLength(3).maxLength(60),
roles: vine.array(vine.number()).minLength(1), // define at least one role for the new user
}),
);
@ -42,7 +42,7 @@ export const updateUserValidator = vine.withMetaData<{ objId: number }>().compil
.email()
.normalizeEmail()
.isUnique({ table: 'accounts', column: 'email', whereNot: (field) => field.meta.objId }),
new_password: vine.string().confirmed({ confirmationField: 'password_confirmation' }).trim().minLength(3).maxLength(60).optional(),
password: vine.string().confirmed().trim().minLength(3).maxLength(60).optional(),
roles: vine.array(vine.number()).minLength(1), // define at least one role for the new user
}),
);

View file

@ -5,23 +5,7 @@ LogSyslog no
LogVerbose yes
DatabaseDirectory /var/lib/clamav
LocalSocket /var/run/clamav/clamd.socket
# LocalSocketMode 666
# Optional: allow multiple threads
MaxThreads 20
# Disable TCP socket
# TCPSocket 0
# TCP port address.
# Default: no
# TCPSocket 3310
# TCP address.
# By default we bind to INADDR_ANY, probably not wise.
# Enable the following to provide some degree of protection
# from the outside world.
# Default: no
# TCPAddr 127.0.0.1
Foreground no
PidFile /var/run/clamav/clamd.pid
# LocalSocketGroup node # Changed from 'clamav'
# User node # Changed from 'clamav' - clamd runs as clamav user
LocalSocketGroup node
User node

View file

@ -1,482 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command fix-dataset-cross-references
| DONE: create commands/fix_dataset_cross_references.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import type { CommandOptions } from '@adonisjs/core/types/ace';
import { DateTime } from 'luxon';
import Dataset from '#models/dataset';
import DatasetReference from '#models/dataset_reference';
import AppConfig from '#models/appconfig';
// import env from '#start/env';
/**
 * One detected missing reverse reference between two published datasets:
 * the "source" dataset references the "target", but the target carries no
 * reverse reference back to the source.
 */
interface MissingCrossReference {
    sourceDatasetId: number; // dataset holding the forward reference
    targetDatasetId: number; // dataset the forward reference points to
    sourcePublishId: number | null;
    targetPublishId: number | null;
    sourceDoi: string | null; // DOI of the source dataset's identifier, if any
    targetDoi: string | null; // DOI of the target dataset's identifier, if any
    referenceType: string; // forward reference type ('DOI' or 'URL')
    relation: string; // forward relation, e.g. 'IsNewVersionOf'
    doi: string | null; // raw forward reference value (DOI or URL)
    reverseRelation: string; // relation the missing reverse reference should carry
    sourceReferenceLabel: string | null; // forward reference label, reused for the reverse one
}
/**
 * Ace command that scans published datasets for forward references to other
 * Tethys datasets and reports (or, with --fix, creates) the missing reverse
 * references.
 */
export default class DetectMissingCrossReferences extends BaseCommand {
    static commandName = 'detect:missing-cross-references';
    static description = 'Detect missing bidirectional cross-references between versioned datasets';
    public static needsApplication = true;

    // When set, missing reverse references are written to the database
    // instead of only being reported.
    @flags.boolean({ alias: 'f', description: 'Fix missing cross-references automatically' })
    public fix: boolean = false;

    @flags.boolean({ alias: 'v', description: 'Verbose output' })
    public verbose: boolean = false;

    @flags.number({ alias: 'p', description: 'Filter by specific publish_id (source or target dataset)' })
    public publish_id?: number;

    // example: node ace detect:missing-cross-references --verbose -p 227 //if you want to filter by specific publish_id with details
    // example: node ace detect:missing-cross-references --verbose
    // example: node ace detect:missing-cross-references --fix -p 227 //if you want to filter by specific publish_id and fix it
    // example: node ace detect:missing-cross-references
    public static options: CommandOptions = {
        startApp: true,
        staysAlive: false,
    };

    // Define the allowed relations that we want to process.
    // These form symmetric inverse pairs; see getReverseRelation() for the mapping.
    private readonly ALLOWED_RELATIONS = [
        'IsNewVersionOf',
        'IsPreviousVersionOf',
        'IsVariantFormOf',
        'IsOriginalFormOf',
        'Continues',
        'IsContinuedBy',
        'HasPart',
        'IsPartOf',
    ];
    // private readonly ALLOWED_RELATIONS = ['IsPreviousVersionOf', 'IsOriginalFormOf'];
/**
 * Entry point: detect missing reverse references and either report them
 * (persisting the count to AppConfig for other app components) or, with
 * --fix, create them and reset the stored count to zero.
 */
async run() {
    this.logger.info('🔍 Detecting missing cross-references...');
    this.logger.info(`📋 Processing only these relations: ${this.ALLOWED_RELATIONS.join(', ')}`);
    if (this.publish_id) {
        this.logger.info(`Filtering by publish_id: ${this.publish_id}`);
    }
    try {
        const missingReferences = await this.findMissingCrossReferences();
        // Store count in AppConfig if not fixing and count >= 1
        if (!this.fix && missingReferences.length >= 1) {
            await this.storeMissingCrossReferencesCount(missingReferences.length);
        }
        if (missingReferences.length === 0) {
            const filterMsg = this.publish_id ? ` for publish_id ${this.publish_id}` : '';
            this.logger.success(`All cross-references are properly linked for the specified relations${filterMsg}!`);
            // Clear the count if no missing references
            if (!this.fix) {
                await this.storeMissingCrossReferencesCount(0);
            }
            return;
        }
        const filterMsg = this.publish_id ? ` (filtered by publish_id ${this.publish_id})` : '';
        this.logger.warning(`Found ${missingReferences.length} missing cross-reference(s)${filterMsg}:`);
        // Show brief list if not verbose mode
        if (!this.verbose) {
            for (const missing of missingReferences) {
                const sourceDoi = missing.sourceDoi ? ` DOI: ${missing.sourceDoi}` : '';
                const targetDoi = missing.targetDoi ? ` DOI: ${missing.targetDoi}` : '';
                this.logger.info(
                    `Dataset ${missing.sourceDatasetId} (Publish ID: ${missing.sourcePublishId}${sourceDoi}) ${missing.relation} Dataset ${missing.targetDatasetId} (Publish ID: ${missing.targetPublishId}${targetDoi}) → missing reverse: ${missing.reverseRelation}`,
                );
            }
        } else {
            // Verbose mode - show detailed info
            for (const missing of missingReferences) {
                this.logger.info(
                    `Dataset ${missing.sourceDatasetId} references ${missing.targetDatasetId}, but reverse reference is missing`,
                );
                this.logger.info(` - Reference type: ${missing.referenceType}`);
                this.logger.info(` - Relation: ${missing.relation}`);
                this.logger.info(` - DOI: ${missing.doi}`);
            }
        }
        if (this.fix) {
            await this.fixMissingReferences(missingReferences);
            // Clear the count after fixing
            await this.storeMissingCrossReferencesCount(0);
            this.logger.success('All missing cross-references have been fixed!');
        } else {
            if (this.verbose) {
                this.printMissingReferencesList(missingReferences);
            }
            this.logger.info('💡 Run with --fix flag to automatically create missing cross-references');
            if (this.publish_id) {
                this.logger.info(`🎯 Currently filtering by publish_id: ${this.publish_id}`);
            }
        }
    } catch (error) {
        this.logger.error('Error detecting missing cross-references:', error);
        process.exit(1);
    }
}
/**
 * Persist the current number of missing cross-references in the AppConfig
 * table (appid 'commands' / key 'missing_cross_references_count') so other
 * parts of the application can display it. Failures are logged, not thrown.
 */
private async storeMissingCrossReferencesCount(count: number): Promise<void> {
    try {
        await AppConfig.updateOrCreate(
            {
                appid: 'commands',
                configkey: 'missing_cross_references_count',
            },
            {
                configvalue: count.toString(),
            },
        );
        this.logger.info(`📊 Stored missing cross-references count in database: ${count}`);
    } catch (error) {
        this.logger.error('Failed to store missing cross-references count:', error);
    }
}
/**
 * Scan all forward references from published datasets (allowed relations
 * only) that point at Tethys DOIs/URLs, resolve each target dataset via its
 * DOI identifier, and collect every pair where the target lacks the
 * corresponding reverse reference.
 */
private async findMissingCrossReferences(): Promise<MissingCrossReference[]> {
    const missingReferences: {
        sourceDatasetId: number;
        targetDatasetId: number;
        sourcePublishId: number | null;
        targetPublishId: number | null;
        sourceDoi: string | null;
        targetDoi: string | null;
        referenceType: string;
        relation: string;
        doi: string | null;
        reverseRelation: string;
        sourceReferenceLabel: string | null;
    }[] = [];
    this.logger.info('📊 Querying dataset references...');
    // Find all references that point to Tethys datasets (DOI or URL containing tethys DOI)
    // Only from datasets that are published AND only for allowed relations
    const tethysReferencesQuery = DatasetReference.query()
        .whereIn('type', ['DOI', 'URL'])
        .whereIn('relation', this.ALLOWED_RELATIONS) // Only process allowed relations
        .where((query) => {
            query.where('value', 'like', '%doi.org/10.24341/tethys.%').orWhere('value', 'like', '%tethys.at/dataset/%');
        })
        .preload('dataset', (datasetQuery) => {
            datasetQuery.preload('identifier');
        })
        .whereHas('dataset', (datasetQuery) => {
            datasetQuery.where('server_state', 'published');
        });
    // optional -p filter: only references whose source dataset has this publish_id
    if (typeof this.publish_id === 'number') {
        tethysReferencesQuery.whereHas('dataset', (datasetQuery) => {
            datasetQuery.where('publish_id', this.publish_id as number);
        });
    }
    const tethysReferences = await tethysReferencesQuery.exec();
    this.logger.info(`🔗 Found ${tethysReferences.length} Tethys references from published datasets (allowed relations only)`);
    let processedCount = 0;
    let skippedCount = 0;
    for (const reference of tethysReferences) {
        processedCount++;
        // if (this.verbose && processedCount % 10 === 0) {
        //     this.logger.info(`📈 Processed ${processedCount}/${tethysReferences.length} references...`);
        // }
        // Double-check that this relation is in our allowed list (safety check)
        if (!this.ALLOWED_RELATIONS.includes(reference.relation)) {
            skippedCount++;
            if (this.verbose) {
                this.logger.info(`⏭️ Skipping relation "${reference.relation}" - not in allowed list`);
            }
            continue;
        }
        // Extract dataset publish_id from DOI or URL
        // const targetDatasetPublish = this.extractDatasetPublishIdFromReference(reference.value);
        // Extract DOI from reference URL
        const doi = this.extractDoiFromReference(reference.value);
        // if (!targetDatasetPublish) {
        //     if (this.verbose) {
        //         this.logger.warning(`Could not extract publish ID from: ${reference.value}`);
        //     }
        //     continue;
        // }
        if (!doi) {
            if (this.verbose) {
                this.logger.warning(`Could not extract DOI from: ${reference.value}`);
            }
            continue;
        }
        // // Check if target dataset exists and is published
        // const targetDataset = await Dataset.query()
        //     .where('publish_id', targetDatasetPublish)
        //     .where('server_state', 'published')
        //     .preload('identifier')
        //     .first();
        // Check if target dataset exists and is published by querying via identifier
        const targetDataset = await Dataset.query()
            .where('server_state', 'published')
            .whereHas('identifier', (query) => {
                query.where('value', doi);
            })
            .preload('identifier')
            .first();
        if (!targetDataset) {
            if (this.verbose) {
                this.logger.warning(`⚠️ Target dataset with publish_id ${doi} not found or not published`);
            }
            continue;
        }
        // Ensure we have a valid source dataset with proper preloading
        if (!reference.dataset) {
            this.logger.warning(`⚠️ Source dataset ${reference.document_id} not properly loaded, skipping...`);
            continue;
        }
        // Check if reverse reference exists
        // NOTE(review): `reference.dataset.identifier.value` is accessed unguarded here,
        // while the push below null-checks `reference.dataset.identifier` — a source
        // dataset without an identifier would throw. Confirm identifiers are mandatory
        // for published datasets.
        const reverseReferenceExists = await this.checkReverseReferenceExists(
            targetDataset.id,
            reference.document_id,
            reference.relation,
            reference.dataset.identifier.value
        );
        if (!reverseReferenceExists) {
            const reverseRelation = this.getReverseRelation(reference.relation);
            if (reverseRelation) {
                // Only add if we have a valid reverse relation
                missingReferences.push({
                    sourceDatasetId: reference.document_id,
                    targetDatasetId: targetDataset.id,
                    sourcePublishId: reference.dataset.publish_id || null,
                    targetPublishId: targetDataset.publish_id || null,
                    referenceType: reference.type,
                    relation: reference.relation,
                    doi: reference.value,
                    reverseRelation: reverseRelation,
                    sourceDoi: reference.dataset.identifier ? reference.dataset.identifier.value : null,
                    targetDoi: targetDataset.identifier ? targetDataset.identifier.value : null,
                    sourceReferenceLabel: reference.label || null,
                });
            }
        }
    }
    this.logger.info(`✅ Processed ${processedCount} references (${skippedCount} skipped due to relation filtering)`);
    return missingReferences;
}
/**
 * Extract a DOI (e.g. "10.24341/tethys.99.2") from a reference value that may
 * be a bare DOI or a DOI URL ("https://doi.org/10.24341/tethys.99.2").
 *
 * Returns null when the value contains no DOI. The previous pattern used a
 * catch-all `(.+)` group, so any non-DOI string (e.g. a plain dataset URL
 * "https://tethys.at/dataset/107") was returned verbatim and later failed the
 * identifier lookup with a misleading "not found" warning. A DOI always starts
 * with "10." followed by a 4-9 digit registrant code and a suffix.
 */
private extractDoiFromReference(reference: string): string | null {
    const doiPattern = /(?:https?:\/\/)?(?:dx\.)?(?:doi\.org\/)?(10\.\d{4,9}\/\S+)/i;
    const match = reference.match(doiPattern);
    if (match && match[1]) {
        return match[1]; // Returns just "10.24341/tethys.99.2"
    }
    // No DOI present (e.g. tethys.at/dataset/<id> URLs); caller skips with a warning.
    return null;
}
/**
 * Pull the numeric publish id out of a Tethys DOI
 * ("https://doi.org/10.24341/tethys.107" -> 107) or a dataset URL
 * ("https://tethys.at/dataset/107" -> 107). Returns null when neither
 * form matches.
 */
private extractDatasetPublishIdFromReference(value: string): number | null {
    const idPatterns = [/10\.24341\/tethys\.(\d+)/, /tethys\.at\/dataset\/(\d+)/];
    for (const pattern of idPatterns) {
        const hit = value.match(pattern);
        if (hit) {
            return parseInt(hit[1]);
        }
    }
    return null;
}
/**
 * Check whether the target dataset already carries a reverse reference
 * pointing back at the source dataset.
 *
 * Matching is deliberately loose: any reference on the target with the
 * expected reverse relation whose value ends with the source dataset's
 * identifier counts as existing (the source's document_id is not matched —
 * see the commented-out filter below). Relations without a configured
 * inverse are treated as "exists" so they are skipped upstream.
 */
private async checkReverseReferenceExists(
    targetDatasetId: number,
    sourceDatasetId: number,
    originalRelation: string,
    sourceDatasetIdentifier: string | null,
): Promise<boolean> {
    const reverseRelation = this.getReverseRelation(originalRelation);
    if (!reverseRelation) {
        return true; // If no reverse relation is defined, consider it as "exists" to skip processing
    }
    // Only check for reverse references where the source dataset is also published
    const reverseReference = await DatasetReference.query()
        // We don't filter by source document_id here to find any incoming reference from any published dataset
        .where('document_id', targetDatasetId)
        // .where('related_document_id', sourceDatasetId) // Ensure it's an incoming reference
        .where('relation', reverseRelation)
        .where('value', 'like', `%${sourceDatasetIdentifier}`) // Basic check to ensure it points back to source dataset
        .first();
    return !!reverseReference;
}
/**
 * Return the inverse DataCite relation for a supported relation type, or
 * null when no inverse is configured (such relations are skipped by the
 * detection pass).
 */
private getReverseRelation(relation: string): string | null {
    // Symmetric inverse pairs: each side maps to the other.
    const inversePairs: Array<[string, string]> = [
        ['IsNewVersionOf', 'IsPreviousVersionOf'],
        ['IsVariantFormOf', 'IsOriginalFormOf'],
        ['Continues', 'IsContinuedBy'],
        ['HasPart', 'IsPartOf'],
    ];
    for (const [forward, backward] of inversePairs) {
        if (relation === forward) {
            return backward;
        }
        if (relation === backward) {
            return forward;
        }
    }
    return null;
}
/**
 * Print a detailed, human-readable report of all missing reverse references
 * to stdout (used in verbose, report-only mode).
 */
private printMissingReferencesList(missingReferences: MissingCrossReference[]) {
    console.log('┌─────────────────────────────────────────────────────────────────────────────────┐');
    console.log('│ MISSING CROSS-REFERENCES REPORT │');
    console.log('│ (Published Datasets Only - Filtered Relations) │');
    console.log('└─────────────────────────────────────────────────────────────────────────────────┘');
    console.log();
    missingReferences.forEach((missing, index) => {
        console.log(
            `${index + 1}. Dataset ${missing.sourceDatasetId} (Publish ID: ${missing.sourcePublishId} Identifier: ${missing.sourceDoi})
${missing.relation} Dataset ${missing.targetDatasetId} (Publish ID: ${missing.targetPublishId} Identifier: ${missing.targetDoi})`,
        );
        console.log(` ├─ Current relation: "${missing.relation}"`);
        console.log(` ├─ Missing reverse relation: "${missing.reverseRelation}"`);
        console.log(` ├─ Reference type: ${missing.referenceType}`);
        console.log(` └─ DOI/URL: ${missing.doi}`);
        console.log();
    });
    console.log('┌─────────────────────────────────────────────────────────────────────────────────┐');
    console.log(`│ SUMMARY: ${missingReferences.length} missing reverse reference(s) detected │`);
    console.log(`│ Processed relations: ${this.ALLOWED_RELATIONS.join(', ')}`);
    console.log('└─────────────────────────────────────────────────────────────────────────────────┘');
}
/**
 * Create the missing reverse references in the database.
 *
 * For each detected gap: re-verify both datasets are still published, backfill
 * related_document_id on the forward reference if absent, then insert the
 * reverse reference on the target dataset and bump its server_date_modified so
 * downstream consumers (e.g. the search index) pick up the change. Errors are
 * counted per item and do not abort the batch.
 */
private async fixMissingReferences(missingReferences: MissingCrossReference[]) {
    this.logger.info('🔧 Creating missing cross-references in database...');
    let fixedCount = 0;
    let errorCount = 0;
    for (const [index, missing] of missingReferences.entries()) {
        try {
            // Get both source and target datasets
            const sourceDataset = await Dataset.query()
                .where('id', missing.sourceDatasetId)
                .where('server_state', 'published')
                .preload('identifier')
                .preload('titles') // Preload titles to get mainTitle
                .first();
            const targetDataset = await Dataset.query().where('id', missing.targetDatasetId).where('server_state', 'published').first();
            if (!sourceDataset) {
                this.logger.warning(`⚠️ Source dataset ${missing.sourceDatasetId} not found or not published, skipping...`);
                errorCount++;
                continue;
            }
            if (!targetDataset) {
                this.logger.warning(`⚠️ Target dataset ${missing.targetDatasetId} not found or not published, skipping...`);
                errorCount++;
                continue;
            }
            // **NEW: Update the original reference if related_document_id is missing**
            const originalReference = await DatasetReference.query()
                .where('document_id', missing.sourceDatasetId)
                .where('relation', missing.relation)
                .where('value', 'like', `%${missing.targetDoi}%`)
                .first();
            if (originalReference && !originalReference.related_document_id) {
                originalReference.related_document_id = missing.targetDatasetId;
                await originalReference.save();
                if (this.verbose) {
                    this.logger.info(`🔗 Updated original reference with related_document_id: ${missing.targetDatasetId}`);
                }
            }
            // Create the reverse reference using the referenced_by relationship
            // Example: If Dataset 297 IsNewVersionOf Dataset 144
            // We create an incoming reference for Dataset 144 that shows Dataset 297 IsPreviousVersionOf it
            const reverseReference = new DatasetReference();
            // Don't set document_id - this creates an incoming reference via related_document_id
            reverseReference.document_id = missing.targetDatasetId; //
            reverseReference.related_document_id = missing.sourceDatasetId;
            reverseReference.type = 'DOI';
            reverseReference.relation = missing.reverseRelation;
            // Use the source dataset's DOI for the value (what's being referenced)
            if (sourceDataset.identifier?.value) {
                reverseReference.value = `https://doi.org/${sourceDataset.identifier.value}`;
            } else {
                // Fallback to dataset URL if no DOI
                reverseReference.value = `https://tethys.at/dataset/${sourceDataset.publish_id || missing.sourceDatasetId}`;
            }
            // Use the source dataset's main title for the label
            //reverseReference.label = sourceDataset.mainTitle || `Dataset ${missing.sourceDatasetId}`;
            // get label of forward reference
            reverseReference.label = missing.sourceReferenceLabel || sourceDataset.mainTitle || `Dataset ${missing.sourceDatasetId}`;
            // reverseReference.notes = `Auto-created by detect:missing-cross-references command on ${DateTime.now().toISO()} to fix missing bidirectional reference.`;
            // Save the new reverse reference
            // Also save 'server_date_modified' on target dataset to trigger any downstream updates (e.g. search index)
            targetDataset.server_date_modified = DateTime.now();
            await targetDataset.save();
            await reverseReference.save();
            fixedCount++;
            if (this.verbose) {
                this.logger.info(
                    `✅ [${index + 1}/${missingReferences.length}] Created reverse reference: Dataset ${missing.sourceDatasetId} -> ${missing.targetDatasetId} (${missing.reverseRelation})`,
                );
            } else if ((index + 1) % 10 === 0) {
                this.logger.info(`📈 Fixed ${fixedCount}/${missingReferences.length} references...`);
            }
        } catch (error) {
            this.logger.error(
                `❌ Error creating reverse reference for datasets ${missing.targetDatasetId} -> ${missing.sourceDatasetId}:`,
                error,
            );
            errorCount++;
        }
    }
    this.logger.info(`📊 Fix completed: ${fixedCount} created, ${errorCount} errors`);
}
}

View file

@ -4,7 +4,7 @@
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2';
import Dataset from '#models/dataset';
import XmlModel from '#app/Library/DatasetXmlSerializer';
import XmlModel from '#app/Library/XmlModel';
import { readFileSync } from 'fs';
import SaxonJS from 'saxon-js';
import { Client } from '@opensearch-project/opensearch';
@ -12,8 +12,10 @@ import { getDomain } from '#app/utils/utility-functions';
import { BaseCommand, flags } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import env from '#start/env';
// import db from '@adonisjs/lucid/services/db';
// import { default as Dataset } from '#models/dataset';
import logger from '@adonisjs/core/services/logger';
import { DateTime } from 'luxon';
const opensearchNode = env.get('OPENSEARCH_HOST', 'localhost');
const client = new Client({ node: `${opensearchNode}` }); // replace with your OpenSearch endpoint
@ -28,10 +30,11 @@ export default class IndexDatasets extends BaseCommand {
public publish_id: number;
public static options: CommandOptions = {
startApp: true, // Ensures the IoC container is ready to use
staysAlive: false, // Command exits after running
startApp: true,
staysAlive: false,
};
async run() {
logger.debug('Hello world!');
// const { default: Dataset } = await import('#models/dataset');
@ -41,12 +44,10 @@ export default class IndexDatasets extends BaseCommand {
const index_name = 'tethys-records';
for (var dataset of datasets) {
const shouldUpdate = await this.shouldUpdateDataset(dataset, index_name);
if (shouldUpdate) {
await this.indexDocument(dataset, index_name, proc);
} else {
logger.info(`Dataset with publish_id ${dataset.publish_id} is up to date, skipping indexing`);
}
// Logger.info(`File publish_id ${dataset.publish_id}`);
// const jsonString = await this.getJsonString(dataset, proc);
// console.log(jsonString);
await this.indexDocument(dataset, index_name, proc);
}
}
@ -64,46 +65,6 @@ export default class IndexDatasets extends BaseCommand {
return await query.exec();
}
private async shouldUpdateDataset(dataset: Dataset, index_name: string): Promise<boolean> {
try {
// Check if publish_id exists before proceeding
if (!dataset.publish_id) {
// Return true to update since document doesn't exist in OpenSearch yet
return true;
}
// Get the existing document from OpenSearch
const response = await client.get({
index: index_name,
id: dataset.publish_id?.toString(),
});
const existingDoc = response.body._source;
// Compare server_date_modified
if (existingDoc && existingDoc.server_date_modified) {
// Convert Unix timestamp (seconds) to milliseconds for DateTime.fromMillis()
const existingModified = DateTime.fromMillis(Number(existingDoc.server_date_modified) * 1000);
const currentModified = dataset.server_date_modified;
// Only update if the dataset has been modified more recently
if (currentModified <= existingModified) {
return false;
}
}
return true;
} catch (error) {
// If document doesn't exist or other error, we should index it
if (error.statusCode === 404) {
logger.info(`Dataset with publish_id ${dataset.publish_id} not found in index, will create new document`);
return true;
}
logger.warn(`Error checking existing document for publish_id ${dataset.publish_id}: ${error.message}`);
return true; // Index anyway if we can't determine the status
}
}
private async indexDocument(dataset: Dataset, index_name: string, proc: Buffer): Promise<void> {
try {
const doc = await this.getJsonString(dataset, proc);
@ -117,8 +78,7 @@ export default class IndexDatasets extends BaseCommand {
});
logger.info(`dataset with publish_id ${dataset.publish_id} successfully indexed`);
} catch (error) {
logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.
Error: ${error.message}`);
logger.error(`An error occurred while indexing dataset with publish_id ${dataset.publish_id}.`);
}
}
@ -151,16 +111,19 @@ export default class IndexDatasets extends BaseCommand {
}
private async getDatasetXmlDomNode(dataset: Dataset): Promise<XMLBuilder | null> {
const serializer = new XmlModel(dataset).enableCaching().excludeEmptyFields();
const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
xmlModel.excludeEmptyFields();
xmlModel.caching = true;
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
xmlModel.xmlCache = dataset.xmlCache;
}
// return cache.toXmlDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
// return cache.getDomDocument();
const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
return domDocument;
}
private addSpecInformation(domNode: XMLBuilder, information: string) {

View file

@ -1,346 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command list-updateable-datacite
| DONE: create commands/list_updeatable_datacite.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import Dataset from '#models/dataset';
import { DoiClient } from '#app/Library/Doi/DoiClient';
import env from '#start/env';
import logger from '@adonisjs/core/services/logger';
import { DateTime } from 'luxon';
import pLimit from 'p-limit';
/**
 * Ace command that lists all published datasets whose DataCite DOI metadata
 * is out of date. Supports count-only, ids-only and verbose output modes.
 */
export default class ListUpdateableDatacite extends BaseCommand {
    static commandName = 'list:updateable-datacite';
    static description = 'List all datasets that need DataCite DOI updates';
    public static needsApplication = true;

    // private chunkSize = 100; // Set chunk size for pagination

    @flags.boolean({ alias: 'v', description: 'Verbose output showing detailed information' })
    public verbose: boolean = false;

    @flags.boolean({ alias: 'c', description: 'Show only count of updatable datasets' })
    public countOnly: boolean = false;

    @flags.boolean({ alias: 'i', description: 'Show only publish IDs (useful for scripting)' })
    public idsOnly: boolean = false;

    @flags.number({ description: 'Chunk size for processing datasets (default: 50)' })
    public chunkSize: number = 50;

    //example: node ace list:updateable-datacite
    //example: node ace list:updateable-datacite --verbose
    //example: node ace list:updateable-datacite --count-only
    //example: node ace list:updateable-datacite --ids-only
    //example: node ace list:updateable-datacite --chunk-size 50
    public static options: CommandOptions = {
        startApp: true,
        // FIX: the CommandOptions property is `staysAlive` (as used by the other
        // commands in this codebase); the previous `stayAlive` key was not part
        // of the options contract and was silently ignored.
        staysAlive: false,
    };
async run() {
const prefix = env.get('DATACITE_PREFIX', '');
const base_domain = env.get('BASE_DOMAIN', '');
if (!prefix || !base_domain) {
logger.error('Missing DATACITE_PREFIX or BASE_DOMAIN environment variables');
return;
}
// Prevent conflicting flags
if ((this.verbose && this.countOnly) || (this.verbose && this.idsOnly)) {
logger.error('Flags --verbose cannot be combined with --count-only or --ids-only');
return;
}
const chunkSize = this.chunkSize || 50;
let page = 1;
let hasMoreDatasets = true;
let totalProcessed = 0;
const updatableDatasets: Dataset[] = [];
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processing datasets in chunks of ${chunkSize}...`);
}
while (hasMoreDatasets) {
const datasets = await this.getDatasets(page, chunkSize);
if (datasets.length === 0) {
hasMoreDatasets = false;
break;
}
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processing chunk ${page} (${datasets.length} datasets)...`);
}
const chunkUpdatableDatasets = await this.processChunk(datasets);
updatableDatasets.push(...chunkUpdatableDatasets);
totalProcessed += datasets.length;
page += 1;
if (datasets.length < chunkSize) {
hasMoreDatasets = false;
}
}
if (!this.countOnly && !this.idsOnly) {
logger.info(`Processed ${totalProcessed} datasets total, found ${updatableDatasets.length} that need updates`);
}
if (this.countOnly) {
console.log(updatableDatasets.length);
} else if (this.idsOnly) {
updatableDatasets.forEach((dataset) => console.log(dataset.publish_id));
} else if (this.verbose) {
await this.showVerboseOutput(updatableDatasets);
} else {
this.showSimpleOutput(updatableDatasets);
}
}
/**
* Processes a chunk of datasets to determine which ones need DataCite updates
*
* This method handles parallel processing of datasets within a chunk, providing
* efficient error handling and filtering of results.
*
* @param datasets - Array of Dataset objects to process
* @returns Promise<Dataset[]> - Array of datasets that need updates
*/
// private async processChunk(datasets: Dataset[]): Promise<Dataset[]> {
// // Process datasets in parallel using Promise.allSettled for better error handling
// //
// // Why Promise.allSettled vs Promise.all?
// // - Promise.all fails fast: if ANY promise rejects, the entire operation fails
// // - Promise.allSettled waits for ALL promises: some can fail, others succeed
// // - This is crucial for batch processing where we don't want one bad dataset
// // to stop processing of the entire chunk
// const results = await Promise.allSettled(
// datasets.map(async (dataset) => {
// try {
// // Check if this specific dataset needs a DataCite update
// const needsUpdate = await this.shouldUpdateDataset(dataset);
// // Return the dataset if it needs update, null if it doesn't
// // This creates a sparse array that we'll filter later
// return needsUpdate ? dataset : null;
// } catch (error) {
// // Error handling for individual dataset checks
// //
// // Log warnings only if we're not in silent modes (count-only or ids-only)
// // This prevents log spam when running automated scripts
// if (!this.countOnly && !this.idsOnly) {
// logger.warn(`Error checking dataset ${dataset.publish_id}: ${error.message}`);
// }
// // IMPORTANT DECISION: Return the dataset anyway if we can't determine status
// //
// // Why? It's safer to include a dataset that might not need updating
// // than to miss one that actually does need updating. This follows the
// // "fail-safe" principle - if we're unsure, err on the side of caution
// return dataset;
// }
// }),
// );
// // Filter and extract results from Promise.allSettled response
// //
// // Promise.allSettled returns an array of objects with this structure:
// // - { status: 'fulfilled', value: T } for successful promises
// // - { status: 'rejected', reason: Error } for failed promises
// //
// // We need to:
// // 1. Only get fulfilled results (rejected ones are already handled above)
// // 2. Filter out null values (datasets that don't need updates)
// // 3. Extract the actual Dataset objects from the wrapper
// return results
// .filter(
// (result): result is PromiseFulfilledResult<Dataset | null> =>
// // Type guard: only include fulfilled results that have actual values
// // This filters out:
// // - Rejected promises (shouldn't happen due to try/catch, but safety first)
// // - Fulfilled promises that returned null (datasets that don't need updates)
// result.status === 'fulfilled' && result.value !== null,
// )
// .map((result) => result.value!); // Extract the Dataset from the wrapper
// // The ! is safe here because we filtered out null values above
// }
private async processChunk(datasets: Dataset[]): Promise<Dataset[]> {
// Limit concurrency to avoid API flooding (e.g., max 5 at once)
const limit = pLimit(5);
const tasks = datasets.map((dataset) =>
limit(async () => {
try {
const needsUpdate = await this.shouldUpdateDataset(dataset);
return needsUpdate ? dataset : null;
} catch (error) {
if (!this.countOnly && !this.idsOnly) {
logger.warn(
`Error checking dataset ${dataset.publish_id}: ${
error instanceof Error ? error.message : JSON.stringify(error)
}`,
);
}
// Fail-safe: include dataset if uncertain
return dataset;
}
}),
);
const results = await Promise.allSettled(tasks);
return results
.filter((result): result is PromiseFulfilledResult<Dataset | null> => result.status === 'fulfilled' && result.value !== null)
.map((result) => result.value!);
}
private async getDatasets(page: number, chunkSize: number): Promise<Dataset[]> {
return await Dataset.query()
.orderBy('publish_id', 'asc')
.preload('identifier')
.preload('xmlCache')
.preload('titles')
.where('server_state', 'published')
.whereHas('identifier', (identifierQuery) => {
identifierQuery.where('type', 'doi');
})
.forPage(page, chunkSize); // Get files for the current page
}
private async shouldUpdateDataset(dataset: Dataset): Promise<boolean> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
if (!doiIdentifier || doiIdentifier.type !== 'doi') {
return false;
}
const datasetModified =
dataset.server_date_modified instanceof DateTime
? dataset.server_date_modified
: DateTime.fromJSDate(dataset.server_date_modified);
if (!datasetModified) {
return true;
}
if (datasetModified > DateTime.now()) {
return false;
}
const doiClient = new DoiClient();
const DOI_CHECK_TIMEOUT = 300; // ms
const doiLastModified = await Promise.race([
doiClient.getDoiLastModified(doiIdentifier.value),
this.createTimeoutPromise(DOI_CHECK_TIMEOUT),
]).catch(() => null);
if (!doiLastModified) {
// If uncertain, better include dataset for update
return true;
}
const doiModified = DateTime.fromJSDate(doiLastModified);
if (datasetModified > doiModified) {
const diffInSeconds = Math.abs(datasetModified.diff(doiModified, 'seconds').seconds);
const toleranceSeconds = 600;
return diffInSeconds > toleranceSeconds;
}
return false;
} catch (error) {
return true; // safer: include dataset if unsure
}
}
/**
* Create a timeout promise for API calls
*/
private createTimeoutPromise(timeoutMs: number): Promise<never> {
return new Promise((_, reject) => {
setTimeout(() => reject(new Error(`API call timeout after ${timeoutMs}ms`)), timeoutMs);
});
}
private showSimpleOutput(updatableDatasets: Dataset[]): void {
if (updatableDatasets.length === 0) {
console.log('No datasets need DataCite updates.');
return;
}
console.log(`\nFound ${updatableDatasets.length} dataset(s) that need DataCite updates:\n`);
updatableDatasets.forEach((dataset) => {
console.log(`publish_id ${dataset.publish_id} needs update - ${dataset.mainTitle || 'Untitled'}`);
});
console.log(`\nTo update these datasets, run:`);
console.log(` node ace update:datacite`);
console.log(`\nOr update specific datasets:`);
console.log(` node ace update:datacite -p <publish_id>`);
}
private async showVerboseOutput(updatableDatasets: Dataset[]): Promise<void> {
if (updatableDatasets.length === 0) {
console.log('No datasets need DataCite updates.');
return;
}
console.log(`\nFound ${updatableDatasets.length} dataset(s) that need DataCite updates:\n`);
for (const dataset of updatableDatasets) {
await this.showDatasetDetails(dataset);
}
console.log(`\nSummary: ${updatableDatasets.length} datasets need updates`);
}
private async showDatasetDetails(dataset: Dataset): Promise<void> {
try {
let doiIdentifier = dataset.identifier;
if (!doiIdentifier) {
await dataset.load('identifier');
doiIdentifier = dataset.identifier;
}
const doiValue = doiIdentifier?.value || 'N/A';
const datasetModified = dataset.server_date_modified;
// Get DOI info from DataCite
const doiClient = new DoiClient();
const doiLastModified = await doiClient.getDoiLastModified(doiValue);
const doiState = await doiClient.getDoiState(doiValue);
console.log(`┌─ Dataset ${dataset.publish_id} ───────────────────────────────────────────────────────────────`);
console.log(`│ Title: ${dataset.mainTitle || 'Untitled'}`);
console.log(`│ DOI: ${doiValue}`);
console.log(`│ DOI State: ${doiState || 'Unknown'}`);
console.log(`│ Dataset Modified: ${datasetModified ? datasetModified.toISO() : 'N/A'}`);
console.log(`│ DOI Modified: ${doiLastModified ? DateTime.fromJSDate(doiLastModified).toISO() : 'N/A'}`);
console.log(`│ Status: NEEDS UPDATE`);
console.log(`└─────────────────────────────────────────────────────────────────────────────────────────────\n`);
} catch (error) {
console.log(`┌─ Dataset ${dataset.publish_id} ───────────────────────────────────────────────────────────────`);
console.log(`│ Title: ${dataset.mainTitle || 'Untitled'}`);
console.log(`│ DOI: ${dataset.identifier?.value || 'N/A'}`);
console.log(`│ Error: ${error.message}`);
console.log(`│ Status: NEEDS UPDATE (Error checking)`);
console.log(`└─────────────────────────────────────────────────────────────────────────────────────────────\n`);
}
}
}

View file

@ -1,266 +0,0 @@
/*
|--------------------------------------------------------------------------
| node ace make:command update-datacite
| DONE: create commands/update_datacite.ts
|--------------------------------------------------------------------------
*/
import { BaseCommand, flags } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import Dataset from '#models/dataset';
import { DoiClient } from '#app/Library/Doi/DoiClient';
import DoiClientException from '#app/exceptions/DoiClientException';
import Index from '#app/Library/Utils/Index';
import env from '#start/env';
import logger from '@adonisjs/core/services/logger';
import { DateTime } from 'luxon';
import { getDomain } from '#app/utils/utility-functions';
/**
 * Ace command that pushes updated metadata to DataCite for published datasets
 * whose DOI record is older than the dataset (or for all datasets with
 * --force). Supports dry-run and stats-only modes that make no changes.
 */
export default class UpdateDatacite extends BaseCommand {
    static commandName = 'update:datacite';
    static description = 'Update DataCite DOI records for published datasets';
    // Boot the full AdonisJS application so models and services are available.
    public static needsApplication = true;

    // -p: restrict the run to one dataset.
    @flags.number({ alias: 'p', description: 'Specific publish_id to update' })
    public publish_id: number;

    // -f: skip the staleness check and update every selected dataset.
    @flags.boolean({ alias: 'f', description: 'Force update all records regardless of modification date' })
    public force: boolean = false;

    // -d: report what would change without calling DataCite's write API.
    @flags.boolean({ alias: 'd', description: 'Dry run - show what would be updated without making changes' })
    public dryRun: boolean = false;

    // -s: print per-dataset diagnostics instead of performing updates.
    @flags.boolean({ alias: 's', description: 'Show detailed stats for each dataset that needs updating' })
    public stats: boolean = false;

    // Usage example: node ace update:datacite -p 123 --force --dry-run
    public static options: CommandOptions = {
        startApp: true, // Whether to boot the application before running the command
        stayAlive: false, // Whether to keep the process alive after the command has executed
    };

    /**
     * Entry point: selects the target datasets, then updates (or, in
     * dry-run/stats mode, merely reports) each one, tracking counts of
     * updated / skipped / errored datasets.
     */
    async run() {
        logger.info('Starting DataCite update process...');

        const prefix = env.get('DATACITE_PREFIX', '');
        const base_domain = env.get('BASE_DOMAIN', '');
        const apiUrl = env.get('DATACITE_API_URL', 'https://api.datacite.org');

        if (!prefix || !base_domain) {
            logger.error('Missing DATACITE_PREFIX or BASE_DOMAIN environment variables');
            return;
        }

        logger.info(`Using DataCite API: ${apiUrl}`);

        const datasets = await this.getDatasets();
        logger.info(`Found ${datasets.length} datasets to process`);

        let updated = 0;
        let skipped = 0;
        let errors = 0;

        for (const dataset of datasets) {
            try {
                // --force bypasses the staleness check entirely.
                const shouldUpdate = this.force || (await this.shouldUpdateDataset(dataset));

                if (this.stats) {
                    // Stats mode: show detailed information for datasets that need updating
                    if (shouldUpdate) {
                        await this.showDatasetStats(dataset);
                        updated++;
                    } else {
                        skipped++;
                    }
                    continue;
                }

                if (!shouldUpdate) {
                    logger.info(`Dataset ${dataset.publish_id}: Up to date, skipping`);
                    skipped++;
                    continue;
                }

                if (this.dryRun) {
                    logger.info(`Dataset ${dataset.publish_id}: Would update DataCite record (dry run)`);
                    updated++;
                    continue;
                }

                await this.updateDataciteRecord(dataset, prefix, base_domain);
                logger.info(`Dataset ${dataset.publish_id}: Successfully updated DataCite record`);
                updated++;
            } catch (error) {
                // One failed dataset must not abort the whole batch.
                logger.error(`Dataset ${dataset.publish_id}: Failed to update - ${error.message}`);
                errors++;
            }
        }

        if (this.stats) {
            logger.info(`\nDataCite Stats Summary: ${updated} datasets need updating, ${skipped} are up to date`);
        } else {
            logger.info(`DataCite update completed. Updated: ${updated}, Skipped: ${skipped}, Errors: ${errors}`);
        }
    }

    /**
     * Loads all published datasets that have a DOI identifier, optionally
     * narrowed to a single publish_id via the -p flag.
     */
    private async getDatasets(): Promise<Dataset[]> {
        const query = Dataset.query()
            .preload('identifier')
            .preload('xmlCache')
            .where('server_state', 'published')
            .whereHas('identifier', (identifierQuery) => {
                identifierQuery.where('type', 'doi');
            });

        if (this.publish_id) {
            query.where('publish_id', this.publish_id);
        }

        return await query.exec();
    }

    /**
     * Decides whether a dataset's DataCite record is stale.
     *
     * NOTE(review): unlike the list:updateable-datacite command (600 s
     * tolerance, fail-open on errors), this check uses a 60 s tolerance and
     * fails CLOSED (returns false on errors / missing DOI info), so a dataset
     * listed as updatable may still be skipped here — confirm this asymmetry
     * is intentional.
     */
    private async shouldUpdateDataset(dataset: Dataset): Promise<boolean> {
        try {
            // Use the preloaded identifier when available; lazy-load otherwise.
            let doiIdentifier = dataset.identifier;
            if (!doiIdentifier) {
                await dataset.load('identifier');
                doiIdentifier = dataset.identifier;
            }
            if (!doiIdentifier || doiIdentifier.type !== 'doi') {
                return false;
            }

            const datasetModified = dataset.server_date_modified;
            const now = DateTime.now();

            if (!datasetModified) {
                return true; // Update if modification date is missing
            }
            if (datasetModified > now) {
                return false; // Skip invalid future dates
            }

            // Check DataCite DOI modification date
            const doiClient = new DoiClient();
            const doiLastModified = await doiClient.getDoiLastModified(doiIdentifier.value);
            if (!doiLastModified) {
                return false; // Do not update if we can't get DOI info
            }

            const doiModified = DateTime.fromJSDate(doiLastModified);
            if (datasetModified > doiModified) {
                // Dataset was modified after the DOI record was last touched.
                // Calculate the difference in seconds
                const diffInSeconds = Math.abs(datasetModified.diff(doiModified, 'seconds').seconds);
                // Define tolerance threshold (60 seconds = 1 minute)
                const toleranceSeconds = 60;
                // Only update if the difference is greater than the tolerance.
                // This prevents unnecessary updates for minor timestamp differences.
                return diffInSeconds > toleranceSeconds;
            } else {
                return false; // No update needed
            }
        } catch (error) {
            return false; // Do not update if we can't determine status or another error occurs
        }
    }

    /**
     * Regenerates the dataset's DataCite XML metadata and re-registers the
     * existing DOI with its landing page URL.
     *
     * @throws DoiClientException on an unexpected DataCite response code
     * @throws Error for missing DOI identifier, metadata generation failure,
     *         or any other update failure
     */
    private async updateDataciteRecord(dataset: Dataset, prefix: string, base_domain: string): Promise<void> {
        try {
            // Get the DOI identifier (HasOne relationship)
            let doiIdentifier = dataset.identifier;
            if (!doiIdentifier) {
                await dataset.load('identifier');
                doiIdentifier = dataset.identifier;
            }
            if (!doiIdentifier || doiIdentifier.type !== 'doi') {
                throw new Error('No DOI identifier found for dataset');
            }

            // Generate XML metadata
            const xmlMeta = (await Index.getDoiRegisterString(dataset)) as string;
            if (!xmlMeta) {
                throw new Error('Failed to generate XML metadata');
            }

            // Construct DOI value and landing page URL
            const doiValue = doiIdentifier.value; // Use existing DOI value
            const landingPageUrl = `https://doi.${getDomain(base_domain)}/${doiValue}`;

            // Update DataCite record
            const doiClient = new DoiClient();
            const dataciteResponse = await doiClient.registerDoi(doiValue, xmlMeta, landingPageUrl);

            if (dataciteResponse?.status === 201) {
                // NOTE(review): updating dataset.server_date_modified and the
                // search index after a successful registration is intentionally
                // disabled here (previously done in this branch) — confirm.
                logger.debug(`Dataset ${dataset.publish_id}: DataCite record and search index updated successfully`);
            } else {
                throw new DoiClientException(
                    dataciteResponse?.status || 500,
                    `Unexpected DataCite response code: ${dataciteResponse?.status}`,
                );
            }
        } catch (error) {
            // Preserve DoiClientException for callers; wrap everything else.
            if (error instanceof DoiClientException) {
                throw error;
            }
            throw new Error(`Failed to update DataCite record: ${error.message}`);
        }
    }

    /**
     * Shows detailed statistics for a dataset that needs updating
     * (queries DataCite for the DOI's current state and timestamps).
     */
    private async showDatasetStats(dataset: Dataset): Promise<void> {
        try {
            let doiIdentifier = dataset.identifier;
            if (!doiIdentifier) {
                await dataset.load('identifier');
                doiIdentifier = dataset.identifier;
            }

            const doiValue = doiIdentifier?.value || 'N/A';
            const doiStatus = doiIdentifier?.status || 'N/A';
            const datasetModified = dataset.server_date_modified;

            // Get DOI info from DataCite
            const doiClient = new DoiClient();
            const doiLastModified = await doiClient.getDoiLastModified(doiValue);
            const doiState = await doiClient.getDoiState(doiValue);

            console.log(`
Dataset ${dataset.publish_id}
DOI Value: ${doiValue}
DOI Status (DB): ${doiStatus}
DOI State (DataCite): ${doiState || 'Unknown'}
Dataset Modified: ${datasetModified ? datasetModified.toISO() : 'N/A'}
DOI Modified: ${doiLastModified ? DateTime.fromJSDate(doiLastModified).toISO() : 'N/A'}
Needs Update: YES - Dataset newer than DOI
`);
        } catch (error) {
            console.log(`
Dataset ${dataset.publish_id}
DOI Value: ${dataset.identifier?.value || 'N/A'}
Error: ${error.message}
Needs Update: YES - Error checking status
`);
        }
    }
}

18
components.d.ts vendored
View file

@ -11,21 +11,3 @@ declare module '@vue/runtime-core' {
NInput: (typeof import('naive-ui'))['NInput'];
}
}
// types/leaflet-src-dom-DomEvent.d.ts
// Ambient typings for Leaflet's internal DomEvent module, which ships without
// type declarations when imported directly from 'leaflet/src/dom/DomEvent'.
declare module 'leaflet/src/dom/DomEvent' {
    // Generic DOM event callback; Leaflet passes the native event when available.
    export type DomEventHandler = (e?: any) => void;

    // Attach event listeners. `obj` can be any DOM node or object with event handling.
    // `types` is a space-separated list of event names; `context` becomes `this` in `fn`.
    export function on(obj: any, types: string, fn: DomEventHandler, context?: any): void;

    // Detach event listeners previously added with `on` (same obj/types/fn/context).
    export function off(obj: any, types: string, fn?: DomEventHandler, context?: any): void;

    // Prevent default on native events
    export function preventDefault(ev?: Event | undefined): void;

    // Optional: other helpers you might need later
    export function stopPropagation(ev?: Event | undefined): void;
    export function stop(ev?: Event | undefined): void;
}

View file

@ -128,7 +128,7 @@ allowedMethods: ['POST', 'PUT', 'PATCH', 'DELETE'],
| projects/:id/file
| ```
*/
processManually: ['/submitter/dataset/submit', '/submitter/dataset/:id/update'],
processManually: [],
/*
|--------------------------------------------------------------------------
@ -185,8 +185,8 @@ allowedMethods: ['POST', 'PUT', 'PATCH', 'DELETE'],
| and fields data.
|
*/
limit: '513mb',
//limit: env.get('UPLOAD_LIMIT', '513mb'),
// limit: '20mb',
limit: env.get('UPLOAD_LIMIT', '513mb'),
/*
|--------------------------------------------------------------------------

View file

@ -16,7 +16,7 @@ const mailConfig = defineConfig({
host: env.get('SMTP_HOST', ''),
port: env.get('SMTP_PORT'),
secure: false,
ignoreTLS: true,
// ignoreTLS: true,
requireTLS: false,
/**

View file

@ -21,7 +21,6 @@ export enum ServerStates {
rejected_reviewer = 'rejected_reviewer',
rejected_editor = 'rejected_editor',
reviewed = 'reviewed',
rejected_to_reviewer = 'rejected_to_reviewer',
}
// for table dataset_titles

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -86,22 +86,3 @@ export default class Documents extends BaseSchema {
// CONSTRAINT documents_server_state_check CHECK (server_state::text = ANY (ARRAY['deleted'::character varying::text, 'inprogress'::character varying::text, 'published'::character varying::text, 'released'::character varying::text, 'editor_accepted'::character varying::text, 'approved'::character varying::text, 'rejected_reviewer'::character varying::text, 'rejected_editor'::character varying::text, 'reviewed'::character varying::text])),
// CONSTRAINT documents_type_check CHECK (type::text = ANY (ARRAY['analysisdata'::character varying::text, 'measurementdata'::character varying::text, 'monitoring'::character varying::text, 'remotesensing'::character varying::text, 'gis'::character varying::text, 'models'::character varying::text, 'mixedtype'::character varying::text]))
// )
// ALTER TABLE documents DROP CONSTRAINT documents_server_state_check;
// ALTER TABLE documents
// ADD CONSTRAINT documents_server_state_check CHECK (
// server_state::text = ANY (ARRAY[
// 'deleted',
// 'inprogress',
// 'published',
// 'released',
// 'editor_accepted',
// 'approved',
// 'rejected_reviewer',
// 'rejected_editor',
// 'reviewed',
// 'rejected_to_reviewer' -- new value added
// ]::text[])
// );

View file

@ -32,21 +32,3 @@ export default class CollectionsRoles extends BaseSchema {
// visible_oai boolean NOT NULL DEFAULT true,
// CONSTRAINT collections_roles_pkey PRIMARY KEY (id)
// )
// change to normal intzeger:
// ALTER TABLE collections_roles ALTER COLUMN id DROP DEFAULT;
// DROP SEQUENCE IF EXISTS collections_roles_id_seq;
// -- Step 1: Temporarily change one ID to a value not currently used
// UPDATE collections_roles SET id = 99 WHERE name = 'ccs';
// -- Step 2: Change 'ddc' ID to 2 (the old 'ccs' ID)
// UPDATE collections_roles SET id = 2 WHERE name = 'ddc';
// -- Step 3: Change the temporary ID (99) to 3 (the old 'ddc' ID)
// UPDATE collections_roles SET id = 3 WHERE name = 'ccs';
// UPDATE collections_roles SET id = 99 WHERE name = 'bk';
// UPDATE collections_roles SET id = 1 WHERE name = 'institutes';
// UPDATE collections_roles SET id = 4 WHERE name = 'pacs';
// UPDATE collections_roles SET id = 7 WHERE name = 'bk';

View file

@ -5,7 +5,7 @@ export default class Collections extends BaseSchema {
public async up() {
this.schema.createTable(this.tableName, (table) => {
table.increments('id');//.defaultTo("nextval('collections_id_seq')");
table.increments('id').defaultTo("nextval('collections_id_seq')");
table.integer('role_id').unsigned();
table
.foreign('role_id', 'collections_role_id_foreign')
@ -25,8 +25,6 @@ export default class Collections extends BaseSchema {
.onUpdate('CASCADE');
table.boolean('visible').notNullable().defaultTo(true);
table.boolean('visible_publish').notNullable().defaultTo(true);
table.integer('left_id').unsigned();
table.integer('right_id').unsigned();
});
}
@ -61,26 +59,3 @@ export default class Collections extends BaseSchema {
// change to normal intzeger:
// ALTER TABLE collections ALTER COLUMN id DROP DEFAULT;
// DROP SEQUENCE IF EXISTS collections_id_seq;
// ALTER TABLE collections
// ADD COLUMN left_id INTEGER;
// COMMENT ON COLUMN collections.left_id IS 'comment';
// ALTER TABLE collections
// ADD COLUMN right_id INTEGER;
// COMMENT ON COLUMN collections.right_id IS 'comment';
// -- Step 1: Drop the existing default
// ALTER TABLE collections
// ALTER COLUMN visible DROP DEFAULT,
// ALTER COLUMN visible_publish DROP DEFAULT;
// -- Step 2: Change column types with proper casting
// ALTER TABLE collections
// ALTER COLUMN visible TYPE smallint USING CASE WHEN visible THEN 1 ELSE 0 END,
// ALTER COLUMN visible_publish TYPE smallint USING CASE WHEN visible_publish THEN 1 ELSE 0 END;
// -- Step 3: Set new defaults as smallint
// ALTER TABLE collections
// ALTER COLUMN visible SET DEFAULT 1,
// ALTER COLUMN visible_publish SET DEFAULT 1;

View file

@ -1,74 +1,47 @@
#!/bin/bash
# Container entrypoint: start the ClamAV services (the freshclam database
# updater and the clamd scanner daemon) in the background, then hand control
# to the container CMD via exec.
#
# Fixes over the previous version:
#  - freshclam was started twice (once with --daemon-notify, once without);
#    it is now started exactly once.
#  - clamd was launched in the foreground, which blocked this script so the
#    health checks and the final `exec "$@"` were never reached (and a second
#    `clamd &` further down could never run). clamd now runs in the background.
set -m

echo "Starting ClamAV services..."

# Merge comma-separated KEY=VALUE overrides into a ClamAV config file.
#   $1: CSV of settings (e.g. "LogVerbose=yes,MaxThreads=4")
#   $2: target config file
# Existing copies of each key are removed first so multi-value settings
# (e.g. ExtraDatabase) can be re-added cleanly.
# NOTE(review): currently unused — the invocations below are commented out;
# kept for optional runtime configuration via *_SETTINGS_CSV env vars.
function process_file() {
    if [[ ! -z "$1" ]]; then
        local SETTING_LIST=$(echo "$1" | tr ',' '\n' | grep "^[A-Za-z][A-Za-z]*=.*$")
        local SETTING
        for SETTING in ${SETTING_LIST}; do
            local KEY=${SETTING%%=*}
            sed -i "$2" -e "/^${KEY} /d"
        done
        for SETTING in ${SETTING_LIST}; do
            local KEY=${SETTING%%=*}
            local VALUE=${SETTING#*=}
            echo "${KEY} ${VALUE}" >> "$2"
        done
    fi
}

# process_file "${CLAMD_SETTINGS_CSV}" /etc/clamav/clamd.conf
# process_file "${FRESHCLAM_SETTINGS_CSV}" /etc/clamav/freshclam.conf

# Start the freshclam daemon once for automatic database updates.
# --daemon-notify makes freshclam tell clamd to reload after an update.
echo "Starting freshclam daemon for automatic updates..."
freshclam -d --daemon-notify=/etc/clamav/clamd.conf &

# Give freshclam a moment to start
sleep 2

# Start clamd in the background; a foreground clamd would block this script.
echo "Starting ClamAV daemon..."
clamd &

# Give services time to start
echo "Waiting for services to initialize..."
sleep 8

# Best-effort status checks: failures are non-fatal so the application
# still starts even if ClamAV is slow to come up.
if pgrep clamd > /dev/null; then
    echo "✓ ClamAV daemon is running"
else
    echo "⚠ ClamAV daemon status uncertain, but continuing..."
fi

if pgrep freshclam > /dev/null; then
    echo "✓ Freshclam daemon is running"
else
    echo "⚠ Freshclam daemon status uncertain, but continuing..."
fi

echo "✓ ClamAV setup complete"
echo "Starting main application..."

# Replace the shell with the container CMD so it becomes the supervised
# process and receives signals directly.
exec "$@"

View file

@ -1,278 +0,0 @@
# Dataset Indexing Command
AdonisJS Ace command for indexing and synchronizing published datasets with OpenSearch for search functionality.
## Overview
The `index:datasets` command processes published datasets and creates/updates corresponding search index documents in OpenSearch. It intelligently compares modification timestamps to only re-index datasets when necessary, optimizing performance while maintaining search index accuracy.
## Command Syntax
```bash
node ace index:datasets [options]
```
## Options
| Flag | Alias | Description |
|------|-------|-------------|
| `--publish_id <number>` | `-p` | Index a specific dataset by publish_id |
## Usage Examples
### Basic Operations
```bash
# Index all published datasets that have been modified since last indexing
node ace index:datasets
# Index a specific dataset by publish_id
node ace index:datasets --publish_id 231
node ace index:datasets -p 231
```
## How It Works
### 1. **Dataset Selection**
The command processes datasets that meet these criteria:
- `server_state = 'published'` - Only published datasets
- Has preloaded `xmlCache` relationship for metadata transformation
- Optionally filtered by specific `publish_id`
### 2. **Smart Update Detection**
For each dataset, the command:
- Checks if the dataset exists in the OpenSearch index
- Compares `server_date_modified` timestamps
- Only re-indexes if the dataset is newer than the indexed version
### 3. **Document Processing**
The indexing process involves:
1. **XML Generation**: Creates structured XML from dataset metadata
2. **XSLT Transformation**: Converts XML to JSON using Saxon-JS processor
3. **Index Update**: Updates or creates the document in OpenSearch
4. **Logging**: Records success/failure for each operation
## Index Structure
### Index Configuration
- **Index Name**: `tethys-records`
- **Document ID**: Dataset `publish_id`
- **Refresh**: `true` (immediate availability)
### Document Fields
The indexed documents contain:
- **Metadata Fields**: Title, description, authors, keywords
- **Identifiers**: DOI, publish_id, and other identifiers
- **Temporal Data**: Publication dates, coverage periods
- **Geographic Data**: Spatial coverage information
- **Technical Details**: Data formats, access information
- **Timestamps**: Creation and modification dates
## Example Output
### Successful Run
```bash
node ace index:datasets
```
```
Found 150 published datasets to process
Dataset with publish_id 231 successfully indexed
Dataset with publish_id 245 is up to date, skipping indexing
Dataset with publish_id 267 successfully indexed
An error occurred while indexing dataset with publish_id 289. Error: Invalid XML metadata
Processing completed: 148 indexed, 1 skipped, 1 error
```
### Specific Dataset
```bash
node ace index:datasets --publish_id 231
```
```
Found 1 published dataset to process
Dataset with publish_id 231 successfully indexed
Processing completed: 1 indexed, 0 skipped, 0 errors
```
## Update Logic
The command uses intelligent indexing to avoid unnecessary processing:
| Condition | Action | Reason |
|-----------|--------|--------|
| Dataset not in index | ✅ Index | New dataset needs indexing |
| Dataset newer than indexed version | ✅ Re-index | Dataset has been updated |
| Dataset same/older than indexed version | ❌ Skip | Already up to date |
| OpenSearch document check fails | ✅ Index | Better safe than sorry |
| Invalid XML metadata | ❌ Skip + Log Error | Cannot process invalid data |
### Timestamp Comparison
```typescript
// Example comparison logic
const existingModified = DateTime.fromMillis(Number(existingDoc.server_date_modified) * 1000);
const currentModified = dataset.server_date_modified;
if (currentModified <= existingModified) {
// Skip - already up to date
return false;
}
// Proceed with indexing
```
## XML Transformation Process
### 1. **XML Generation**
```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<root>
<Dataset>
<!-- Dataset metadata fields -->
<title>Research Dataset Title</title>
<description>Dataset description...</description>
<!-- Additional metadata -->
</Dataset>
</root>
```
### 2. **XSLT Processing**
The command uses Saxon-JS with a compiled stylesheet (`solr.sef.json`) to transform XML to JSON:
```javascript
const result = await SaxonJS.transform({
stylesheetText: proc,
destination: 'serialized',
sourceText: xmlString,
});
```
### 3. **Final JSON Document**
```json
{
"id": "231",
"title": "Research Dataset Title",
"description": "Dataset description...",
"authors": ["Author Name"],
"server_date_modified": 1634567890,
"publish_id": 231
}
```
## Configuration Requirements
### Environment Variables
```bash
# OpenSearch Configuration
OPENSEARCH_HOST=localhost:9200
# For production:
# OPENSEARCH_HOST=your-opensearch-cluster:9200
```
### Required Files
- **XSLT Stylesheet**: `public/assets2/solr.sef.json` - Compiled Saxon-JS stylesheet for XML transformation
### Database Relationships
The command expects these model relationships:
```typescript
// Dataset model must have:
@hasOne(() => XmlCache, { foreignKey: 'dataset_id' })
public xmlCache: HasOne<typeof XmlCache>
```
## Error Handling
The command handles various error scenarios gracefully:
### Common Errors and Solutions
| Error | Cause | Solution |
|-------|-------|----------|
| `XSLT transformation failed` | Invalid XML or missing stylesheet | Check XML structure and stylesheet path |
| `OpenSearch connection error` | Service unavailable | Verify OpenSearch is running and accessible |
| `JSON parse error` | Malformed transformation result | Check XSLT stylesheet output format |
| `Missing xmlCache relationship` | Data integrity issue | Ensure xmlCache exists for dataset |
### Error Logging
```bash
# Typical error log entry
An error occurred while indexing dataset with publish_id 231.
Error: XSLT transformation failed: Invalid XML structure at line 15
```
## Performance Considerations
### Batch Processing
- Processes datasets sequentially to avoid overwhelming OpenSearch
- Each dataset is committed individually for reliability
- Failed indexing of one dataset doesn't stop processing others
### Resource Usage
- **Memory**: XML/JSON transformations require temporary memory
- **Network**: OpenSearch API calls for each dataset
- **CPU**: XSLT transformations are CPU-intensive
### Optimization Tips
```bash
# Index only recently modified datasets (run regularly)
node ace index:datasets
# Index specific datasets when needed
node ace index:datasets --publish_id 231
# Consider running during off-peak hours for large batches
```
## Integration with Other Systems
### Search Functionality
The indexed documents power:
- **Dataset Search**: Full-text search across metadata
- **Faceted Browsing**: Filter by authors, keywords, dates
- **Geographic Search**: Spatial query capabilities
- **Auto-complete**: Suggest dataset titles and keywords
### Related Commands
- [`update:datacite`](update-datacite.md) - Often run after indexing to sync DOI metadata
- **Database migrations** - May require re-indexing after schema changes
### API Integration
The indexed data is consumed by:
- **Search API**: `/api/search` endpoints
- **Browse API**: `/api/datasets` with filtering
- **Recommendations**: Related dataset suggestions
## Monitoring and Maintenance
### Regular Tasks
```bash
# Daily indexing (recommended cron job)
0 2 * * * cd /path/to/project && node ace index:datasets
# Weekly full re-index (if needed)
0 3 * * 0 cd /path/to/project && node ace index:datasets --force
```
### Health Checks
- Monitor OpenSearch cluster health
- Check for failed indexing operations in logs
- Verify search functionality is working
- Compare dataset counts between database and index
### Troubleshooting
```bash
# Check specific dataset indexing
node ace index:datasets --publish_id 231
# Verify OpenSearch connectivity
curl -X GET "localhost:9200/_cluster/health"
# Check index statistics
curl -X GET "localhost:9200/tethys-records/_stats"
```
## Best Practices
1. **Regular Scheduling**: Run the command regularly (daily) to keep the search index current
2. **Monitor Logs**: Watch for transformation errors or OpenSearch issues
3. **Backup Strategy**: Include OpenSearch indices in backup procedures
4. **Resource Management**: Monitor OpenSearch cluster resources during bulk operations
5. **Testing**: Verify search functionality after major indexing operations
6. **Coordination**: Run indexing before DataCite updates when both are needed

View file

@ -1,216 +0,0 @@
# DataCite Update Command
AdonisJS Ace command for updating DataCite DOI records for published datasets.
## Overview
The `update:datacite` command synchronizes your local dataset metadata with DataCite DOI records. It intelligently compares modification dates to only update records when necessary, reducing unnecessary API calls and maintaining data consistency.
## Command Syntax
```bash
node ace update:datacite [options]
```
## Options
| Flag | Alias | Description |
|------|-------|-------------|
| `--publish_id <number>` | `-p` | Update a specific dataset by publish_id |
| `--force` | `-f` | Force update all records regardless of modification date |
| `--dry-run` | `-d` | Preview what would be updated without making changes |
| `--stats` | `-s` | Show detailed statistics for datasets that need updating |
## Usage Examples
### Basic Operations
```bash
# Update all datasets that have been modified since their DOI was last updated
node ace update:datacite
# Update a specific dataset
node ace update:datacite --publish_id 231
node ace update:datacite -p 231
# Force update all datasets with DOIs (ignores modification dates)
node ace update:datacite --force
```
### Preview and Analysis
```bash
# Preview what would be updated (dry run)
node ace update:datacite --dry-run
# Show detailed statistics for datasets that need updating
node ace update:datacite --stats
# Show stats for a specific dataset
node ace update:datacite --stats --publish_id 231
```
### Combined Options
```bash
# Dry run for a specific dataset
node ace update:datacite --dry-run --publish_id 231
# Show stats for all datasets (including up-to-date ones)
node ace update:datacite --stats --force
```
## Command Modes
### 1. **Normal Mode** (Default)
Updates DataCite records for datasets that have been modified since their DOI was last updated.
**Example Output:**
```
Using DataCite API: https://api.test.datacite.org
Found 50 datasets to process
Dataset 231: Successfully updated DataCite record
Dataset 245: Up to date, skipping
Dataset 267: Successfully updated DataCite record
DataCite update completed. Updated: 15, Skipped: 35, Errors: 0
```
### 2. **Dry Run Mode** (`--dry-run`)
Shows what would be updated without making any changes to DataCite.
**Use Case:** Preview updates before running the actual command.
**Example Output:**
```
Dataset 231: Would update DataCite record (dry run)
Dataset 267: Would update DataCite record (dry run)
Dataset 245: Up to date, skipping
DataCite update completed. Updated: 2, Skipped: 1, Errors: 0
```
### 3. **Stats Mode** (`--stats`)
Shows detailed information for each dataset that needs updating, including why it needs updating.
**Use Case:** Debug synchronization issues, monitor dataset/DOI status, generate reports.
**Example Output:**
```
┌─ Dataset 231 ─────────────────────────────────────────────────────────
│ DOI Value: 10.21388/tethys.231
│ DOI Status (DB): findable
│ DOI State (DataCite): findable
│ Dataset Modified: 2024-09-15T10:30:00.000Z
│ DOI Modified: 2024-09-10T08:15:00.000Z
│ Needs Update: YES - Dataset newer than DOI
└───────────────────────────────────────────────────────────────────────
┌─ Dataset 267 ─────────────────────────────────────────────────────────
│ DOI Value: 10.21388/tethys.267
│ DOI Status (DB): findable
│ DOI State (DataCite): findable
│ Dataset Modified: 2024-09-18T14:20:00.000Z
│ DOI Modified: 2024-09-16T12:45:00.000Z
│ Needs Update: YES - Dataset newer than DOI
└───────────────────────────────────────────────────────────────────────
DataCite Stats Summary: 2 datasets need updating, 48 are up to date
```
## Update Logic
The command uses intelligent update detection:
1. **Compares modification dates**: Dataset `server_date_modified` vs DOI last modification date from DataCite
2. **Validates data integrity**: Checks for missing or future dates
3. **Handles API failures gracefully**: Updates anyway if DataCite info can't be retrieved
4. **Uses dual API approach**: DataCite REST API (primary) with MDS API fallback
### When Updates Happen
| Condition | Action | Reason |
|-----------|--------|--------|
| Dataset modified > DOI modified | ✅ Update | Dataset has newer changes |
| Dataset modified ≤ DOI modified | ❌ Skip | DOI is up to date |
| Dataset date in future | ❌ Skip | Invalid data, needs investigation |
| Dataset date missing | ✅ Update | Can't determine staleness |
| DataCite API error | ✅ Update | Better safe than sorry |
| `--force` flag used | ✅ Update | Override all logic |
## Environment Configuration
Required environment variables:
```bash
# DataCite Credentials
DATACITE_USERNAME=your_username
DATACITE_PASSWORD=your_password
# API Endpoints (environment-specific)
DATACITE_API_URL=https://api.test.datacite.org # Test environment
DATACITE_SERVICE_URL=https://mds.test.datacite.org # Test MDS
DATACITE_API_URL=https://api.datacite.org # Production
DATACITE_SERVICE_URL=https://mds.datacite.org # Production MDS
# Project Configuration
DATACITE_PREFIX=10.21388 # Your DOI prefix
BASE_DOMAIN=tethys.at # Your domain
```
## Error Handling
The command handles various error scenarios:
- **Invalid modification dates**: Logs errors but continues processing other datasets
- **DataCite API failures**: Falls back to MDS API, then to safe update
- **Missing DOI identifiers**: Skips datasets without DOI identifiers
- **Network issues**: Continues with next dataset after logging error
## Integration
The command integrates with:
- **Dataset Model**: Uses `server_date_modified` for change detection
- **DatasetIdentifier Model**: Reads DOI values and status
- **OpenSearch Index**: Updates search index after DataCite update
- **DoiClient**: Handles all DataCite API interactions
## Common Workflows
### Daily Maintenance
```bash
# Update any datasets modified today
node ace update:datacite
```
### Pre-Deployment Check
```bash
# Check what would be updated before deployment
node ace update:datacite --dry-run
```
### Debugging Sync Issues
```bash
# Investigate why specific dataset isn't syncing
node ace update:datacite --stats --publish_id 231
```
### Full Resync
```bash
# Force update all DOI records (use with caution)
node ace update:datacite --force
```
### Monitoring Report
```bash
# Generate sync status report
node ace update:datacite --stats > datacite-sync-report.txt
```
## Best Practices
1. **Regular Updates**: Run daily or after bulk dataset modifications
2. **Test First**: Use `--dry-run` or `--stats` before bulk operations
3. **Monitor Logs**: Check for data integrity warnings
4. **Environment Separation**: Use correct API URLs for test vs production
5. **Rate Limiting**: The command handles DataCite rate limits automatically

View file

@ -1,47 +1,229 @@
##
## Container-optimized freshclam configuration
## Example config file for freshclam
## Please read the freshclam.conf(5) manual before editing this file.
##
# Database directory
# Comment or remove the line below.
# Path to the database directory.
# WARNING: It must match clamd.conf's directive!
# Default: hardcoded (depends on installation options)
DatabaseDirectory /var/lib/clamav
# Log to stdout for container logging
# Path to the log file (make sure it has proper permissions)
# Default: disabled
# UpdateLogFile /dev/stdout
# Basic logging settings
# Maximum size of the log file.
# Value of 0 disables the limit.
# You may use 'M' or 'm' for megabytes (1M = 1m = 1048576 bytes)
# and 'K' or 'k' for kilobytes (1K = 1k = 1024 bytes).
# in bytes just don't use modifiers. If LogFileMaxSize is enabled,
# log rotation (the LogRotate option) will always be enabled.
# Default: 1M
#LogFileMaxSize 2M
# Log time with each message.
# Default: no
LogTime yes
# Enable verbose logging.
# Default: no
LogVerbose yes
# Use system logger (can work together with UpdateLogFile).
# Default: no
LogSyslog no
# PID file location
# Specify the type of syslog messages - please refer to 'man syslog'
# for facility names.
# Default: LOG_LOCAL6
#LogFacility LOG_MAIL
# Enable log rotation. Always enabled when LogFileMaxSize is enabled.
# Default: no
#LogRotate yes
# This option allows you to save the process identifier of the daemon
# Default: disabled
#PidFile /var/run/freshclam.pid
PidFile /var/run/clamav/freshclam.pid
# Database owner
# By default when started freshclam drops privileges and switches to the
# "clamav" user. This directive allows you to change the database owner.
# Default: clamav (may depend on installation options)
DatabaseOwner node
# Mirror settings for Austria
# Use DNS to verify virus database version. Freshclam uses DNS TXT records
# to verify database and software versions. With this directive you can change
# the database verification domain.
# WARNING: Do not touch it unless you're configuring freshclam to use your
# own database verification domain.
# Default: current.cvd.clamav.net
#DNSDatabaseInfo current.cvd.clamav.net
# Uncomment the following line and replace XY with your country
# code. See http://www.iana.org/cctld/cctld-whois.htm for the full list.
# You can use db.XY.ipv6.clamav.net for IPv6 connections.
DatabaseMirror db.at.clamav.net
# database.clamav.net is a round-robin record which points to our most
# reliable mirrors. It's used as a fall back in case db.XY.clamav.net is
# not working. DO NOT TOUCH the following line unless you know what you
# are doing.
DatabaseMirror database.clamav.net
# How many attempts to make before giving up.
# Default: 3 (per mirror)
#MaxAttempts 5
# With this option you can control scripted updates. It's highly recommended
# to keep it enabled.
# Default: yes
# Update settings
ScriptedUpdates yes
#ScriptedUpdates yes
# By default freshclam will keep the local databases (.cld) uncompressed to
# make their handling faster. With this option you can enable the compression;
# the change will take effect with the next database update.
# Default: no
#CompressLocalDatabase no
# With this option you can provide custom sources (http:// or file://) for
# database files. This option can be used multiple times.
# Default: no custom URLs
#DatabaseCustomURL http://myserver.com/mysigs.ndb
#DatabaseCustomURL file:///mnt/nfs/local.hdb
# This option allows you to easily point freshclam to private mirrors.
# If PrivateMirror is set, freshclam does not attempt to use DNS
# to determine whether its databases are out-of-date, instead it will
# use the If-Modified-Since request or directly check the headers of the
# remote database files. For each database, freshclam first attempts
# to download the CLD file. If that fails, it tries to download the
# CVD file. This option overrides DatabaseMirror, DNSDatabaseInfo
# and ScriptedUpdates. It can be used multiple times to provide
# fall-back mirrors.
# Default: disabled
#PrivateMirror mirror1.mynetwork.com
#PrivateMirror mirror2.mynetwork.com
# Number of database checks per day.
# Default: 12 (every two hours)
Checks 12
#Checks 24
# Don't fork (good for containers)
# Proxy settings
# Default: disabled
#HTTPProxyServer myproxy.com
#HTTPProxyPort 1234
#HTTPProxyUsername myusername
#HTTPProxyPassword mypass
# If your servers are behind a firewall/proxy which applies User-Agent
# filtering you can use this option to force the use of a different
# User-Agent header.
# Default: clamav/version_number
#HTTPUserAgent SomeUserAgentIdString
# Use aaa.bbb.ccc.ddd as client address for downloading databases. Useful for
# multi-homed systems.
# Default: Use OS'es default outgoing IP address.
#LocalIPAddress aaa.bbb.ccc.ddd
# Send the RELOAD command to clamd.
# Default: no
#NotifyClamd /path/to/clamd.conf
# Run command after successful database update.
# Default: disabled
#OnUpdateExecute command
# Run command when database update process fails.
# Default: disabled
#OnErrorExecute command
# Run command when freshclam reports outdated version.
# In the command string %v will be replaced by the new version number.
# Default: disabled
#OnOutdatedExecute command
# Don't fork into background.
# Default: no
Foreground no
# Connection timeouts
ConnectTimeout 60
ReceiveTimeout 60
# Enable debug messages in libclamav.
# Default: no
#Debug yes
# Test databases before using them
TestDatabases yes
# Timeout in seconds when connecting to database server.
# Default: 30
#ConnectTimeout 60
# Enable bytecode signatures
Bytecode yes
# Timeout in seconds when reading from database server.
# Default: 30
#ReceiveTimeout 60
# With this option enabled, freshclam will attempt to load new
# databases into memory to make sure they are properly handled
# by libclamav before replacing the old ones.
# Default: yes
#TestDatabases yes
# When enabled freshclam will submit statistics to the ClamAV Project about
# the latest virus detections in your environment. The ClamAV maintainers
# will then use this data to determine what types of malware are the most
# detected in the field and in what geographic area they are.
# Freshclam will connect to clamd in order to get recent statistics.
# Default: no
#SubmitDetectionStats /path/to/clamd.conf
# Country of origin of malware/detection statistics (for statistical
# purposes only). The statistics collector at ClamAV.net will look up
# your IP address to determine the geographical origin of the malware
# reported by your installation. If this installation is mainly used to
# scan data which comes from a different location, please enable this
# option and enter a two-letter code (see http://www.iana.org/domains/root/db/)
# of the country of origin.
# Default: disabled
#DetectionStatsCountry country-code
# This option enables support for our "Personal Statistics" service.
# When this option is enabled, the information on malware detected by
# your clamd installation is made available to you through our website.
# To get your HostID, log on http://www.stats.clamav.net and add a new
# host to your host list. Once you have the HostID, uncomment this option
# and paste the HostID here. As soon as your freshclam starts submitting
# information to our stats collecting service, you will be able to view
# the statistics of this clamd installation by logging into
# http://www.stats.clamav.net with the same credentials you used to
# generate the HostID. For more information refer to:
# http://www.clamav.net/documentation.html#cctts
# This feature requires SubmitDetectionStats to be enabled.
# Default: disabled
#DetectionStatsHostID unique-id
# This option enables support for Google Safe Browsing. When activated for
# the first time, freshclam will download a new database file (safebrowsing.cvd)
# which will be automatically loaded by clamd and clamscan during the next
# reload, provided that the heuristic phishing detection is turned on. This
# database includes information about websites that may be phishing sites or
# possible sources of malware. When using this option, it's mandatory to run
# freshclam at least every 30 minutes.
# Freshclam uses the ClamAV's mirror infrastructure to distribute the
# database and its updates but all the contents are provided under Google's
# terms of use. See http://www.google.com/transparencyreport/safebrowsing
# and http://www.clamav.net/documentation.html#safebrowsing
# for more information.
# Default: disabled
#SafeBrowsing yes
# This option enables downloading of bytecode.cvd, which includes additional
# detection mechanisms and improvements to the ClamAV engine.
# Default: enabled
#Bytecode yes
# Download an additional 3rd party signature database distributed through
# the ClamAV mirrors.
# This option can be used multiple times.
#ExtraDatabase dbname1
#ExtraDatabase dbname2

6
index.d.ts vendored
View file

@ -183,9 +183,3 @@ declare module 'saxon-js' {
export function transform(options: ITransformOptions): Promise<ITransformOutput> | ITransformOutput;
}
declare global {
interface File {
sort_order?: number;
}
}

4893
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -58,7 +58,7 @@
"eslint-plugin-prettier": "^5.0.0-alpha.2",
"hot-hook": "^0.4.0",
"numeral": "^2.0.6",
"pinia": "^3.0.2",
"pinia": "^2.0.30",
"pino-pretty": "^13.0.0",
"postcss-loader": "^8.1.1",
"prettier": "^3.4.2",
@ -76,8 +76,7 @@
},
"dependencies": {
"@adonisjs/auth": "^9.2.4",
"@adonisjs/bodyparser": "^10.0.1",
"@adonisjs/core": "6.17.2",
"@adonisjs/core": "^6.17.0",
"@adonisjs/cors": "^2.2.1",
"@adonisjs/drive": "^3.2.0",
"@adonisjs/inertia": "^2.1.3",
@ -116,7 +115,7 @@
"notiwind": "^2.0.0",
"pg": "^8.9.0",
"qrcode": "^1.5.3",
"redis": "^5.0.0",
"redis": "^4.6.10",
"reflect-metadata": "^0.2.1",
"saxon-js": "^2.5.0",
"toastify-js": "^1.12.0",

View file

@ -1,34 +0,0 @@
import { ApplicationService } from '@adonisjs/core/types';
export default class RuleProvider {
constructor(protected app: ApplicationService) {}
public register() {
// Register your own bindings
}
public async boot() {
// IoC container is ready
// await import("../src/rules/index.js");
await import('#start/rules/unique');
await import('#start/rules/translated_language');
await import('#start/rules/unique_person');
// () => import('#start/rules/file_length'),
// () => import('#start/rules/file_scan'),
// () => import('#start/rules/allowed_extensions_mimetypes'),
await import('#start/rules/dependent_array_min_length');
await import('#start/rules/referenceValidation');
await import('#start/rules/valid_mimetype');
await import('#start/rules/array_contains_types');
await import('#start/rules/orcid');
}
public async ready() {
// App is ready
}
public async shutdown() {
// Cleanup, since app is going down
}
}

View file

@ -6,16 +6,17 @@
import type { ApplicationService } from '@adonisjs/core/types';
import vine, { symbols, BaseLiteralType, Vine } from '@vinejs/vine';
import type { FieldContext, FieldOptions } from '@vinejs/vine/types';
// import type { MultipartFile, FileValidationOptions } from '@adonisjs/bodyparser/types';
import type { MultipartFile } from '@adonisjs/core/bodyparser';
import type { FileValidationOptions } from '@adonisjs/core/types/bodyparser';
import { Request, RequestValidator } from '@adonisjs/core/http';
import MimeType from '#models/mime_type';
/**
* Validation options accepted by the "file" rule
*/
export type FileRuleValidationOptions = Partial<FileValidationOptions> | ((field: FieldContext) => Partial<FileValidationOptions>);
/**
* Extend VineJS
*/
@ -24,7 +25,6 @@ declare module '@vinejs/vine' {
myfile(options?: FileRuleValidationOptions): VineMultipartFile;
}
}
/**
* Extend HTTP request class
*/
@ -36,54 +36,19 @@ declare module '@adonisjs/core/http' {
* Checks if the value is an instance of multipart file
* from bodyparser.
*/
export function isBodyParserFile(file: MultipartFile | unknown): file is MultipartFile {
export function isBodyParserFile(file: MultipartFile | unknown): boolean {
return !!(file && typeof file === 'object' && 'isMultipartFile' in file);
}
export async function getEnabledExtensions() {
const enabledExtensions = await MimeType.query().select('file_extension').where('enabled', true).exec();
const extensions = enabledExtensions
.map((extension) => {
return extension.file_extension.split('|');
})
.flat();
/**
* Cache for enabled extensions to reduce database queries
*/
let extensionsCache: string[] | null = null;
let cacheTimestamp = 0;
const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
/**
* Get enabled extensions with caching
*/
export async function getEnabledExtensions(): Promise<string[]> {
const now = Date.now();
if (extensionsCache && now - cacheTimestamp < CACHE_DURATION) {
return extensionsCache;
}
try {
const enabledExtensions = await MimeType.query().select('file_extension').where('enabled', true).exec();
const extensions = enabledExtensions
.map((extension) => extension.file_extension.split('|'))
.flat()
.map((ext) => ext.toLowerCase().trim())
.filter((ext) => ext.length > 0);
extensionsCache = [...new Set(extensions)]; // Remove duplicates
cacheTimestamp = now;
return extensionsCache;
} catch (error) {
console.error('Error fetching enabled extensions:', error);
return extensionsCache || [];
}
return extensions;
}
/**
* Clear extensions cache
*/
export function clearExtensionsCache(): void {
extensionsCache = null;
cacheTimestamp = 0;
}
/**
* VineJS validation rule that validates the file to be an
* instance of BodyParser MultipartFile class.
@ -100,7 +65,6 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
// At this point, you can use type assertion to explicitly tell TypeScript that file is of type MultipartFile
const validatedFile = file as MultipartFile;
const validationOptions = typeof options === 'function' ? options(field) : options;
/**
* Set size when it's defined in the options and missing
* on the file instance
@ -108,29 +72,30 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
if (validatedFile.sizeLimit === undefined && validationOptions.size) {
validatedFile.sizeLimit = validationOptions.size;
}
/**
* Set extensions when it's defined in the options and missing
* on the file instance
*/
if (validatedFile.allowedExtensions === undefined) {
if (validationOptions.extnames !== undefined) {
validatedFile.allowedExtensions = validationOptions.extnames;
} else {
validatedFile.allowedExtensions = await getEnabledExtensions();
}
// if (validatedFile.allowedExtensions === undefined && validationOptions.extnames) {
// validatedFile.allowedExtensions = validationOptions.extnames;
// }
if (validatedFile.allowedExtensions === undefined && validationOptions.extnames !== undefined) {
validatedFile.allowedExtensions = validationOptions.extnames; // await getEnabledExtensions();
} else if (validatedFile.allowedExtensions === undefined && validationOptions.extnames === undefined) {
validatedFile.allowedExtensions = await getEnabledExtensions();
}
/**
* wieder löschen
* Set extensions when it's defined in the options and missing
* on the file instance
*/
// if (file.clientNameSizeLimit === undefined && validationOptions.clientNameSizeLimit) {
// file.clientNameSizeLimit = validationOptions.clientNameSizeLimit;
// }
/**
* Validate file
*/
try {
validatedFile.validate();
} catch (error) {
field.report(`File validation failed: ${error.message}`, 'file.validation_error', field, validationOptions);
return;
}
validatedFile.validate();
/**
* Report errors
*/
@ -142,37 +107,36 @@ const isMultipartFile = vine.createRule(async (file: MultipartFile | unknown, op
const MULTIPART_FILE: typeof symbols.SUBTYPE = symbols.SUBTYPE;
export class VineMultipartFile extends BaseLiteralType<MultipartFile, MultipartFile, MultipartFile> {
[MULTIPART_FILE]: string;
public validationOptions?: FileRuleValidationOptions;
// constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions) {
// super(options, [isMultipartFile(validationOptions || {})]);
// this.validationOptions = validationOptions;
// this.#private = true;
// }
// clone(): this {
// return new VineMultipartFile(this.validationOptions, this.cloneOptions()) as this;
// }
// #private;
// constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions, validations?: Validation<any>[]);
// clone(): this;
public validationOptions;
// extnames: (18) ['gpkg', 'htm', 'html', 'csv', 'txt', 'asc', 'c', 'cc', 'h', 'srt', 'tiff', 'pdf', 'png', 'zip', 'jpg', 'jpeg', 'jpe', 'xlsx']
// size: '512mb'
// public constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions, validations?: Validation<any>[]) {
public constructor(validationOptions?: FileRuleValidationOptions, options?: FieldOptions) {
// super(options, validations);
super(options, [isMultipartFile(validationOptions || {})]);
this.validationOptions = validationOptions;
}
public clone(): any {
// return new VineMultipartFile(this.validationOptions, this.cloneOptions(), this.cloneValidations());
return new VineMultipartFile(this.validationOptions, this.cloneOptions());
}
/**
* Set maximum file size
*/
public maxSize(size: string | number): this {
const newOptions = { ...this.validationOptions, size };
return new VineMultipartFile(newOptions, this.cloneOptions()) as this;
}
/**
* Set allowed extensions
*/
public extensions(extnames: string[]): this {
const newOptions = { ...this.validationOptions, extnames };
return new VineMultipartFile(newOptions, this.cloneOptions()) as this;
}
}
export default class VinejsProvider {
@ -191,8 +155,13 @@ export default class VinejsProvider {
/**
* The container bindings have booted
*/
boot(): void {
Vine.macro('myfile', function (this: Vine, options?: FileRuleValidationOptions) {
// VineString.macro('translatedLanguage', function (this: VineString, options: Options) {
// return this.use(translatedLanguageRule(options));
// });
Vine.macro('myfile', function (this: Vine, options) {
return new VineMultipartFile(options);
});
@ -206,41 +175,6 @@ export default class VinejsProvider {
}
return new RequestValidator(this.ctx).validateUsing(...args);
});
// Ensure MIME validation macros are loaded
this.loadMimeValidationMacros();
this.loadFileScanMacros();
this.loadFileLengthMacros();
}
/**
* Load MIME validation macros - called during boot to ensure they're available
*/
private async loadMimeValidationMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/allowed_extensions_mimetypes');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
}
private async loadFileScanMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/file_scan');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
}
private async loadFileLengthMacros(): Promise<void> {
try {
// Dynamically import the MIME validation rule to ensure macros are registered
await import('#start/rules/file_length');
} catch (error) {
console.warn('Could not load MIME validation macros:', error);
}
}
/**
@ -256,7 +190,5 @@ export default class VinejsProvider {
/**
* Preparing to shutdown the app
*/
async shutdown() {
clearExtensionsCache();
}
async shutdown() {}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.8 KiB

File diff suppressed because one or more lines are too long

View file

@ -111,14 +111,7 @@
<!--5 server_date_modified -->
<xsl:if test="ServerDateModified/@UnixTimestamp != ''">
<xsl:text>"server_date_modified": "</xsl:text>
<xsl:value-of select="ServerDateModified/@UnixTimestamp" />
<xsl:text>",</xsl:text>
</xsl:if>
<!--5 embargo_date -->
<xsl:if test="EmbargoDate/@UnixTimestamp != ''">
<xsl:text>"embargo_date": "</xsl:text>
<xsl:value-of select="EmbargoDate/@UnixTimestamp" />
<xsl:value-of select="/ServerDateModified/@UnixTimestamp" />
<xsl:text>",</xsl:text>
</xsl:if>
@ -207,8 +200,7 @@
<!--17 +18 uncontrolled subject (swd) -->
<xsl:variable name="subjects">
<!-- <xsl:for-each select="Subject[@Type = 'Uncontrolled']"> -->
<xsl:for-each select="Subject[@Type = 'Uncontrolled' or @Type = 'Geoera']">
<xsl:for-each select="Subject[@Type = 'Uncontrolled']">
<xsl:text>"</xsl:text>
<xsl:value-of select="fn:escapeQuotes(@Value)"/>
<xsl:text>"</xsl:text>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 526 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

View file

@ -1,3 +0,0 @@
[ZoneTransfer]
ZoneId=3
HostUrl=https://sea1.geoinformation.dev/favicon-32x32.png

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

BIN
public/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 952 KiB

View file

@ -1 +0,0 @@
{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}

174
readme.md
View file

@ -11,8 +11,6 @@ Welcome to the Tethys Research Repository Backend System! This is the backend co
- [Configuration](#configuration)
- [Database](#database)
- [API Documentation](#api-documentation)
- [Commands](#commands)
- [Documentation](#documentation)
- [Contributing](#contributing)
- [License](#license)
@ -31,175 +29,5 @@ Before you begin, ensure you have met the following requirements:
1. Clone this repository:
```bash
git clone https://gitea.geologie.ac.at/geolba/tethys.backend.git
cd tethys-backend
git clone https://gitea.geologie.ac.at/geolba/tethys.backend.git
```
2. Install dependencies:
```bash
npm install
```
3. Configure environment variables (see [Configuration](#configuration))
4. Run database migrations:
```bash
node ace migration:run
```
5. Start the development server:
```bash
npm run dev
```
## Usage
The Tethys Backend provides RESTful APIs for managing research datasets, user authentication, DOI registration, and search functionality.
## Configuration
Copy the `.env.example` file to `.env` and configure the following variables:
### Database Configuration
```bash
DB_CONNECTION=pg
DB_HOST=localhost
DB_PORT=5432
DB_USER=your_username
DB_PASSWORD=your_password
DB_DATABASE=tethys_db
```
### DataCite Configuration
```bash
# DataCite Credentials
DATACITE_USERNAME=your_datacite_username
DATACITE_PASSWORD=your_datacite_password
DATACITE_PREFIX=10.21388
# Environment-specific API endpoints
DATACITE_API_URL=https://api.test.datacite.org # Test environment
DATACITE_SERVICE_URL=https://mds.test.datacite.org # Test MDS
# For production:
# DATACITE_API_URL=https://api.datacite.org
# DATACITE_SERVICE_URL=https://mds.datacite.org
```
### OpenSearch Configuration
```bash
OPENSEARCH_HOST=localhost:9200
```
### Application Configuration
```bash
BASE_DOMAIN=tethys.at
APP_KEY=your_app_key
```
## Database
The system uses PostgreSQL with Lucid ORM. Key models include:
- **Dataset**: Research dataset metadata
- **DatasetIdentifier**: DOI and other identifiers for datasets
- **User**: User management and authentication
- **XmlCache**: Cached XML metadata
Run migrations and seeders:
```bash
# Run migrations
node ace migration:run
# Run seeders (if available)
node ace db:seed
```
## API Documentation
API endpoints are available for:
- Dataset management (`/api/datasets`)
- User authentication (`/api/auth`)
- DOI registration (`/api/doi`)
- Search functionality (`/api/search`)
*Detailed API documentation can be found in the `/docs/api` directory.*
## Commands
The system includes several Ace commands for maintenance and data management:
### Dataset Indexing
```bash
# Index all published datasets to OpenSearch
node ace index:datasets
# Index a specific dataset
node ace index:datasets --publish_id 123
```
### DataCite DOI Management
```bash
# Update DataCite records for modified datasets
node ace update:datacite
# Show detailed statistics for datasets needing updates
node ace update:datacite --stats
# Preview what would be updated (dry run)
node ace update:datacite --dry-run
# Force update all DOI records
node ace update:datacite --force
# Update a specific dataset
node ace update:datacite --publish_id 123
```
*For detailed command documentation, see the [Commands Documentation](docs/commands/)*
## Documentation
Comprehensive documentation is available in the `/docs` directory:
- **[Commands Documentation](docs/commands/)** - Detailed guides for Ace commands
- [DataCite Update Command](docs/commands/update-datacite.md) - DOI synchronization and management
- [Dataset Indexing Command](docs/commands/index-datasets.md) - Search index management
- **[API Documentation](docs/api/)** - REST API endpoints and usage
- **[Deployment Guide](docs/deployment/)** - Production deployment instructions
- **[Configuration Guide](docs/configuration/)** - Environment setup and configuration options
## Contributing
1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add some amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request
### Development Guidelines
- Follow the existing code style and conventions
- Write tests for new features
- Update documentation for any API changes
- Ensure all commands and migrations work properly
### Testing Commands
```bash
# Run tests
npm test
# Test specific commands
node ace update:datacite --dry-run --publish_id 123
node ace index:datasets --publish_id 123
```
## License
This project is licensed under the [MIT License](LICENSE).

View file

@ -1,7 +1,7 @@
/* @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@400;500&display=swap'); */
/* @import url('https://fonts.googleapis.com/css?family=Roboto:400,400i,600,700'); */
/* @import '_checkbox-radio-switch.css'; */
@import '_checkbox-radio-switch.css';
@import '_progress.css';
@import '_scrollbars.css';
@import '_table.css';

View file

@ -1,5 +1,5 @@
<script lang="ts" setup>
import { computed, PropType } from 'vue';
<script setup>
import { computed } from 'vue';
import { Link } from '@inertiajs/vue3';
// import { Link } from '@inertiajs/inertia-vue3';
import { getButtonColor } from '@/colors';
@ -30,8 +30,8 @@ const props = defineProps({
type: String,
default: null,
},
color: {
type: String as PropType<'white' | 'contrast' | 'light' | 'success' | 'danger' | 'warning' | 'info' | 'modern'>,
color: {
type: String,
default: 'white',
},
as: {
@ -45,18 +45,11 @@ const props = defineProps({
roundedFull: Boolean,
});
const emit = defineEmits(['click']);
const is = computed(() => {
if (props.as) {
return props.as;
}
// If disabled, always render as button or span to prevent navigation
if (props.disabled) {
return props.routeName || props.href ? 'span' : 'button';
}
if (props.routeName) {
return Link;
}
@ -76,105 +69,47 @@ const computedType = computed(() => {
return null;
});
// Only provide href/routeName when not disabled
const computedHref = computed(() => {
if (props.disabled) return null;
return props.routeName || props.href;
});
// Only provide target when not disabled and has href
const computedTarget = computed(() => {
if (props.disabled || !props.href) return null;
return props.target;
});
// Only provide disabled attribute for actual button elements
const computedDisabled = computed(() => {
if (is.value === 'button') {
return props.disabled;
}
return null;
});
const labelClass = computed(() => (props.small && props.icon ? 'px-1' : 'px-2'));
const componentClass = computed(() => {
const base = [
'inline-flex',
'cursor-pointer',
'justify-center',
'items-center',
'whitespace-nowrap',
'focus:outline-none',
'transition-colors',
'focus:ring-2',
'duration-150',
'border',
props.roundedFull ? 'rounded-full' : 'rounded',
props.active ? 'ring ring-black dark:ring-white' : 'ring-blue-700',
getButtonColor(props.color, props.outline, !props.disabled),
];
// Only add focus ring styles when not disabled
if (!props.disabled) {
base.push('focus:ring-2');
base.push(props.active ? 'ring ring-black dark:ring-white' : 'ring-blue-700');
}
// Add button colors
// Add button colors - handle both string and array returns
// const buttonColors = getButtonColor(props.color, props.outline, !props.disabled);
base.push(getButtonColor(props.color, props.outline, !props.disabled));
// if (Array.isArray(buttonColors)) {
// base.push(...buttonColors);
// } else {
// base.push(buttonColors);
// }
// Add size classes
if (props.small) {
base.push('text-sm', props.roundedFull ? 'px-3 py-1' : 'p-1');
} else {
base.push('py-2', props.roundedFull ? 'px-6' : 'px-3');
}
// Add disabled/enabled specific classes
if (props.disabled) {
base.push(
'cursor-not-allowed',
'opacity-60',
'pointer-events-none', // This prevents all interactions
);
} else {
base.push('cursor-pointer');
// Add hover effects only when not disabled
if (is.value === 'button' || is.value === 'a' || is.value === Link) {
base.push('hover:opacity-80');
}
base.push('cursor-not-allowed', props.outline ? 'opacity-50' : 'opacity-70');
}
return base;
});
// Handle click events with disabled check
const handleClick = (event) => {
if (props.disabled) {
event.preventDefault();
event.stopPropagation();
return;
}
emit('click', event);
};
</script>
<template>
<component
:is="is"
:class="componentClass"
:href="computedHref"
:to="props.disabled ? null : props.routeName"
:href="routeName ? routeName : href"
:type="computedType"
:target="computedTarget"
:disabled="computedDisabled"
:tabindex="props.disabled ? -1 : null"
:aria-disabled="props.disabled ? 'true' : null"
@click="handleClick"
:target="target"
:disabled="disabled"
>
<BaseIcon v-if="icon" :path="icon" />
<span v-if="label" :class="labelClass">{{ label }}</span>

View file

@ -67,7 +67,7 @@ const submit = (e) => {
<BaseIcon v-if="icon" :path="icon" class="mr-3" />
{{ title }}
</div>
<button v-if="showHeaderIcon" class="flex items-center py-3 px-4 justify-center ring-blue-700 focus:ring" @click.stop="headerIconClick">
<button v-if="showHeaderIcon" class="flex items-center py-3 px-4 justify-center ring-blue-700 focus:ring" @click="headerIconClick">
<BaseIcon :path="computedHeaderIcon" />
</button>
</header>

View file

@ -39,10 +39,6 @@ const props = defineProps({
type: String,
default: null,
},
allowEmailContact: {
type: Boolean,
default: false,
}
});
const pillType = computed(() => {
@ -85,8 +81,9 @@ const pillType = computed(() => {
<h4 class="text-xl text-ellipsis">
{{ name }}
</h4>
<p class="text-gray-500 dark:text-slate-400">
<div v-if="props.allowEmailContact"> {{ email }}</div>
<p class="text-gray-500 dark:text-slate-400">
<!-- {{ date }} @ {{ login }} -->
{{ email }}
</p>
</div>
</BaseLevel>

View file

@ -61,10 +61,10 @@ const cancel = () => confirmCancel('cancel');
<CardBox
v-show="value"
:title="title"
class="p-4 shadow-lg max-h-modal w-11/12 md:w-3/5 lg:w-2/5 xl:w-4/12 z-50"
class="shadow-lg max-h-modal w-11/12 md:w-3/5 lg:w-2/5 xl:w-4/12 z-50"
:header-icon="mdiClose"
modal
@header-icon-click="cancel"
@header-icon-click="cancel"
>
<div class="space-y-3">
<h1 v-if="largeTitle" class="text-2xl">

View file

@ -1,75 +0,0 @@
<script setup lang="ts">
import { computed, useSlots } from 'vue';
const props = defineProps({
title: {
type: String,
default: null,
},
icon: {
type: String,
default: null,
},
showHeaderIcon: {
type: Boolean,
default: true,
},
headerIcon: {
type: String,
default: null,
},
rounded: {
type: String,
default: 'rounded-xl',
},
hasFormData: Boolean,
empty: Boolean,
form: Boolean,
hoverable: Boolean,
modal: Boolean,
});
const emit = defineEmits(['header-icon-click', 'submit']);
const is = computed(() => (props.form ? 'form' : 'div'));
const slots = useSlots();
// const footer = computed(() => slots.footer && !!slots.footer());
const componentClass = computed(() => {
const base = [props.rounded, props.modal ? 'dark:bg-slate-900' : 'dark:bg-slate-900/70'];
if (props.hoverable) {
base.push('hover:shadow-lg transition-shadow duration-500');
}
return base;
});
// const headerIconClick = () => {
// emit('header-icon-click');
// };
// const submit = (e) => {
// emit('submit', e);
// };
</script>
<template>
<component :is="is" :class="componentClass" class="bg-white flex flex-col border border-gray-100 dark:border-slate-800 mb-4">
<div v-if="empty" class="text-center py-24 text-gray-500 dark:text-slate-400">
<p>Nothing's here</p>
</div>
<div v-else class="flex-1" :class="[!hasFormData && 'p-6']">
<slot />
</div>
</component>
</template>

View file

@ -1,4 +1,4 @@
<script lang="ts" setup>
<script setup>
import { mdiCog } from '@mdi/js';
import CardBox from '@/Components/CardBox.vue';
import NumberDynamic from '@/Components/NumberDynamic.vue';
@ -49,9 +49,6 @@ defineProps({
<PillTagTrend :trend="trend" :trend-type="trendType" small />
<BaseButton :icon="mdiCog" icon-w="w-4" icon-h="h-4" color="white" small />
</BaseLevel>
<BaseLevel v-else class="mb-3" mobile>
<BaseIcon v-if="icon" :path="icon" size="48" w="w-4" h="h-4" :class="color" />
</BaseLevel>
<BaseLevel mobile>
<div>
<h3 class="text-lg leading-tight text-gray-500 dark:text-slate-400">

View file

@ -17,15 +17,6 @@
<p class="text-lg text-blue-700">Drop files to upload</p>
</div>
<!-- Loading Spinner when processing big files -->
<div v-if="isLoading" class="absolute inset-0 z-60 flex items-center justify-center bg-gray-500 bg-opacity-50">
<svg class="animate-spin h-12 w-12 text-white" xmlns="http://www.w3.org/2000/svg" fill="none"
viewBox="0 0 24 24">
<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
<path class="opacity-75" fill="currentColor" d="M12 2a10 10 0 0110 10h-4a6 6 0 00-6-6V2z"></path>
</svg>
</div>
<!-- scroll area -->
<div class="h-full p-8 w-full h-full flex flex-col">
<header class="flex items-center justify-center w-full">
@ -41,9 +32,9 @@
<p class="mb-2 text-sm text-gray-500 dark:text-gray-400">
<span class="font-semibold">Click to upload</span> or drag and drop
</p>
<!-- <p class="text-xs text-gray-500 dark:text-gray-400">SVG, PNG, JPG or GIF (MAX. 800x400px)</p> -->
</div>
<input id="dropzone-file" type="file" class="hidden" @click="showSpinner" @change="onChangeFile"
@cancel="cancelSpinner" multiple="true" />
<input id="dropzone-file" type="file" class="hidden" @change="onChangeFile" multiple="true" />
</label>
</header>
@ -191,7 +182,7 @@
<!-- sticky footer -->
<footer class="flex justify-end px-8 pb-8 pt-4">
<button v-if="showClearButton" id="cancel"
<button id="cancel"
class="ml-3 rounded-sm px-3 py-1 hover:bg-gray-300 focus:shadow-outline focus:outline-none"
@click="clearAllFiles">
Clear
@ -250,8 +241,6 @@ class FileUploadComponent extends Vue {
@Ref('overlay') overlay: HTMLDivElement;
public isLoading: boolean = false;
private counter: number = 0;
// @Prop() files: Array<TestFile>;
@ -268,12 +257,6 @@ class FileUploadComponent extends Vue {
})
filesToDelete: Array<TethysFile>;
@Prop({
type: Boolean,
default: true,
})
showClearButton: boolean;
// // deletetFiles: Array<TethysFile> = [];
get deletetFiles(): Array<TethysFile> {
return this.filesToDelete;
@ -281,7 +264,7 @@ class FileUploadComponent extends Vue {
set deletetFiles(values: Array<TethysFile>) {
// this.modelValue = value;
this.filesToDelete.length = 0;
this.filesToDelete.push(...values);
this.filesToDelete.push(...values);
}
get items(): Array<TethysFile | File> {
@ -359,10 +342,10 @@ class FileUploadComponent extends Vue {
}
// reset counter and append file to gallery when file is dropped
public dropHandler(event: DragEvent): void {
event.preventDefault();
const dataTransfer = event.dataTransfer;
// let bigFileFound = false;
if (dataTransfer) {
for (const file of event.dataTransfer?.files) {
// let fileName = String(file.name.replace(/\.[^/.]+$/, ''));
@ -370,73 +353,28 @@ class FileUploadComponent extends Vue {
// if (file.type.match('image.*')) {
// this.generateURL(file);
// }
// if (file.size > 62914560) { // 60 MB in bytes
// bigFileFound = true;
// }
this._addFile(file);
}
this.overlay.classList.remove('draggedover');
this.counter = 0;
}
// if (bigFileFound) {
// this.isLoading = true;
// // Assume file processing delay; adjust timeout as needed or rely on async processing completion.
// setTimeout(() => {
// this.isLoading = false;
// }, 1500);
// }
}
public showSpinner() {
// event.preventDefault();
this.isLoading = true;
}
public cancelSpinner() {
// const target = event.target as HTMLInputElement;
// // If no files were selected, remove spinner
// if (!target.files || target.files.length === 0) {
// this.isLoading = false;
// }
this.isLoading = false;
}
public onChangeFile(event: Event) {
event.preventDefault();
let target = event.target as HTMLInputElement;
// let uploadedFile = event.target.files[0];
// let fileName = String(event.target.files[0].name.replace(/\.[^/.]+$/, ''));
if (target && target.files) {
for (const file of event.target.files) {
// let fileName = String(event.target.files[0].name.replace(/\.[^/.]+$/, ''));
// file.label = fileName;
// if (file.type.match('image.*')) {
// this.generateURL(file);
// }
// Immediately set spinner if any file is large (over 100 MB)
// for (const file of target.files) {
// if (file.size > 62914560) { // 100 MB
// bigFileFound = true;
// break;
// }
// }
// if (bigFileFound) {
// this.isLoading = true;
// }
this._addFile(file);
}
for (const file of event.target.files) {
// let fileName = String(event.target.files[0].name.replace(/\.[^/.]+$/, ''));
// file.label = fileName;
// if (file.type.match('image.*')) {
// this.generateURL(file);
// }
this._addFile(file);
}
// if (bigFileFound) {
// this.isLoading = true;
// setTimeout(() => {
// this.isLoading = false;
// }, 1500);
// }
// this.overlay.classList.remove('draggedover');
this.counter = 0;
this.isLoading = false;
}
get errors(): IDictionary {
@ -458,9 +396,7 @@ class FileUploadComponent extends Vue {
public clearAllFiles(event: Event) {
event.preventDefault();
if (this.showClearButton == true) {
this.items.splice(0);
}
this.items.splice(0);
}
public removeFile(key: number) {
@ -509,7 +445,7 @@ class FileUploadComponent extends Vue {
let localUrl: string = '';
if (file instanceof File) {
localUrl = URL.createObjectURL(file as Blob);
}
}
// else if (file.fileData) {
// // const blob = new Blob([file.fileData]);
// // localUrl = URL.createObjectURL(blob);
@ -529,6 +465,17 @@ class FileUploadComponent extends Vue {
return localUrl;
}
// private async downloadFile(id: number): Promise<string> {
// const response = await axios.get<Blob>(`/api/download/${id}`, {
// responseType: 'blob',
// });
// const url = URL.createObjectURL(response.data);
// setTimeout(() => {
// URL.revokeObjectURL(url);
// }, 1000);
// return url;
// }
public getFileSize(file: File) {
if (file.size > 1024) {
if (file.size > 1048576) {
@ -541,6 +488,17 @@ class FileUploadComponent extends Vue {
}
}
// private _addFile(file) {
// // const isImage = file.type.match('image.*');
// // const objectURL = URL.createObjectURL(file);
// // this.files[objectURL] = file;
// // let test: TethysFile = { upload: file, label: "dfdsfs", sorting: 0 };
// // file.sorting = this.files.length;
// file.sort_order = (this.items.length + 1),
// this.files.push(file);
// }
private _addFile(file: File) {
// const reader = new FileReader();
// reader.onload = (event) => {
@ -572,11 +530,14 @@ class FileUploadComponent extends Vue {
// this.items.push(test);
this.items[this.items.length] = test;
} else {
file.sort_order = this.items.length + 1;
this.items.push(file);
}
}
// use to check if a file is being dragged
// private _hasFiles({ types = [] as Array<string> }) {
// return types.indexOf('Files') > -1;
// }
private _hasFiles(dataTransfer: DataTransfer | null): boolean {
return dataTransfer ? dataTransfer.items.length > 0 : false;
}

View file

@ -15,10 +15,9 @@ const year = computed(() => new Date().getFullYear());
<!-- Get more with <a href="https://tailwind-vue.justboil.me/" target="_blank" class="text-blue-600">Premium
version</a> -->
</div>
<div class="md:py-1">
<div class="md:py-3">
<a href="https://www.tethys.at" target="_blank">
<!-- <JustboilLogo class="w-auto h-8 md:h-6" /> -->
<JustboilLogo class="w-auto h-12 md:h-10 dark:invert" />
<JustboilLogo class="w-auto h-8 md:h-6" />
</a>
</div>
</BaseLevel>

View file

@ -1,165 +1,43 @@
<script setup lang="ts">
import { computed, watch, ref } from 'vue';
<script setup>
import { computed } from 'vue';
interface Props {
name: string;
type?: 'checkbox' | 'radio' | 'switch';
label?: string | null;
modelValue: Array<any> | string | number | boolean | null;
inputValue: string | number | boolean;
}
const props = defineProps<Props>();
const emit = defineEmits<{ (e: 'update:modelValue', value: Props['modelValue']): void }>();
// const computedValue = computed({
// get: () => props.modelValue,
// set: (value) => {
// emit('update:modelValue', props.type === 'radio' ? [value] : value);
// },
// });
const computedValue = computed({
get: () => {
if (props.type === 'radio') {
// For radio buttons, return boolean indicating if this option is selected
if (Array.isArray(props.modelValue)) {
return props.modelValue;
}
return [props.modelValue];
} else {
// For checkboxes, return boolean indicating if this option is included
if (Array.isArray(props.modelValue)) {
return props.modelValue.includes(props.inputValue);
}
return props.modelValue == props.inputValue;
}
const props = defineProps({
name: {
type: String,
required: true,
},
type: {
type: String,
default: 'checkbox',
validator: (value) => ['checkbox', 'radio', 'switch'].includes(value),
},
label: {
type: String,
default: null,
},
modelValue: {
type: [Array, String, Number, Boolean],
default: null,
},
inputValue: {
type: [String, Number, Boolean],
required: true,
},
set: (value: boolean) => {
if (props.type === 'radio') {
// When radio is selected, emit the new value as array
emit('update:modelValue', [value]);
} else {
// Handle checkboxes
let updatedValue = Array.isArray(props.modelValue) ? [...props.modelValue] : [];
if (value) {
if (!updatedValue.includes(props.inputValue)) {
updatedValue.push(props.inputValue);
}
} else {
updatedValue = updatedValue.filter(item => item != props.inputValue);
}
emit('update:modelValue', updatedValue);
}
}
});
const inputType = computed(() => (props.type === 'radio' ? 'radio' : 'checkbox'));
// Define isChecked for radio inputs: it's true when the current modelValue equals the inputValue
// const isChecked = computed(() => {
// if (Array.isArray(computedValue.value) && computedValue.value.length > 0) {
// return props.type === 'radio'
// ? computedValue.value[0] === props.inputValue
// : computedValue.value.includes(props.inputValue);
// }
// return computedValue.value === props.inputValue;
// });
// const isChecked = computed(() => {
// return computedValue.value[0] === props.inputValue;
// });
// Fix the isChecked computation with proper type handling
// const isChecked = computed(() => {
// if (props.type === 'radio') {
// // Use loose equality to handle string/number conversion
// return computedValue.value == props.inputValue;
// }
// return computedValue.value === true;
// });
// const isChecked = computed(() => {
// if (props.type === 'radio') {
// if (Array.isArray(props.modelValue)) {
// return props.modelValue.length > 0 && props.modelValue[0] == props.inputValue;
// }
// return props.modelValue == props.inputValue;
// }
// // For checkboxes
// if (Array.isArray(props.modelValue)) {
// return props.modelValue.includes(props.inputValue);
// }
// return props.modelValue == props.inputValue;
// });
// Use a ref for isChecked and update it with a watcher
const isChecked = ref(false);
// Calculate initial isChecked value
const calculateIsChecked = () => {
if (props.type === 'radio') {
if (Array.isArray(props.modelValue)) {
return props.modelValue.length > 0 && props.modelValue[0] == props.inputValue;
}
return props.modelValue == props.inputValue;
}
// For checkboxes
if (Array.isArray(props.modelValue)) {
return props.modelValue.includes(props.inputValue);
}
return props.modelValue == props.inputValue;
};
// Set initial value
isChecked.value = calculateIsChecked();
// Watch for changes in modelValue and recalculate isChecked
watch(
() => props.modelValue,
(newValue) => {
console.log('modelValue changed:', {
newValue,
inputValue: props.inputValue,
type: props.type
});
isChecked.value = calculateIsChecked();
const emit = defineEmits(['update:modelValue']);
const computedValue = computed({
get: () => props.modelValue,
set: (value) => {
emit('update:modelValue', value);
},
{ immediate: true, deep: true }
);
// Also watch inputValue in case it changes
watch(
() => props.inputValue,
() => {
isChecked.value = calculateIsChecked();
}
);
});
const inputType = computed(() => (props.type === 'radio' ? 'radio' : 'checkbox'));
</script>
<template>
<label v-if="type === 'radio'" :class="[type]"
class="mr-6 mb-3 last:mr-0 inline-flex items-center cursor-pointer relative">
<input
v-model="computedValue"
:type="inputType"
:name="name"
:value="inputValue"
class="absolute left-0 opacity-0 -z-1 focus:outline-none focus:ring-0" />
<span class="check border transition-colors duration-200 dark:bg-slate-800 block w-5 h-5 rounded-full" :class="{
'border-gray-700': !isChecked,
'bg-radio-checked bg-no-repeat bg-center bg-lime-600 border-lime-600 border-4': isChecked
}" />
<span class="pl-2 control-label">{{ label }}</span>
</label>
<label v-else-if="type === 'checkbox'" :class="[type]"
class="mr-6 mb-3 last:mr-0 inline-flex items-center cursor-pointer relative">
<input v-model="computedValue" :type="inputType" :name="name" :value="inputValue"
class="absolute left-0 opacity-0 -z-1 focus:outline-none focus:ring-0" />
<span class="check border transition-colors duration-200 dark:bg-slate-800 block w-5 h-5 rounded" :class="{
'border-gray-700': !isChecked,
'bg-checkbox-checked bg-no-repeat bg-center bg-lime-600 border-lime-600 border-4': isChecked
}" />
<span class="pl-2 control-label">{{ label }}</span>
<label :class="type" class="mr-6 mb-3 last:mr-0">
<input v-model="computedValue" :type="inputType" :name="name" :value="inputValue" />
<span class="check" />
<span class="pl-2">{{ label }}</span>
</label>
</template>

View file

@ -1,9 +1,9 @@
<script setup lang="ts">
import { computed, ref, PropType } from 'vue';
import { computed, ref } from 'vue';
import FormCheckRadio from '@/Components/FormCheckRadio.vue';
// import BaseButton from '@/Components/BaseButton.vue';
// import FormControl from '@/Components/FormControl.vue';
import BaseButton from '@/Components/BaseButton.vue';
import FormControl from '@/Components/FormControl.vue';
import { mdiPlusCircle } from '@mdi/js';
const props = defineProps({
options: {
type: Object,
@ -23,7 +23,7 @@ const props = defineProps({
required: true,
},
type: {
type: String as PropType<'checkbox' | 'radio' | 'switch'>,
type: String,
default: 'checkbox',
validator: (value: string) => ['checkbox', 'radio', 'switch'].includes(value),
},
@ -38,82 +38,32 @@ const props = defineProps({
},
});
const emit = defineEmits(['update:modelValue']);
// const computedValue = computed({
// // get: () => props.modelValue,
// get: () => {
// // const ids = props.modelValue.map((obj) => obj.id);
// // return ids;
// if (Array.isArray(props.modelValue)) {
// if (props.modelValue.every((item) => typeof item === 'number')) {
// return props.modelValue;
// } else if (props.modelValue.every((item) => hasIdAttribute(item))) {
// const ids = props.modelValue.map((obj) => obj.id);
// return ids;
// }
// return props.modelValue;
// }
// // return props.modelValue;
// },
// set: (value) => {
// emit('update:modelValue', value);
// },
// });
const computedValue = computed({
// get: () => props.modelValue,
get: () => {
// const ids = props.modelValue.map((obj) => obj.id);
// return ids;
if (Array.isArray(props.modelValue)) {
if (props.modelValue.every((item) => typeof item === 'number')) {
return props.modelValue;
} else if (props.modelValue.every((item) => hasIdAttribute(item))) {
const ids = props.modelValue.map((obj) => obj.id.toString());
return ids;
}
return props.modelValue;
}
// return props.modelValue;
},
set: (value) => {
emit('update:modelValue', value);
},
});
// Define a type guard to check if an object has an 'id' attribute
// function hasIdAttribute(obj: any): obj is { id: any } {
// return typeof obj === 'object' && 'id' in obj;
// }
const computedValue = computed({
get: () => {
if (!props.modelValue) return props.modelValue;
if (Array.isArray(props.modelValue)) {
// Handle empty array
if (props.modelValue.length === 0) return [];
// If all items are objects with id property
if (props.modelValue.every((item) => hasIdAttribute(item))) {
return props.modelValue.map((obj) => {
// Ensure we return the correct type based on the options keys
const id = obj.id;
// Check if options keys are numbers or strings
const optionKeys = Object.keys(props.options);
if (optionKeys.length > 0) {
// If option keys are numeric strings, return number
if (optionKeys.every(key => !isNaN(Number(key)))) {
return Number(id);
}
}
return String(id);
});
}
// If all items are numbers
if (props.modelValue.every((item) => typeof item === 'number')) {
return props.modelValue;
}
// If all items are strings that represent numbers
if (props.modelValue.every((item) => typeof item === 'string' && !isNaN(Number(item)))) {
// Convert to numbers if options keys are numeric
const optionKeys = Object.keys(props.options);
if (optionKeys.length > 0 && optionKeys.every(key => !isNaN(Number(key)))) {
return props.modelValue.map(item => Number(item));
}
return props.modelValue;
}
// Return as-is for other cases
return props.modelValue;
}
return props.modelValue;
},
set: (value) => {
emit('update:modelValue', value);
},
});
const hasIdAttribute = (obj: any): obj is { id: any } => {
return typeof obj === 'object' && 'id' in obj;
};
@ -128,11 +78,11 @@ const addOption = () => {
const inputElClass = computed(() => {
const base = [
'px-3 py-2 max-w-full border-gray-700 rounded w-full',
'px-3 py-2 max-w-full focus:ring focus:outline-none border-gray-700 rounded w-full',
'dark:placeholder-gray-400',
'h-12',
'border',
'bg-transparent'
'bg-transparent'
// props.isReadOnly ? 'bg-gray-50 dark:bg-slate-600' : 'bg-white dark:bg-slate-800',
];
// if (props.icon) {
@ -158,9 +108,7 @@ const inputElClass = computed(() => {
d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm1 17h-2v-6H5v-2h6V5h2v6h6v2h-6v6z" />
</svg>
</div>
<!-- <FormCheckRadio v-for="(value, key) in options" :key="key" v-model="computedValue" :type="type"
:name="name" :input-value="key" :label="value" :class="componentClass" /> -->
<FormCheckRadio v-for="(value, key) in options" key="`${name}-${key}-${JSON.stringify(computedValue)}`" v-model="computedValue" :type="type"
:name="name" :input-value="isNaN(Number(key)) ? key : Number(key)" :label="value" :class="componentClass" />
<FormCheckRadio v-for="(value, key) in options" :key="key" v-model="computedValue" :type="type" :name="name"
:input-value="key" :label="value" :class="componentClass" />
</div>
</template>

View file

@ -67,28 +67,15 @@ const computedValue = computed({
emit('update:modelValue', value);
},
});
// focus:ring focus:outline-none border-gray-700
const inputElClass = computed(() => {
const base = [
'px-3 py-2 max-w-full rounded w-full',
'px-3 py-2 max-w-full focus:ring focus:outline-none border-gray-700 rounded w-full',
'dark:placeholder-gray-400',
props.extraHigh ? 'h-80' : (computedType.value === 'textarea' ? 'h-44' : 'h-12'),
props.borderless ? 'border-0' : 'border',
// // props.transparent && !props.isReadOnly ? 'bg-transparent' : 'bg-white dark:bg-slate-800',
// props.isReadOnly ? 'bg-gray-50 dark:bg-slate-600' : 'bg-white dark:bg-slate-800',
// props.transparent && !props.isReadOnly ? 'bg-transparent' : 'bg-white dark:bg-slate-800',
props.isReadOnly ? 'bg-gray-50 dark:bg-slate-600' : 'bg-white dark:bg-slate-800',
];
// Apply styles based on read-only state.
if (props.isReadOnly) {
// Read-only: no focus ring, grayed-out text and border, and disabled cursor.
base.push('bg-gray-50', 'dark:bg-slate-600', 'border', 'border-gray-300', 'dark:border-slate-600', 'text-gray-500', 'cursor-not-allowed', 'focus:outline-none' ,'focus:ring-0', 'focus:border-gray-300');
} else {
// Actionable field: focus ring, white/dark background, and darker border.
base.push('bg-white dark:bg-slate-800', 'focus:ring focus:outline-none', 'border', 'border-gray-700');
}
if (props.icon) {
base.push('pl-10', 'pr-10');
}

View file

@ -1,74 +0,0 @@
<script setup lang="ts">
import { computed } from 'vue';
import { mdiLicense } from '@mdi/js';

/**
 * Lightweight SVG icon component.
 * Renders either a named icon from the local `svgPaths` map (when `path`
 * matches a key) or treats `path` as a raw SVG path-data string.
 */
const props = defineProps({
    // Key of `svgPaths` (e.g. 'document', 'email') or literal SVG path data.
    path: {
        type: String,
        required: true
    },
    // Rendered width/height in pixels.
    size: {
        type: Number,
        default: 24
    },
    viewBox: {
        type: String,
        default: '0 0 24 24'
    },
    // Stroke colour; defaults to the surrounding text colour.
    color: {
        type: String,
        default: 'currentColor'
    },
    // Extra CSS classes forwarded to the <svg> element.
    className: {
        type: String,
        default: ''
    }
});

// Named SVG paths used across the app.
const svgPaths: Record<string, string> = {
    // Document/File icons
    document: 'M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z',
    documentPlus: 'M9 13h6m-3-3v6m5 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z',
    // Communication icons
    email: 'M2.003 5.884L10 9.882l7.997-3.998A2 2 0 0016 4H4a2 2 0 00-1.997 1.884z M18 8.118l-8 4-8-4V14a2 2 0 002 2h12a2 2 0 002-2V8.118z',
    // Identity/User icons
    // FIX: SVG path data must begin with a moveto command — the leading 'M' was missing.
    idCard: 'M10 2a1 1 0 00-1 1v1a1 1 0 002 0V3a1 1 0 00-1-1zM4 4h3a3 3 0 006 0h3a2 2 0 012 2v9a2 2 0 01-2 2H4a2 2 0 01-2-2V6a2 2 0 012-2zm2.5 7a1.5 1.5 0 100-3 1.5 1.5 0 000 3zm2.45 4a2.5 2.5 0 10-4.9 0h4.9zM12 9a1 1 0 100 2h3a1 1 0 100-2h-3zm-1 4a1 1 0 011-1h2a1 1 0 110 2h-2a1 1 0 01-1-1z',
    // Language/Translation icons
    language: 'M12 2a10 10 0 1 0 0 20a10 10 0 1 0 0-20zm0 0c2.5 0 4.5 4.5 4.5 10s-2 10-4.5 10-4.5-4.5-4.5-10 2-10 4.5-10zm0 0a10 10 0 0 1 0 20a10 10 0 0 1 0-20z',
    // License/Legal icons (path imported from @mdi/js)
    license: mdiLicense,
    // Building/Organization icons
    building: 'M4 4a2 2 0 012-2h8a2 2 0 012 2v12a1 1 0 110 2h-3a1 1 0 01-1-1v-2a1 1 0 00-1-1H9a1 1 0 00-1 1v2a1 1 0 01-1 1H4a1 1 0 110-2V4zm3 1h2v2H7V5zm2 4H7v2h2V9zm2-4h2v2h-2V5zm2 4h-2v2h2V9z',
    // Book/Publication icons
    book: 'M9 4.804A7.968 7.968 0 005.5 4c-1.255 0-2.443.29-3.5.804v10A7.969 7.969 0 015.5 14c1.669 0 3.218.51 4.5 1.385A7.962 7.962 0 0114.5 14c1.255 0 2.443.29 3.5.804v-10A7.968 7.968 0 0014.5 4c-1.255 0-2.443.29-3.5.804V12a1 1 0 11-2 0V4.804z',
    // Download icon
    download: 'M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4'
};

// Resolve a named icon, falling back to treating `path` as raw path data.
const pathData = computed(() => {
    return svgPaths[props.path] || props.path;
});

// Inline width/height style derived from the `size` prop.
const sizeStyle = computed(() => {
    return {
        width: `${props.size}px`,
        height: `${props.size}px`
    };
});
</script>
<template>
    <svg :style="sizeStyle" :class="className" :viewBox="viewBox" xmlns="http://www.w3.org/2000/svg" fill="none"
        :stroke="color" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
        <path :d="pathData" />
    </svg>
</template>

File diff suppressed because one or more lines are too long

View file

@ -1,124 +0,0 @@
<script setup>
import { onMounted, onUnmounted, ref, watch } from 'vue';
import L from 'leaflet';
import 'leaflet/dist/leaflet.css';

// Non-interactive Leaflet preview that highlights a dataset's coverage
// extent (bounding box) as a rectangle on a grey OSM base layer.

const DEFAULT_BASE_LAYER_NAME = 'BaseLayer';
const DEFAULT_BASE_LAYER_ATTRIBUTION = '&copy; <a target="_blank" href="http://osm.org/copyright">OpenStreetMap</a> contributors';

const props = defineProps({
    // Coverage extent; the code reads numeric x_min/x_max and y_min/y_max
    // fields and feeds them to latLngBounds as (lat, lng) pairs —
    // presumably WGS84 lon/lat, TODO confirm with callers.
    coverage: {
        type: Object,
        required: true
    },
    // CSS height of the map container.
    height: {
        type: String,
        default: '250px'
    },
    // DOM id of the map element (allows several maps per page).
    mapId: {
        type: String,
        default: 'view-map'
    }
});

const map = ref(null);
const mapContainer = ref(null);

onMounted(() => {
    initializeMap();
});

// FIX: destroy the Leaflet instance on unmount. Without this, the map's DOM
// event listeners and pending tile requests leak every time the component
// is torn down (e.g. on route changes).
onUnmounted(() => {
    if (map.value) {
        map.value.remove();
        map.value = null;
    }
});

// Redraw the coverage rectangle whenever the coverage prop changes.
watch(() => props.coverage, (newCoverage) => {
    if (map.value && newCoverage) {
        updateBounds();
    }
}, { deep: true });

const initializeMap = () => {
    // Create the map with every interaction disabled — static preview only.
    map.value = L.map(mapContainer.value, {
        zoomControl: false,
        attributionControl: false,
        dragging: false,
        scrollWheelZoom: false,
        doubleClickZoom: false,
        boxZoom: false,
        tap: false,
        keyboard: false,
        touchZoom: false
    });
    // Grey OSM base layer served via WMS.
    const osmGray = new L.TileLayer.WMS('https://ows.terrestris.de/osm-gray/service', {
        format: 'image/png',
        attribution: DEFAULT_BASE_LAYER_ATTRIBUTION,
        layers: 'OSM-WMS',
    });
    const layerOptions = {
        label: DEFAULT_BASE_LAYER_NAME,
        visible: true,
        layer: osmGray,
    };
    layerOptions.layer.addTo(map.value);
    // Draw the initial coverage rectangle and fit the view to it.
    updateBounds();
};

const updateBounds = () => {
    if (!props.coverage || !map.value) return;
    // Remove any previously drawn coverage rectangle (the base tile layer
    // is not an L.Rectangle, so it survives this sweep).
    map.value.eachLayer(layer => {
        if (layer instanceof L.Rectangle) {
            map.value.removeLayer(layer);
        }
    });
    // Bounds take [lat, lng] pairs: south-west corner, then north-east.
    const bounds = L.latLngBounds(
        [props.coverage.y_min, props.coverage.x_min],
        [props.coverage.y_max, props.coverage.x_max]
    );
    // Emerald-styled rectangle marking the coverage area.
    L.rectangle(bounds, {
        color: '#10b981', // emerald-500
        weight: 2,
        fillColor: '#d1fae5', // emerald-100
        fillOpacity: 0.5
    }).addTo(map.value);
    // Centre/zoom the map onto the rectangle with some padding.
    map.value.fitBounds(bounds, {
        padding: [20, 20]
    });
};
</script>
<template>
    <div class="map-container bg-emerald-50 dark:bg-emerald-900/30
    rounded-lg shadow-sm overflow-hidden">
        <div :id="mapId" ref="mapContainer" :style="{ height: height }" class="w-full"></div>
    </div>
</template>
<style scoped>
/* Ensure the Leaflet container has proper styling */
:deep(.leaflet-container) {
    background-color: #f0fdf4;
    /* emerald-50 */
}

/* Dark mode adjustments */
@media (prefers-color-scheme: dark) {
    :deep(.leaflet-container) {
        background-color: rgba(6, 78, 59, 0.3);
        /* emerald-900/30 */
    }

    :deep(.leaflet-tile) {
        filter: brightness(0.8) contrast(1.2) grayscale(0.3);
    }
}
</style>

View file

@ -1,11 +1,7 @@
// import type { LatLngBoundsExpression } from 'leaflet/src/geo/LatLngBounds';
// import type { LatLngExpression } from 'leaflet/src/geo/LatLng';
// import type { Layer } from 'leaflet/src/layer/Layer';
// import type { CRS } from 'leaflet/src/geo/crs/CRS';
import type { LatLngBoundsExpression } from 'leaflet';
import type { LatLngExpression } from 'leaflet';
import type { Layer } from 'leaflet';
import type { CRS } from 'leaflet';
import type { LatLngBoundsExpression } from 'leaflet/src/geo/LatLngBounds';
import type { LatLngExpression } from 'leaflet/src/geo/LatLng';
import type { Layer } from 'leaflet/src/layer/Layer';
import type { CRS } from 'leaflet/src/geo/crs/CRS';
export interface MapOptions {
preferCanvas?: boolean | undefined;

View file

@ -8,6 +8,7 @@ import { svg } from 'leaflet/src/layer/vector/SVG';
import axios from 'axios';
import { LatLngBoundsExpression } from 'leaflet/src/geo/LatLngBounds';
import { tileLayerWMS } from 'leaflet/src/layer/tile/TileLayer.WMS';
// import { TileLayer } from 'leaflet/src/layer/tile/TileLayer';
import { Attribution } from 'leaflet/src/control/Control.Attribution';
import DrawControlComponent from '@/Components/Map/draw.component.vue';
import ZoomControlComponent from '@/Components/Map/zoom.component.vue';
@ -16,7 +17,14 @@ import { LayerGroup } from 'leaflet/src/layer/LayerGroup';
import { OpensearchDocument } from '@/Dataset';
Map.include({
// @namespace Map; @method getRenderer(layer: Path): Renderer
// Returns the instance of `Renderer` that should be used to render the given
// `Path`. It will ensure that the `renderer` options of the map and paths
// are respected, and that the renderers do exist on the map.
getRenderer: function (layer) {
// @namespace Path; @option renderer: Renderer
// Use this specific instance of `Renderer` for this path. Takes
// precedence over the map's [default renderer](#map-renderer).
var renderer = layer.options.renderer || this._getPaneRenderer(layer.options.pane) || this.options.renderer || this._renderer;
if (!renderer) {
@ -43,18 +51,21 @@ Map.include({
},
_createRenderer: function (options) {
// @namespace Map; @option preferCanvas: Boolean = false
// Whether `Path`s should be rendered on a `Canvas` renderer.
// By default, all `Path`s are rendered in a `SVG` renderer.
return (this.options.preferCanvas && canvas(options)) || svg(options);
},
});
const DEFAULT_BASE_LAYER_NAME = 'BaseLayer';
const DEFAULT_BASE_LAYER_ATTRIBUTION = '&copy; <a target="_blank" href="http://osm.org/copyright">OpenStreetMap</a> contributors';
const OPENSEARCH_HOST = 'https://catalog.geosphere.at';
// const OPENSEARCH_HOST = `${process.env.OPENSEARCH_HOST}`;
// const OPENSEARCH_HOST = `http://${process.env.OPENSEARCH_PUBLIC_HOST}`;
let map: Map;
const props = defineProps({
checkable: Boolean,
dheckable: Boolean,
datasets: {
type: Array<OpensearchDocument>,
default: () => [],
@ -78,7 +89,10 @@ const items = computed({
get() {
return props.datasets;
},
// setter
set(value) {
// Note: we are using destructuring assignment syntax here.
props.datasets.length = 0;
props.datasets.push(...value);
},
@ -89,13 +103,15 @@ const fitBounds: LatLngBoundsExpression = [
[49.0390742051, 16.9796667823],
];
// const mapId = 'map';
const drawControl: Ref<DrawControlComponent | null> = ref(null);
const southWest = ref(null);
const northEast = ref(null);
const mapService = MapService();
const isLoading = ref(false);
const filterLayerGroup = new LayerGroup();
// Replace with your actual data
// const datasets: Ref<OpensearchDocument[]> = ref([]);
onMounted(() => {
initMap();
@ -106,6 +122,7 @@ onUnmounted(() => {
});
const initMap = async () => {
// init leaflet map
map = new Map('map', props.mapOptions);
mapService.setMap(props.mapId, map);
map.scrollWheelZoom.disable();
@ -123,6 +140,11 @@ const initMap = async () => {
layers: 'OSM-WMS',
});
// let baseAt = new TileLayer('https://{s}.wien.gv.at/basemap/bmapgrau/normal/google3857/{z}/{y}/{x}.png', {
// subdomains: ['maps', 'maps1', 'maps2', 'maps3', 'maps4'],
// attribution: DEFAULT_BASE_LAYER_ATTRIBUTION,
// });
let layerOptions = {
label: DEFAULT_BASE_LAYER_NAME,
visible: true,
@ -131,15 +153,62 @@ const initMap = async () => {
layerOptions.layer.addTo(map);
map.on('Draw.Event.CREATED', handleDrawEventCreated);
// // const query = {
// // query: {
// // term: {
// // id: "103"
// // }
// // }
// // };
// // to do : call extra method:
// const query = {
// // q: 'id:103'
// // q: 'author:"Iglseder, Christoph" OR title:"Datensatz"',
// // q: 'author:"Iglseder"',
// q: '*',
// _source: 'author,bbox_xmin,bbox_xmax,bbox_ymin,bbox_ymax,abstract,title',
// size: 1000
// // qf:"title^3 author^2 subject^1",
// }
// try {
// let response = await axios({
// method: 'GET',
// url: OPEN_SEARCH_HOST + '/tethys-records/_search',
// headers: { 'Content-Type': 'application/json' },
// params: query
// });
// // Loop through the hits in the response
// response.data.hits.hits.forEach(hit => {
// // Get the geo_location attribute
// // var geo_location = hit._source.geo_location;
// let xMin = hit._source.bbox_xmin;
// let xMax = hit._source.bbox_xmax;
// let yMin = hit._source.bbox_ymin;
// let yMax = hit._source.bbox_ymax;
// var bbox: LatLngBoundsExpression = [[yMin, xMin], [yMax, xMax]];
// // Parse the WKT string to get the bounding box coordinates
// // var bbox = wktToBbox(geo_location);
// // // Add the bounding box to the map as a rectangle
// new Rectangle(bbox, { color: "#ff7800", weight: 1 }).addTo(map);
// // console.log(hit._source);
// });
// } catch (error) {
// console.error(error);
// }
};
const handleDrawEventCreated = async (event) => {
isLoading.value = true;
filterLayerGroup.clearLayers();
items.value = [];
let layer = event.layer;
let bounds = layer.getBounds();
// coverage.x_min = bounds.getSouthWest().lng;
// coverage.y_min = bounds.getSouthWest().lat;
// coverage.x_max = bounds.getNorthEast().lng;
// coverage.y_max = bounds.getNorthEast().lat;
try {
let response = await axios({
@ -156,6 +225,7 @@ const handleDrawEventCreated = async (event) => {
filter: {
geo_shape: {
geo_location: {
// replace 'location' with your geo-point field name
shape: {
type: 'envelope',
coordinates: [
@ -167,12 +237,16 @@ const handleDrawEventCreated = async (event) => {
},
},
},
// _source: 'author,bbox_xmin,bbox_xmax,bbox_ymin,bbox_ymax,abstract,title',
// "size": 1000
},
},
},
});
// Loop through the hits in the response
response.data.hits.hits.forEach((hit) => {
// Get the geo_location attribute
// var geo_location = hit._source.geo_location;
let xMin = hit._source.bbox_xmin;
let xMax = hit._source.bbox_xmax;
let yMin = hit._source.bbox_ymin;
@ -181,255 +255,46 @@ const handleDrawEventCreated = async (event) => {
[yMin, xMin],
[yMax, xMax],
];
// Parse the WKT string to get the bounding box coordinates
// var bbox = wktToBbox(geo_location);
let rect = new Rectangle(bbox, {
color: '#65DC21',
weight: 2,
fillColor: '#65DC21',
fillOpacity: 0.2,
className: 'animated-rectangle',
});
// // Add the bounding box to the map as a rectangle
let rect = new Rectangle(bbox, { color: '#ff7800', weight: 1 });
filterLayerGroup.addLayer(rect);
// add to result list
items.value.push(hit._source);
});
} catch (error) {
console.error(error);
} finally {
isLoading.value = false;
}
};
</script>
<template>
<SectionMain>
<div class="map-container-wrapper">
<!-- Loading Overlay -->
<div v-if="isLoading" class="loading-overlay">
<div class="loading-spinner"></div>
<p class="loading-text">Searching datasets...</p>
</div>
<!-- Map Instructions Banner -->
<div class="map-instructions">
<svg class="instruction-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<circle cx="12" cy="12" r="10" />
<path d="M12 16v-4M12 8h.01" />
</svg>
<p class="instruction-text">
<strong>Tip:</strong> Use the drawing tool to select an area on the map and discover datasets
</p>
</div>
<div id="map" class="map-container">
<SectionMain>
<div id="map" class="map-container mt-6 mb-6 rounded-2xl py-12 px-6 text-center dark:bg-slate-900 bg-white">
<ZoomControlComponent ref="zoomControl" :mapId="mapId"></ZoomControlComponent>
<DrawControlComponent ref="drawControl" :preserve="false" :mapId="mapId" :southWest="southWest" :northEast="northEast">
<DrawControlComponent ref="drawControl" :preserve="false" :mapId="mapId" :southWest="southWest"
:northEast="northEast">
</DrawControlComponent>
</div>
</div>
</SectionMain>
</div>
</SectionMain>
</template>
<style scoped>
.map-container-wrapper {
position: relative;
border-radius: 1rem;
overflow: hidden;
background: white;
box-shadow:
0 4px 6px -1px rgba(0, 0, 0, 0.1),
0 2px 4px -1px rgba(0, 0, 0, 0.06);
}
.dark .map-container-wrapper {
background: #1f2937;
}
/* Map Instructions Banner */
.map-instructions {
display: flex;
align-items: center;
gap: 0.75rem;
padding: 1rem 1.5rem;
background: linear-gradient(135deg, rgba(101, 220, 33, 0.1) 0%, rgba(53, 124, 6, 0.1) 100%);
border-bottom: 2px solid #e5e7eb;
}
.dark .map-instructions {
background: linear-gradient(135deg, rgba(101, 220, 33, 0.2) 0%, rgba(53, 124, 6, 0.2) 100%);
border-bottom-color: #374151;
}
.instruction-icon {
width: 1.5rem;
height: 1.5rem;
color: #65dc21;
flex-shrink: 0;
}
.instruction-text {
font-size: 0.875rem;
color: #4b5563;
margin: 0;
}
.dark .instruction-text {
color: #d1d5db;
}
.instruction-text strong {
color: #65dc21;
font-weight: 600;
}
/* Loading Overlay */
.loading-overlay {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: rgba(255, 255, 255, 0.95);
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
z-index: 1000;
backdrop-filter: blur(4px);
}
.dark .loading-overlay {
background: rgba(31, 41, 55, 0.95);
}
.loading-spinner {
width: 3rem;
height: 3rem;
border: 4px solid #e5e7eb;
border-top-color: #65dc21;
border-radius: 50%;
animation: spin 1s linear infinite;
}
.dark .loading-spinner {
border-color: #374151;
border-top-color: #65dc21;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
.loading-text {
margin-top: 1rem;
font-size: 0.875rem;
font-weight: 600;
color: #65dc21;
}
/* Map Container */
.map-container {
position: relative;
<style scoped lang="css">
/* .leaflet-container {
height: 600px;
width: 100%;
background: #f9fafb;
}
.dark .map-container {
background: #111827;
}
/* Leaflet Overrides */
:deep(.leaflet-container) {
background-color: transparent;
outline-offset: 1px;
} */
.leaflet-container {
height: 600px;
width: 100%;
background: transparent;
font-family: inherit;
}
:deep(.leaflet-container .leaflet-pane) {
z-index: 30 !important;
.leaflet-container .leaflet-pane {
z-index: 30!important;
}
/* Enhanced Rectangle Styling */
:deep(.animated-rectangle) {
animation: pulseRectangle 2s ease-in-out infinite;
}
@keyframes pulseRectangle {
0%,
100% {
opacity: 0.6;
}
50% {
opacity: 1;
}
}
/* Control Enhancements */
:deep(.leaflet-control) {
border-radius: 0.5rem;
box-shadow:
0 4px 6px -1px rgba(0, 0, 0, 0.1),
0 2px 4px -1px rgba(0, 0, 0, 0.06);
border: none;
}
:deep(.leaflet-bar a) {
border-radius: 0.5rem;
transition: all 0.2s ease;
}
:deep(.leaflet-bar a:hover) {
background: #65dc21;
color: white;
}
:deep(.leaflet-draw-toolbar a) {
background: white;
transition: all 0.2s ease;
}
.dark :deep(.leaflet-draw-toolbar a) {
background: #374151;
color: #d1d5db;
}
:deep(.leaflet-draw-toolbar a:hover) {
background: #65dc21;
}
/* Popup Enhancements */
:deep(.leaflet-popup-content-wrapper) {
border-radius: 0.75rem;
box-shadow:
0 10px 15px -3px rgba(0, 0, 0, 0.1),
0 4px 6px -2px rgba(0, 0, 0, 0.05);
}
:deep(.leaflet-popup-tip) {
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* Responsive Design */
@media (max-width: 768px) {
.map-container {
height: 400px;
}
.map-instructions {
padding: 0.75rem 1rem;
}
.instruction-text {
font-size: 0.8125rem;
}
}
@media (max-width: 640px) {
.map-container {
height: 350px;
}
}
</style>
</style>

View file

@ -1,29 +1,14 @@
<template>
<div class="draw-control-container">
<button
ref="drawButton"
class="draw-button"
:class="{ 'is-active': enabled }"
type="button"
@click.stop.prevent="toggleDraw"
:aria-label="enabled ? 'Stop drawing' : 'Start drawing'"
:aria-pressed="enabled"
>
<!-- Icon changes based on state -->
<!-- <BaseIcon
v-if="enabled"
:path="mdiClose"
:size="20"
/> -->
<BaseIcon
:path="mdiVectorRectangle"
:size="20"
/>
<!-- Status indicator -->
<!-- <span class="draw-status-badge" :class="{ 'is-active': enabled }">
{{ enabled ? 'Active' : 'Draw' }}
</span> -->
<div ref="drawControl" class="gba-control-draw btn-group-vertical map-control">
<!-- <button type="button" class="button is-light is-small" (click)="locateUser()" [ngClass]="isToggled ? 'is-primary': 'is-active'">
<fa-icon [icon]="faSearchLocation"></fa-icon>
</button> -->
<!-- -->
<button ref="inputDraw"
class="inline-flex cursor-pointer justify-center items-center whitespace-nowrap focus:outline-none transition-colors duration-150 border rounded ring-blue-700 text-black border-teal-50 hover:bg-gray-200 text-sm p-1"
type="button" :class="[_enabled ? 'cursor-not-allowed bg-cyan-200' : 'bg-teal-50 is-active']"
@click.prevent="toggleDraw">
<BaseIcon v-if="mdiDrawPen" :path="mdiDrawPen" />
</button>
</div>
</template>
@ -32,14 +17,16 @@
import { Component, Vue, Prop } from 'vue-facing-decorator';
import BaseIcon from '@/Components/BaseIcon.vue';
import { mdiVectorRectangle, mdiClose } from '@mdi/js';
import { mdiDrawPen } from '@mdi/js';
import { MapService } from '@/Stores/map.service';
import { Map } from 'leaflet';
import { Map } from 'leaflet/src/map/index';
// import { LayerGroup } from 'leaflet/src/layer/LayerGroup';
// import { LatLngBounds, Rectangle } from 'leaflet';
import { on, off, preventDefault } from 'leaflet/src/dom/DomEvent';
import { Rectangle } from 'leaflet';
import { LatLngBounds } from 'leaflet';
import { Rectangle } from 'leaflet/src/layer/vector/Rectangle';
import { LatLngBounds } from 'leaflet/src/geo/LatLngBounds';
import { LatLng } from 'leaflet';
import { LeafletMouseEvent } from 'leaflet';
@Component({
name: 'draw-control',
@ -47,19 +34,19 @@ import { LatLng } from 'leaflet';
BaseIcon,
},
})
export class DrawControlComponent extends Vue {
export default class DrawControlComponent extends Vue {
public TYPE = 'rectangle';
mdiVectorRectangle = mdiVectorRectangle;
mdiClose = mdiClose;
mdiDrawPen = mdiDrawPen;
// private featuresLayer;
options = {
shapeOptions: {
stroke: true,
color: '#65DC21',
color: '#22C55E',
weight: 4,
opacity: 0.5,
fill: true,
fillColor: '#65DC21',
fillColor: '#22C55E', //same as color by default
fillOpacity: 0.2,
clickable: true,
},
@ -69,6 +56,7 @@ export class DrawControlComponent extends Vue {
};
@Prop() public mapId: string;
// @Prop() public map: Map;
@Prop public southWest: LatLng;
@Prop public northEast: LatLng;
@Prop({
@ -77,17 +65,13 @@ export class DrawControlComponent extends Vue {
public preserve: boolean;
mapService = MapService();
private _enabled: boolean;
public _enabled: boolean;
private _map: Map;
private _isDrawing: boolean = false;
private _startLatLng: LatLng;
private _mapDraggable: boolean;
private _shape: Rectangle | undefined;
get enabled() {
return this._enabled;
}
enable() {
if (this._enabled) {
return this;
@ -109,35 +93,49 @@ export class DrawControlComponent extends Vue {
return this;
}
// enabled() {
// return !!this._enabled;
// }
enabled() {
return !!this._enabled;
}
// @Ref('inputDraw') private _inputDraw: HTMLElement;
private addHooks() {
// L.Draw.Feature.prototype.addHooks.call(this);
this._map = this.mapService.getMap(this.mapId);
if (this._map) {
this._mapDraggable = this._map.dragging.enabled();
if (this._mapDraggable) {
this._map.dragging.disable();
}
this._map.getContainer().style.cursor = 'crosshair';
//TODO refactor: move cursor to styles
// this._map.domElement.style.cursor = 'crosshair';
this._map._container.style.cursor = 'crosshair';
// this._tooltip.updateContent({text: this._initialLabelText});
this._map
.on('mousedown', this._onMouseDown, this)
.on('mousemove', this._onMouseMove, this)
.on('touchstart', this._onMouseDown, this)
.on('touchmove', this._onMouseMove, this);
.on('touchmove', this._onMouseMove, this);
// we should prevent default, otherwise default behavior (scrolling) will fire,
// and that will cause document.touchend to fire and will stop the drawing
// (circle, rectangle) in touch mode.
// (update): we have to send passive now to prevent scroll, because by default it is {passive: true} now, which means,
// handler can't event.preventDefault
// check the news https://developers.google.com/web/updates/2016/06/passive-event-listeners
// document.addEventListener('touchstart', preventDefault, { passive: false });
}
}
private removeHooks() {
// L.Draw.Feature.prototype.removeHooks.call(this);
if (this._map) {
if (this._mapDraggable) {
this._map.dragging.enable();
}
this._map.getContainer().style.cursor = '';
//TODO refactor: move cursor to styles
this._map._container.style.cursor = '';
this._map
.off('mousedown', this._onMouseDown, this)
@ -148,36 +146,46 @@ export class DrawControlComponent extends Vue {
off(document, 'mouseup', this._onMouseUp, this);
off(document, 'touchend', this._onMouseUp, this);
// document.removeEventListener('touchstart', preventDefault);
// If the box element doesn't exist they must not have moved the mouse, so don't need to destroy/return
if (this._shape && this.preserve == false) {
this._map.removeLayer(this._shape);
// delete this._shape;
this._shape = undefined;
}
}
this._isDrawing = false;
}
// private _onMouseDown(e: LeafletMouseEvent) {
private _onMouseDown(e: any) {
private _onMouseDown(e: LeafletMouseEvent) {
this._isDrawing = true;
this._startLatLng = e.latlng;
// DomEvent.on(document, 'mouseup', this._onMouseUp, this)
// .on(document, 'touchend', this._onMouseUp, this)
// .preventDefault(e.originalEvent);
on(document, 'mouseup', this._onMouseUp, this);
on(document, 'touchend', this._onMouseUp, this);
preventDefault(e.originalEvent);
}
// private _onMouseMove(e: LeafletMouseEvent) {
private _onMouseMove(e: any) {
private _onMouseMove(e: LeafletMouseEvent) {
var latlng = e.latlng;
// this._tooltip.updatePosition(latlng);
if (this._isDrawing) {
// this._tooltip.updateContent(this._getTooltipText());
this._drawShape(latlng);
}
}
private _onMouseUp() {
if (this._shape) {
this._fireCreatedEvent(this._shape);
}
// this.removeHooks();
this.disable();
if (this.options.repeatMode) {
this.enable();
@ -186,12 +194,14 @@ export class DrawControlComponent extends Vue {
private _fireCreatedEvent(shape: Rectangle) {
var rectangle = new Rectangle(shape.getBounds(), this.options.shapeOptions);
// L.Draw.SimpleShape.prototype._fireCreatedEvent.call(this, rectangle);
this._map.fire('Draw.Event.CREATED', { layer: rectangle, type: this.TYPE });
}
public removeShape() {
if (this._shape) {
this._map.removeLayer(this._shape);
// delete this._shape;
this._shape = undefined;
}
}
@ -200,6 +210,7 @@ export class DrawControlComponent extends Vue {
if (!this._shape) {
const bounds = new LatLngBounds(southWest, northEast);
this._shape = new Rectangle(bounds, this.options.shapeOptions);
// this._map.addLayer(this._shape);
this._map = this.mapService.getMap(this.mapId);
this._shape.addTo(this._map);
} else {
@ -207,10 +218,12 @@ export class DrawControlComponent extends Vue {
}
}
// from Draw Rectangle
private _drawShape(latlng: LatLng) {
if (!this._shape) {
const bounds = new LatLngBounds(this._startLatLng, latlng);
this._shape = new Rectangle(bounds, this.options.shapeOptions);
// this._map.addLayer(this._shape);
this._shape.addTo(this._map);
} else {
this._shape.setBounds(new LatLngBounds(this._startLatLng, latlng));
@ -224,336 +237,44 @@ export class DrawControlComponent extends Vue {
this.enable();
}
}
// private enable() {
// //if (this.map.mapTool) this.map.mapTool.on('editable:drawing:start', this.disable.bind(this));
// // dom.addClass(this.map.container, 'measure-enabled');
// //this.fireAndForward('showmeasure');
// this._startMarker(this.southWest, this.options);
// }
// private disable() {
// //if (this.map.mapTool) this.map.mapTool.off('editable:drawing:start', this.disable.bind(this));
// // dom.removeClass(this.map.container, 'measure-enabled');
// // this.featuresLayer.clearLayers();
// // //this.fireAndForward('hidemeasure');
// // if (this._drawingEditor) {
// // this._drawingEditor.cancelDrawing();
// // }
// }
}
export default DrawControlComponent;
</script>
<style scoped>
.draw-control-container {
position: absolute;
left: 1rem;
top: 8rem;
z-index: 1000;
<style lang="css">
.gba-control-draw {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.draw-button {
display: flex;
align-items: center;
gap: 0;
padding: 0.625rem;
background: white;
border: 2px solid #e5e7eb;
border-radius: 0.75rem;
color: #374151;
cursor: pointer;
transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
outline: none;
font-size: 0.875rem;
font-weight: 600;
position: relative;
overflow: visible;
width: 2.5rem;
height: 2.5rem;
justify-content: center;
}
.dark .draw-button {
background: #1f2937;
border-color: #374151;
color: #d1d5db;
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3);
}
/* Inactive state hover */
.draw-button:not(.is-active):hover {
background: #f9fafb;
border-color: #65DC21;
color: #357C06;
transform: translateY(-2px);
/* box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05); */
width: auto;
padding: 0.625rem 1rem;
gap: 0.5rem;
}
.dark .draw-button:not(.is-active):hover {
background: #111827;
border-color: #65DC21;
color: #65DC21;
}
/* Active state */
.draw-button.is-active {
background: linear-gradient(135deg, #65DC21 0%, #357C06 100%);
border-color: #357C06;
color: white;
box-shadow: 0 10px 15px -3px rgba(101, 220, 33, 0.4), 0 4px 6px -2px rgba(101, 220, 33, 0.2);
width: auto;
padding: 0.625rem 1rem;
gap: 0.5rem;
}
.dark .draw-button.is-active {
box-shadow: 0 10px 15px -3px rgba(101, 220, 33, 0.5), 0 4px 6px -2px rgba(101, 220, 33, 0.3);
}
/* Active state hover */
.draw-button.is-active:hover {
background: linear-gradient(135deg, #429E04 0%, #295B09 100%);
transform: translateY(-2px);
box-shadow: 0 20px 25px -5px rgba(101, 220, 33, 0.4), 0 10px 10px -5px rgba(101, 220, 33, 0.2);
}
/* Active state press */
.draw-button:active {
transform: translateY(0) scale(0.98);
}
/* Focus state */
.draw-button:focus-visible {
outline: 3px solid rgba(101, 220, 33, 0.5);
outline-offset: 2px;
}
/* Icon styling */
.draw-button :deep(svg) {
width: 1.25rem;
height: 1.25rem;
transition: transform 0.3s ease;
}
/* .draw-button.is-active :deep(svg) {
transform: rotate(90deg);
} */
/* Status badge */
.draw-status-badge {
font-size: 0.75rem;
font-weight: 700;
text-transform: uppercase;
letter-spacing: 0.05em;
transition: all 0.3s ease;
max-width: 0;
opacity: 0;
overflow: hidden;
white-space: nowrap;
}
/* Show badge on hover when inactive */
.draw-button:not(.is-active):hover .draw-status-badge {
max-width: 100px;
opacity: 1;
}
/* Show badge when active */
.draw-button.is-active .draw-status-badge {
max-width: 100px;
opacity: 1;
}
/* Pulse animation for active state */
.draw-button.is-active::before {
content: '';
border-radius: 4px;
position: absolute;
top: 50%;
left: 50%;
width: 100%;
height: 100%;
background: rgba(255, 255, 255, 0.3);
border-radius: 0.75rem;
transform: translate(-50%, -50%) scale(0);
animation: pulse 2s ease-out infinite;
pointer-events: none;
left: 10px;
top: 100px;
z-index: 40;
}
/* @keyframes pulse {
0% {
transform: translate(-50%, -50%) scale(0);
opacity: 1;
}
100% {
transform: translate(-50%, -50%) scale(1.5);
opacity: 0;
}
} */
.btn-group-vertical button {
display: block;
/* Glow effect for active state */
.draw-button.is-active::after {
content: '';
position: absolute;
top: -2px;
left: -2px;
right: -2px;
bottom: -2px;
background: linear-gradient(135deg, #65DC21, #357C06);
border-radius: 0.75rem;
opacity: 0;
z-index: -1;
transition: opacity 0.3s ease;
filter: blur(8px);
}
.draw-button.is-active:hover::after {
opacity: 0.6;
}
/* Inactive state indicator */
.draw-button:not(.is-active) .draw-status-badge {
color: #6b7280;
}
.dark .draw-button:not(.is-active) .draw-status-badge {
color: #9ca3af;
}
.draw-button:not(.is-active):hover .draw-status-badge {
color: #357C06;
}
.dark .draw-button:not(.is-active):hover .draw-status-badge {
color: #65DC21;
}
/* Active state indicator */
.draw-button.is-active .draw-status-badge {
color: white;
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2);
}
/* Tooltip on hover */
.draw-button:hover::after {
content: attr(aria-label);
position: absolute;
bottom: calc(100% + 0.5rem);
left: 50%;
transform: translateX(-50%);
background: #1f2937;
color: white;
padding: 0.375rem 0.75rem;
border-radius: 0.375rem;
font-size: 0.75rem;
white-space: nowrap;
opacity: 0;
pointer-events: none;
transition: opacity 0.2s ease;
z-index: 1001;
animation: fadeInTooltip 0.2s ease 0.5s forwards;
}
/* @keyframes fadeInTooltip {
from {
opacity: 0;
transform: translateX(-50%) translateY(4px);
}
to {
opacity: 1;
transform: translateX(-50%) translateY(0);
}
} */
/* Ripple effect on click */
.draw-button::before {
content: '';
position: absolute;
top: 50%;
left: 50%;
width: 0;
height: 0;
border-radius: 50%;
background: rgba(255, 255, 255, 0.3);
transform: translate(-50%, -50%);
transition: width 0.6s, height 0.6s;
}
.draw-button:active::before {
width: 300px;
height: 300px;
}
/* Responsive design */
@media (max-width: 768px) {
.draw-control-container {
right: 0.75rem;
top: 0.75rem;
}
.draw-button {
width: 2.25rem;
height: 2.25rem;
padding: 0.5rem;
font-size: 0.8125rem;
}
.draw-button:not(.is-active):hover,
.draw-button.is-active {
padding: 0.5rem 0.875rem;
}
.draw-button :deep(svg) {
width: 1.125rem;
height: 1.125rem;
}
.draw-status-badge {
font-size: 0.6875rem;
}
/* Hide tooltip on mobile */
.draw-button:hover::after {
display: none;
}
}
/* @media (max-width: 640px) {
.draw-control-container {
right: 0.5rem;
top: 0.5rem;
}
.draw-button {
width: 2rem;
height: 2rem;
padding: 0.5rem;
}
.draw-button:not(.is-active):hover,
.draw-button.is-active {
padding: 0.5rem 0.75rem;
}
.draw-button :deep(svg) {
width: 1rem;
height: 1rem;
}
} */
/* Accessibility: reduce motion */
@media (prefers-reduced-motion: reduce) {
.draw-button,
.draw-button :deep(svg),
.draw-status-badge {
transition: none;
}
.draw-button.is-active::before,
.draw-button.is-active::after {
animation: none;
}
margin-left: 0;
margin-top: 0.5em;
}
</style>
<style>
/* Global styles for draw mode */
.leaflet-container.draw-mode-active {
cursor: crosshair !important;
}
.leaflet-container.draw-mode-active * {
cursor: crosshair !important;
}
</style>

View file

@ -1,72 +1,21 @@
<template>
<div class="relative w-full">
<!-- Map Container -->
<div
:id="mapId"
class="relative h-[600px] w-full bg-gray-50 dark:bg-gray-900 rounded-xl overflow-hidden"
>
<div class="relative w-full h-full">
<div style="position: relative">
<!-- <Map className="h-36" :center="state.center" :zoom="state.zoom"> // map component content </Map> -->
<div :id="mapId" class="rounded">
<div class="dark:bg-slate-900 bg-slate flex flex-col">
<ZoomControlComponent ref="zoom" :mapId="mapId" />
<DrawControlComponent ref="draw" :mapId="mapId" :southWest="southWest" :northEast="northEast" />
</div>
</div>
<!-- Validate Button -->
<div class="absolute left-4 top-44 z-[1000] select-none">
<div class="gba-control-validate btn-group-vertical">
<button
class="group flex items-center justify-center relative overflow-visible outline-none font-semibold text-sm transition-all duration-300 ease-in-out
w-10 h-10 rounded-xl border-2 shadow-md
focus-visible:outline focus-visible:outline-3 focus-visible:outline-offset-2"
:class="[
validBoundingBox
? 'bg-gradient-to-br from-lime-500 to-lime-700 border-lime-700 text-white shadow-lime-500/40 cursor-default gap-2 w-auto px-4 focus-visible:outline-lime-500/50'
: 'bg-white dark:bg-gray-800 border-red-500 text-red-600 dark:text-red-400 gap-0 hover:bg-red-50 dark:hover:bg-gray-900 hover:border-red-500 hover:text-red-700 dark:hover:text-red-300 hover:-translate-y-0.5 hover:shadow-lg hover:shadow-red-500/30 hover:w-auto hover:px-4 hover:gap-2 focus-visible:outline-red-500/50'
]"
class="min-w-27 inline-flex cursor-pointer justify-center items-center whitespace-nowrap focus:outline-none transition-colors duration-150 border rounded ring-blue-700 text-black text-sm p-1"
type="button"
@click.stop.prevent="validateBoundingBox"
:aria-label="validBoundingBox ? 'Bounding box is valid' : 'Validate bounding box'"
:class="[validBoundingBox ? 'cursor-not-allowed bg-green-500 is-active' : 'bg-red-500 ']"
>
<!-- Icon -->
<BaseIcon
v-if="mdiMapCheckOutline"
:path="mdiMapCheckOutline"
:size="20"
:class="[
'transition-transform duration-300',
validBoundingBox && 'animate-[checkPulse_2s_ease-in-out_infinite]'
]"
/>
<!-- Status badge -->
<span
class="text-xs font-bold uppercase tracking-wider whitespace-nowrap transition-all duration-300 overflow-hidden"
:class="[
validBoundingBox
? 'max-w-[100px] opacity-100 text-white drop-shadow'
: 'max-w-0 opacity-0 group-hover:max-w-[100px] group-hover:opacity-100'
]"
>
{{ label }}
</span>
<!-- Pulse animation for valid state -->
<span
v-if="validBoundingBox"
class="absolute top-1/2 left-1/2 w-full h-full bg-white/30 rounded-xl -translate-x-1/2 -translate-y-1/2 animate-[pulse_2s_ease-out_infinite] pointer-events-none"
></span>
<!-- Ripple effect on click -->
<span
class="absolute top-1/2 left-1/2 w-0 h-0 rounded-full bg-white/30 -translate-x-1/2 -translate-y-1/2 transition-all duration-[600ms] active:w-[300px] active:h-[300px]"
></span>
<!-- Tooltip -->
<span
v-if="!validBoundingBox"
class="absolute left-[calc(100%+0.5rem)] top-1/2 -translate-y-1/2 px-3 py-1.5 bg-gray-800 text-white text-xs rounded-md whitespace-nowrap opacity-0 pointer-events-none transition-opacity duration-200 z-[1001] group-hover:opacity-100 group-hover:animate-[fadeInTooltip_0.2s_ease_0.5s_forwards]"
>
Click to validate
</span>
<!-- <BaseIcon v-if="mdiMapCheckOutline" :path="mdiMapCheckOutline" /> -->
{{ label }}
</button>
</div>
</div>
@ -78,7 +27,8 @@ import { Component, Vue, Prop, Ref } from 'vue-facing-decorator';
import { Map } from 'leaflet/src/map/index';
import { Control } from 'leaflet/src/control/Control';
import { LatLngBoundsExpression, LatLngBounds } from 'leaflet/src/geo/LatLngBounds';
import { LatLng } from 'leaflet';
// import { toLatLng } from 'leaflet/src/geo/LatLng';
import { LatLng } from 'leaflet'; //'leaflet/src/geo/LatLng';
import { tileLayerWMS } from 'leaflet/src/layer/tile/TileLayer.WMS';
import { Attribution } from 'leaflet/src/control/Control.Attribution';
import { mdiMapCheckOutline } from '@mdi/js';
@ -87,8 +37,6 @@ import BaseIcon from '@/Components/BaseIcon.vue';
import { MapOptions } from './MapOptions';
import { LayerOptions, LayerMap } from './LayerOptions';
import { MapService } from '@/Stores/map.service';
// import ZoomControlComponent from '@/Components/Map/zoom.component.vue';
// import DrawControlComponent from '@/Components/Map/draw.component.vue';
import ZoomControlComponent from './zoom.component.vue';
import DrawControlComponent from './draw.component.vue';
import { Coverage } from '@/Dataset';
@ -97,7 +45,14 @@ import { svg } from 'leaflet/src/layer/vector/SVG';
import Notification from '@/utils/toast';
Map.include({
// @namespace Map; @method getRenderer(layer: Path): Renderer
// Returns the instance of `Renderer` that should be used to render the given
// `Path`. It will ensure that the `renderer` options of the map and paths
// are respected, and that the renderers do exist on the map.
getRenderer: function (layer) {
// @namespace Path; @option renderer: Renderer
// Use this specific instance of `Renderer` for this path. Takes
// precedence over the map's [default renderer](#map-renderer).
var renderer = layer.options.renderer || this._getPaneRenderer(layer.options.pane) || this.options.renderer || this._renderer;
if (!renderer) {
@ -124,11 +79,15 @@ Map.include({
},
_createRenderer: function (options) {
// @namespace Map; @option preferCanvas: Boolean = false
// Whether `Path`s should be rendered on a `Canvas` renderer.
// By default, all `Path`s are rendered in a `SVG` renderer.
return (this.options.preferCanvas && canvas(options)) || svg(options);
},
});
const DEFAULT_BASE_LAYER_NAME = 'BaseLayer';
// const DEFAULT_BASE_LAYER_URL = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
const DEFAULT_BASE_LAYER_ATTRIBUTION = '&copy; <a target="_blank" href="http://osm.org/copyright">OpenStreetMap</a> contributors';
@Component({
@ -140,18 +99,32 @@ const DEFAULT_BASE_LAYER_ATTRIBUTION = '&copy; <a target="_blank" href="http://o
},
})
export default class MapComponent extends Vue {
/**
* A map with the given ID is created inside this component.
* This ID can be used the get the map instance over the map cache service.
*/
@Prop()
public mapId: string;
/**
* The corresponding leaflet map options (see: https://leafletjs.com/reference-1.3.4.html#map-option)
*/
@Prop()
public mapOptions: MapOptions;
@Prop()
public coverage: Coverage;
// markerService: MarkerService
/**
* Bounds for the map
*/
@Prop({ default: null })
public fitBounds: LatLngBoundsExpression;
/**
* Describes the the zoom control options (see: https://leafletjs.com/reference-1.3.4.html#control-zoom)
*/
@Prop()
public zoomControlOptions: Control.ZoomOptions;
@ -159,7 +132,7 @@ export default class MapComponent extends Vue {
public baseMaps: LayerMap;
get label(): string {
return this.validBoundingBox ? 'Valid' : 'Invalid';
return this.validBoundingBox ? ' valid' : 'invalid';
}
get validBoundingBox(): boolean {
@ -171,31 +144,35 @@ export default class MapComponent extends Vue {
let isBoundValid = true;
if (isValidNumber) {
let _southWest: LatLng = new LatLng(this.coverage.y_min!, this.coverage.x_min!);
let _northEast: LatLng = new LatLng(this.coverage.y_max!, this.coverage.x_max!);
let _southWest: LatLng = new LatLng(this.coverage.y_min, this.coverage.x_min);
let _northEast: LatLng = new LatLng(this.coverage.y_max, this.coverage.x_max);
const bounds = new LatLngBounds(this.southWest, this.northEast);
if (!bounds.isValid() || !(_southWest.lat < _northEast.lat && _southWest.lng < _northEast.lng)) {
// this.draw.removeShape();
// Notification.showTemporary('Bounds are not valid.');
isBoundValid = false;
}
}
return isValidNumber && isBoundValid;
}
@Ref('zoom')
private zoom: ZoomControlComponent;
@Ref('draw')
private draw: DrawControlComponent;
@Ref('zoom') private zoom: ZoomControlComponent;
@Ref('draw') private draw: DrawControlComponent;
// services:
mapService = MapService();
mdiMapCheckOutline = mdiMapCheckOutline;
southWest: LatLng;
northEast: LatLng;
/**
* Informs when initialization is done with map id.
*/
public onMapInitializedEvent: EventEmitter<string> = new EventEmitter<string>();
public map!: Map;
// protected drawnItems!: FeatureGroup<any>;
validateBoundingBox() {
if (this.validBoundingBox == false) {
@ -205,22 +182,53 @@ export default class MapComponent extends Vue {
}
this.map.control && this.map.control.disable();
var _this = this;
let _southWest: LatLng = new LatLng(this.coverage.y_min!, this.coverage.x_min!);
let _northEast: LatLng = new LatLng(this.coverage.y_max!, this.coverage.x_max!);
// // _this.locationErrors.length = 0;
// this.drawnItems.clearLayers();
// //var xmin = document.getElementById("xmin").value;
// var xmin = (<HTMLInputElement>document.getElementById("xmin")).value;
// // var ymin = document.getElementById("ymin").value;
// var ymin = (<HTMLInputElement>document.getElementById("ymin")).value;
// //var xmax = document.getElementById("xmax").value;
// var xmax = (<HTMLInputElement>document.getElementById("xmax")).value;
// //var ymax = document.getElementById("ymax").value;
// var ymax = (<HTMLInputElement>document.getElementById("ymax")).value;
// var bounds = [[ymin, xmin], [ymax, xmax]];
// let _southWest: LatLng;
// let _northEast: LatLng;
// if (this.coverage.x_min && this.coverage.y_min) {
let _southWest: LatLng = new LatLng(this.coverage.y_min, this.coverage.x_min);
// }
// if (this.coverage.x_max && this.coverage.y_max) {
let _northEast: LatLng = new LatLng(this.coverage.y_max, this.coverage.x_max);
// }
const bounds = new LatLngBounds(this.southWest, this.northEast);
if (!bounds.isValid() || !(_southWest.lat < _northEast.lat && _southWest.lng < _northEast.lng)) {
this.draw.removeShape();
Notification.showTemporary('Bounds are not valid.');
} else {
// this.draw.drawShape(_southWest, _northEast);
try {
this.draw.drawShape(_southWest, _northEast);
_this.map.fitBounds(bounds);
Notification.showSuccess('Valid bounding box');
// var boundingBox = L.rectangle(bounds, { color: "#005F6A", weight: 1 });
// // this.geolocation.xmin = xmin;
// // this.geolocation.ymin = ymin;
// // this.geolocation.xmax = xmax;
// // this.geolocation.ymax = ymax;
// _this.drawnItems.addLayer(boundingBox);
// _this.map.fitBounds(bounds);
// this.options.message = "valid bounding box";
// this.$toast.success("valid bounding box", this.options);
Notification.showSuccess('valid bounding box');
} catch (err) {
// this.options.message = e.message;
// // _this.errors.push(e);
// this.$toast.error(e.message, this.options);
Notification.showTemporary('An error occurred while drawing bounding box');
// generatingCodes.value = false;
throw err;
}
}
@ -234,11 +242,16 @@ export default class MapComponent extends Vue {
this.map.off('zoomend zoomlevelschange');
}
// @Emit(this.onMapInitializedEvent)
protected initMap(): void {
// let map: Map = (this.map = this.mapService.getMap(this.mapId));
let map: Map = (this.map = new Map(this.mapId, this.mapOptions));
this.mapService.setMap(this.mapId, map);
map.scrollWheelZoom.disable();
// return this.mapId;
// this.$emit("onMapInitializedEvent", this.mapId);
this.onMapInitializedEvent.emit(this.mapId);
this.addBaseMap();
@ -247,28 +260,45 @@ export default class MapComponent extends Vue {
map.on(
'Draw.Event.CREATED',
(event: any) => {
function (event) {
// drawnItems.clearLayers();
// var type = event.type;
var layer = event.layer;
// if (type === "rectancle") {
// layer.bindPopup("A popup!" + layer.getBounds().toBBoxString());
var bounds = layer.getBounds();
this.coverage.x_min = bounds.getSouthWest().lng;
this.coverage.y_min = bounds.getSouthWest().lat;
// console.log(this.geolocation.xmin);
this.coverage.x_max = bounds.getNorthEast().lng;
this.coverage.y_max = bounds.getNorthEast().lat;
// }
// drawnItems.addLayer(layer);
},
this,
);
// Initialise the FeatureGroup to store editable layers
// let drawnItems = (this.drawnItems = new FeatureGroup());
// map.addLayer(drawnItems);
this.map.on('zoomend zoomlevelschange', this.zoom.updateDisabled, this.zoom);
// if (this.fitBounds) {
// this.map.fitBounds(this.fitBounds);
// }
if (this.coverage.x_min && this.coverage.y_min) {
this.southWest = new LatLng(this.coverage.y_min!, this.coverage.x_min!);
this.southWest = new LatLng(this.coverage.y_min, this.coverage.x_min);
} else {
this.southWest = new LatLng(46.5, 9.9);
}
if (this.coverage.x_max && this.coverage.y_max) {
this.northEast = new LatLng(this.coverage.y_max!, this.coverage.x_max!);
this.northEast = new LatLng(this.coverage.y_max, this.coverage.x_max);
} else {
this.northEast = new LatLng(48.9, 16.9);
}
} // this.northEast = toLatLng(48.9, 16.9);
const bounds = new LatLngBounds(this.southWest, this.northEast);
map.fitBounds(bounds);
@ -288,6 +318,10 @@ export default class MapComponent extends Vue {
private addBaseMap(layerOptions?: LayerOptions): void {
if (this.map) {
if (!this.baseMaps || this.baseMaps.size === 0) {
// let bmapgrau = tileLayer('https://{s}.wien.gv.at/basemap/bmapgrau/normal/google3857/{z}/{y}/{x}.png', {
// subdomains: ['maps', 'maps1', 'maps2', 'maps3', 'maps4'],
// attribution: 'Datenquelle: <a href="http://www.basemap.at/">basemap.at</a>',
// });
let osmGgray = tileLayerWMS('https://ows.terrestris.de/osm-gray/service', {
format: 'image/png',
attribution: DEFAULT_BASE_LAYER_ATTRIBUTION,
@ -303,61 +337,45 @@ export default class MapComponent extends Vue {
}
}
}
// export default MapComponent;
</script>
<style scoped>
/* Leaflet container - only what can't be done with Tailwind */
:deep(.leaflet-container) {
<style scoped lang="css">
/* .leaflet-container {
height: 600px;
width: 100%;
background: transparent;
background-color: transparent;
outline-offset: 1px;
} */
.leaflet-container {
height: 600px;
width: 100%;
background: none;
}
:deep(.leaflet-container .leaflet-pane) {
z-index: 30 !important;
.gba-control-validate {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
cursor: pointer;
border-radius: 4px;
position: absolute;
left: 10px;
top: 150px;
z-index: 999;
}
/* Custom animations */
@keyframes checkPulse {
0%, 100% {
transform: scale(1);
}
50% {
transform: scale(1.1);
}
.btn-group-vertical button {
display: block;
margin-left: 0;
margin-top: 0.5em;
}
@keyframes pulse {
0% {
transform: translate(-50%, -50%) scale(0);
opacity: 1;
}
100% {
transform: translate(-50%, -50%) scale(1.5);
opacity: 0;
}
.leaflet-container .leaflet-pane {
z-index: 30!important;
}
@keyframes fadeInTooltip {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
/* Responsive adjustments */
@media (max-width: 768px) {
:deep(.leaflet-container) {
height: 100%;
}
}
@media (max-width: 640px) {
:deep(.leaflet-container) {
height: 100%;
}
}
</style>
/* .leaflet-pane {
z-index: 30;
} */
</style>

View file

@ -1,27 +1,21 @@
<template>
<div class="zoom-control-container">
<div class="gba-control-zoom btn-group-vertical">
<button
ref="inputPlus"
class="zoom-button zoom-button-plus"
class="disabled:bg-gray-200 inline-flex cursor-pointer justify-center items-center whitespace-nowrap focus:outline-none transition-colors duration-150 border rounded ring-blue-700 bg-teal-50 text-black border-teal-50 text-sm p-1"
type="button"
@click.stop.prevent="zoomIn"
:disabled="isZoomInDisabled"
aria-label="Zoom in"
>
<BaseIcon v-if="mdiPlus" :path="mdiPlus" :size="20" />
<BaseIcon v-if="mdiPlus" :path="mdiPlus" />
</button>
<div class="zoom-separator"></div>
<button
ref="inputMinus"
class="zoom-button zoom-button-minus"
class="disabled:bg-gray-200 inline-flex cursor-pointer justify-center items-center whitespace-nowrap focus:outline-none transition-colors duration-150 border rounded ring-blue-700 bg-teal-50 text-black border-teal-50 text-sm p-1"
type="button"
@click.stop.prevent="zoomOut"
:disabled="isZoomOutDisabled"
aria-label="Zoom out"
>
<BaseIcon v-if="mdiMinus" :path="mdiMinus" :size="20" />
<BaseIcon v-if="mdiMinus" :path="mdiMinus" />
</button>
</div>
</template>
@ -32,7 +26,6 @@ import { MapService } from '@/Stores/map.service';
import BaseIcon from '@/Components/BaseIcon.vue';
import { mdiPlus, mdiMinus } from '@mdi/js';
import { Map } from 'leaflet';
@Component({
name: 'zoom-control',
@ -40,7 +33,7 @@ import { Map } from 'leaflet';
BaseIcon,
},
})
export class ZoomControlComponent extends Vue {
export default class ZoomControlComponent extends Vue {
mdiPlus = mdiPlus;
mdiMinus = mdiMinus;
@ -53,23 +46,16 @@ export class ZoomControlComponent extends Vue {
@Ref('inputMinus') inputMinus: HTMLButtonElement;
mapService = MapService();
map: Map | null = null;
isZoomInDisabled = false;
isZoomOutDisabled = false;
map;
mounted() {
let map = (this.map = this.mapService.getMap(this.mapId));
if (map) {
map.on('zoomend zoomlevelschange', this.updateDisabled, this);
this.updateDisabled();
}
}
// mounted() {
// let map = (this.map = this.mapService.getMap(this.mapId));
// map.on('zoomend zoomlevelschange', this.updateDisabled, this);
// }
unmounted() {
if (this.map) {
this.map.off('zoomend zoomlevelschange', this.updateDisabled, this);
}
}
// unmounted() {
// this.map.off('zoomend zoomlevelschange');
// }
public zoomIn() {
let map = this.mapService.getMap(this.mapId);
@ -83,266 +69,44 @@ export class ZoomControlComponent extends Vue {
public updateDisabled() {
let map = this.mapService.getMap(this.mapId);
if (!map) return;
// let className = 'leaflet-disabled';
this.isZoomInDisabled = map.getZoom() >= map.getMaxZoom();
this.isZoomOutDisabled = map.getZoom() <= map.getMinZoom();
this.inputPlus.disabled = false;
this.inputPlus.setAttribute('aria-disabled', 'false');
if (this.inputPlus) {
this.inputPlus.disabled = this.isZoomInDisabled;
this.inputPlus.setAttribute('aria-disabled', this.isZoomInDisabled.toString());
this.inputMinus.disabled = false;
this.inputMinus.setAttribute('aria-disabled', 'false');
if (map.getZoom() === map.getMinZoom()) {
this.inputMinus.disabled = true;
this.inputMinus.setAttribute('aria-disabled', 'true');
}
if (this.inputMinus) {
this.inputMinus.disabled = this.isZoomOutDisabled;
this.inputMinus.setAttribute('aria-disabled', this.isZoomOutDisabled.toString());
if (map.getZoom() === map.getMaxZoom()) {
this.inputPlus.disabled = true;
this.inputPlus.setAttribute('aria-disabled', 'true');
}
}
}
export default ZoomControlComponent;
</script>
<style scoped>
.zoom-control-container {
position: absolute;
left: 1rem;
top: 1rem;
z-index: 1000;
display: flex;
flex-direction: column;
gap: 0;
background: white;
border-radius: 0.75rem;
box-shadow:
0 4px 6px -1px rgba(0, 0, 0, 0.1),
0 2px 4px -1px rgba(0, 0, 0, 0.06);
overflow: hidden;
<style lang="css">
.gba-control-zoom {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
transition: box-shadow 0.2s ease;
}
.zoom-control-container:hover {
box-shadow:
0 10px 15px -3px rgba(0, 0, 0, 0.1),
0 4px 6px -2px rgba(0, 0, 0, 0.05);
}
.dark .zoom-control-container {
background: #1f2937;
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3);
}
.dark .zoom-control-container:hover {
box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.5);
}
.zoom-button {
display: flex;
align-items: center;
justify-content: center;
width: 2.5rem;
height: 2.5rem;
padding: 0;
background: white;
border: none;
color: #374151;
cursor: pointer;
transition: all 0.2s ease;
position: relative;
outline: none;
}
.dark .zoom-button {
background: #1f2937;
color: #d1d5db;
}
.zoom-button:hover:not(:disabled) {
background: #65dc21;
color: white;
transform: scale(1.05);
}
.dark .zoom-button:hover:not(:disabled) {
background: #65dc21;
}
.zoom-button:active:not(:disabled) {
transform: scale(0.95);
}
.zoom-button:disabled {
cursor: not-allowed;
opacity: 0.4;
background: #f3f4f6;
color: #9ca3af;
}
.dark .zoom-button:disabled {
background: #111827;
color: #4b5563;
}
.zoom-button:focus-visible {
outline: 2px solid #65dc21;
outline-offset: -2px;
}
/* Icon sizing */
.zoom-button :deep(svg) {
width: 1.25rem;
height: 1.25rem;
}
/* Separator between buttons */
.zoom-separator {
height: 1px;
background: #e5e7eb;
}
.dark .zoom-separator {
background: #374151;
}
/* Hover effect for the plus button */
.zoom-button-plus::after {
content: '';
border-radius: 4px;
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: linear-gradient(135deg, rgba(101, 220, 33, 0.1) 0%, rgba(53, 124, 6, 0.1) 100%);
opacity: 0;
transition: opacity 0.2s ease;
pointer-events: none;
left: 10px;
top: 10px;
z-index: 40;
}
.zoom-button-plus:hover:not(:disabled)::after {
opacity: 1;
}
.btn-group-vertical button {
display: block;
/* Hover effect for the minus button */
.zoom-button-minus::after {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: linear-gradient(135deg, rgba(101, 220, 33, 0.1) 0%, rgba(53, 124, 6, 0.1) 100%);
opacity: 0;
transition: opacity 0.2s ease;
pointer-events: none;
}
.zoom-button-minus:hover:not(:disabled)::after {
opacity: 1;
}
/* Responsive design */
@media (max-width: 768px) {
.zoom-control-container {
left: 0.75rem;
top: 0.75rem;
}
.zoom-button {
width: 2.25rem;
height: 2.25rem;
}
.zoom-button :deep(svg) {
width: 1.125rem;
height: 1.125rem;
}
}
@media (max-width: 640px) {
.zoom-control-container {
left: 0.5rem;
top: 0.5rem;
}
.zoom-button {
width: 2rem;
height: 2rem;
}
.zoom-button :deep(svg) {
width: 1rem;
height: 1rem;
}
}
/* Animation for button press */
@keyframes buttonPress {
0% {
transform: scale(1);
}
50% {
transform: scale(0.95);
}
100% {
transform: scale(1);
}
}
.zoom-button:active:not(:disabled) {
animation: buttonPress 0.2s ease;
}
/* Tooltip-like effect on hover (optional) */
.zoom-button-plus:hover:not(:disabled)::before {
content: 'Zoom In';
position: absolute;
left: calc(100% + 0.5rem);
top: 50%;
transform: translateY(-50%);
background: #1f2937;
color: white;
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;
font-size: 0.75rem;
white-space: nowrap;
opacity: 0;
animation: fadeIn 0.2s ease 0.5s forwards;
pointer-events: none;
z-index: 1001;
}
.zoom-button-minus:hover:not(:disabled)::before {
content: 'Zoom Out';
position: absolute;
left: calc(100% + 0.5rem);
top: 50%;
transform: translateY(-50%);
background: #1f2937;
color: white;
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;
font-size: 0.75rem;
white-space: nowrap;
opacity: 0;
animation: fadeIn 0.2s ease 0.5s forwards;
pointer-events: none;
z-index: 1001;
}
@keyframes fadeIn {
to {
opacity: 1;
}
}
/* Hide tooltips on mobile */
@media (max-width: 768px) {
.zoom-button-plus:hover:not(:disabled)::before,
.zoom-button-minus:hover:not(:disabled)::before {
display: none;
}
margin-left: 0;
margin-top: 0.5em;
}
</style>

View file

@ -21,11 +21,12 @@ import {
mdiFormatListGroup,
mdiFormatListNumbered,
mdiLogout,
mdiGithub,
mdiThemeLightDark,
mdiViewDashboard,
mdiMapSearch,
mdiInformationVariant,
mdiGlasses,
mdiXml
} from '@mdi/js';
import NavBarItem from '@/Components/NavBarItem.vue';
import NavBarItemLabel from '@/Components/NavBarItemLabel.vue';
@ -99,8 +100,7 @@ const showAbout = async () => {
<FirstrunWizard ref="about"></FirstrunWizard>
<div class="flex lg:items-stretch" :class="containerMaxW">
<div class="flex-1 items-stretch flex h-14">
<NavBarItem type="flex lg:hidden" @click.prevent="layoutStore.asideMobileToggle()"
v-if="props.showBurger">
<NavBarItem type="flex lg:hidden" @click.prevent="layoutStore.asideMobileToggle()" v-if="props.showBurger">
<BaseIcon :path="layoutStore.isAsideMobileExpanded ? mdiBackburger : mdiForwardburger" size="24" />
</NavBarItem>
<NavBarItem type="hidden lg:flex xl:hidden" @click.prevent="menuOpenLg" v-if="props.showBurger">
@ -110,9 +110,9 @@ const showAbout = async () => {
<NavBarItemLabel :icon="mdiViewDashboard" label="Dashboard" size="22" is-hover-label-only
route-name="apps.dashboard" />
</NavBarItem>
<!-- <NavBarItem route-name="apps.map">
<NavBarItem route-name="apps.map">
<NavBarItemLabel :icon="mdiMapSearch" label="Map" size="22" is-hover-label-only route-name="apps.map" />
</NavBarItem> -->
</NavBarItem>
<!-- <NavBarItem>
<NavBarSearch />
</NavBarItem> -->
@ -169,10 +169,13 @@ const showAbout = async () => {
</NavBarItem>
<NavBarItem v-if="userHasRoles(['reviewer'])" :route-name="'reviewer.dataset.list'">
<NavBarItemLabel :icon="mdiGlasses" label="Reviewer Menu" />
</NavBarItem>
<!-- <NavBarItem @click="showAbout">
<NavBarItemLabel :icon="mdiInformationVariant" label="About" />
</NavBarItem>
<!-- <NavBarItem>
<NavBarItemLabel :icon="mdiEmail" label="Messages" />
</NavBarItem> -->
<NavBarItem @click="showAbout">
<NavBarItemLabel :icon="mdiInformationVariant" label="About" />
</NavBarItem>
<BaseDivider nav-bar />
<NavBarItem @click="logout">
<NavBarItemLabel :icon="mdiLogout" label="Log Out" />
@ -183,15 +186,12 @@ const showAbout = async () => {
<NavBarItem is-desktop-icon-only @click.prevent="toggleLightDark">
<NavBarItemLabel v-bind:icon="mdiThemeLightDark" label="Light/Dark" is-desktop-icon-only />
</NavBarItem>
<!-- <NavBarItem href="" target="_blank" is-desktop-icon-only>
<NavBarItem href="https://gitea.geosphere.at/geolba/tethys.backend" target="_blank" is-desktop-icon-only>
<NavBarItemLabel v-bind:icon="mdiGithub" label="GitHub" is-desktop-icon-only />
</NavBarItem> -->
<NavBarItem href="/oai" target="_blank" is-desktop-icon-only>
<NavBarItemLabel v-bind:icon="mdiXml" label="OAI Interface" is-desktop-icon-only />
</NavBarItem>
<!-- <NavBarItem is-desktop-icon-only @click="showAbout">
<NavBarItem is-desktop-icon-only @click="showAbout">
<NavBarItemLabel v-bind:icon="mdiInformationVariant" label="About" is-desktop-icon-only />
</NavBarItem> -->
</NavBarItem>
<NavBarItem is-desktop-icon-only @click="logout">
<NavBarItemLabel v-bind:icon="mdiLogout" label="Log out" is-desktop-icon-only />
</NavBarItem>

View file

@ -28,7 +28,7 @@
autocomplete="off"
@keydown.down="onArrowDown"
@keydown.up="onArrowUp"
@keydown.enter.prevent="onEnter"
@keydown.enter="onEnter"
/>
<svg
class="w-4 h-4 absolute left-2.5 top-3.5"

View file

@ -5,7 +5,7 @@
<div class="relative" data-te-dropdown-ref>
<button id="states-button" data-dropdown-toggle="dropdown-states"
class="whitespace-nowrap h-12 z-10 inline-flex items-center py-2.5 px-4 text-sm font-medium text-center text-gray-500 bg-gray-100 border border-gray-300 rounded-l-lg hover:bg-gray-200 focus:ring-4 focus:outline-none focus:ring-gray-100 dark:bg-gray-700 dark:hover:bg-gray-600 dark:focus:ring-gray-700 dark:text-white dark:border-gray-600"
type="button" :disabled="isReadOnly" @click.prevent="showStates">
type="button" @click.prevent="showStates">
<!-- <svg aria-hidden="true" class="h-3 mr-2" viewBox="0 0 15 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.5" width="14" height="12" rx="2" fill="white" />
<mask id="mask0_12694_49953" style="mask-type: alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="12">
@ -65,7 +65,7 @@
</svg> -->
<!-- eng -->
{{ language }}
<svg aria-hidden="true" class="w-4 h-4 ml-1" fill="currentColor" viewBox="0 0 20 20" v-if="!isReadOnly"
<svg aria-hidden="true" class="w-4 h-4 ml-1" fill="currentColor" viewBox="0 0 20 20"
xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd"
d="M5.293 7.293a1 1 0 011.414 0L10 10.586l3.293-3.293a1 1 0 111.414 1.414l-4 4a1 1 0 01-1.414 0l-4-4a1 1 0 010-1.414z"
@ -93,7 +93,7 @@
<!-- :class="inputElClass" -->
<!-- class="block p-2.5 w-full z-20 text-sm text-gray-900 bg-gray-50 rounded-r-lg border-l-gray-50 border-l-2 border border-gray-300 focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-l-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:border-blue-500" -->
<input v-model="computedValue" type="text" :name="props.name" autocomplete="off" :class="inputElClass"
placeholder="Search Keywords..." required @input="handleInput" :readonly="isReadOnly" />
placeholder="Search Keywords..." required @input="handleInput" />
<!-- v-model="data.search" -->
<svg class="w-4 h-4 absolute left-2.5 top-3.5" v-show="computedValue.length < 2"
xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
@ -101,12 +101,12 @@
d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
</svg>
<svg class="w-4 h-4 absolute left-2.5 top-3.5" v-show="computedValue.length >= 2 && !isReadOnly"
<svg class="w-4 h-4 absolute left-2.5 top-3.5" v-show="computedValue.length >= 2"
xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor" @click="() => {
computedValue = '';
data.isOpen = false;
}
">
">
<path stroke-linecap="round" stroke-linejoin="round" d="M6 18L18 6M6 6l12 12" />
</svg>
</div>
@ -166,10 +166,6 @@ let props = defineProps({
type: String,
default: '',
},
isReadOnly: {
type: Boolean,
default: false,
},
required: Boolean,
borderless: Boolean,
transparent: Boolean,
@ -194,18 +190,11 @@ const inputElClass = computed(() => {
'dark:placeholder-gray-400 dark:text-white dark:focus:border-blue-500',
'h-12',
props.borderless ? 'border-0' : 'border',
props.transparent && 'bg-transparent',
props.transparent ? 'bg-transparent' : 'bg-white dark:bg-slate-800',
// props.isReadOnly ? 'bg-gray-50 dark:bg-slate-600' : 'bg-white dark:bg-slate-800',
];
// if (props.icon) {
base.push('pl-10');
if (props.isReadOnly) {
// Read-only: no focus ring, grayed-out text and border, and disabled cursor.
base.push('bg-gray-50', 'dark:bg-slate-600', 'border', 'border-gray-300', 'dark:border-slate-600', 'text-gray-500', 'cursor-not-allowed', 'focus:outline-none', 'focus:ring-0', 'focus:border-gray-300');
} else {
// Actionable field: focus ring, white/dark background, and darker border.
base.push('bg-white dark:bg-slate-800', 'focus:ring focus:outline-none', 'border', 'border-gray-700');
}
// }
return base;
});

View file

@ -15,10 +15,6 @@ defineProps({
required: true,
},
main: Boolean,
showCogButton: {
type: Boolean,
default: false,
}
});
const hasSlot = computed(() => useSlots().default);
@ -34,6 +30,6 @@ const hasSlot = computed(() => useSlots().default);
</h1>
</div>
<slot v-if="hasSlot" />
<BaseButton v-else-if="showCogButton" :icon="mdiCog" small />
<BaseButton v-else :icon="mdiCog" small />
</section>
</template>

View file

@ -6,29 +6,10 @@ import FormField from '@/Components/FormField.vue';
import FormControl from '@/Components/FormControl.vue';
// Define props
// const props = defineProps<{
// modelValue: string,
// errors: Partial<Record<"new_password" | "old_password" | "confirm_password", string>>,
// showRequiredMessage: boolean,
// }>();
const props = defineProps({
modelValue: {
type: String,
},
errors: {
type: Object,
default: () => ({} as Partial<Record<"new_password" | "old_password" | "confirm_password", string>>),
},
showRequiredMessage: {
type: Boolean,
default:true,
},
fieldLabel: {
type: String,
default: 'New password',
}
});
const props = defineProps<{
modelValue: string;
errors: Partial<Record<"new_password" | "old_password" | "confirm_password", string>>;
}>();
const emit = defineEmits(['update:modelValue', 'score']);
@ -80,8 +61,8 @@ const passwordMetrics = computed<PasswordMetrics>(() => {
<template>
<!-- Password input Form -->
<FormField :label="fieldLabel" :help="showRequiredMessage ? 'Required. New password' : ''" :class="{'text-red-400': errors.new_password }">
<FormControl v-model="localPassword" :icon="mdiFormTextboxPassword" name="new_password" type="password" :required="showRequiredMessage"
<FormField label="New password" help="Required. New password" :class="{'text-red-400': errors.new_password }">
<FormControl v-model="localPassword" :icon="mdiFormTextboxPassword" name="new_password" type="password" required
:error="errors.new_password">
<!-- Secure Icon -->
<template #right>
@ -103,10 +84,10 @@ const passwordMetrics = computed<PasswordMetrics>(() => {
<div class="text-gray-700 text-sm">
{{ passwordMetrics.score }} / 6 points max
</div>
</FormField>
</FormField>
<!-- Password Strength Bar -->
<div v-if="passwordMetrics.score > 0"class="po-password-strength-bar w-full h-2 rounded transition-all duration-200 mb-4"
<div class="po-password-strength-bar w-full h-2 rounded transition-all duration-200 mb-4"
:class="passwordMetrics.scoreLabel" :style="{ width: `${(passwordMetrics.score / 6) * 100}%` }"
role="progressbar" :aria-valuenow="passwordMetrics.score" aria-valuemin="0" aria-valuemax="6"
:aria-label="`Password strength: ${passwordMetrics.scoreLabel || 'unknown'}`">

View file

@ -12,7 +12,6 @@ import { Subject } from '@/Dataset';
// import FormField from '@/Components/FormField.vue';
import FormControl from '@/Components/FormControl.vue';
import SearchCategoryAutocomplete from '@/Components/SearchCategoryAutocomplete.vue';
import { mdiRefresh } from '@mdi/js';
const props = defineProps({
checkable: Boolean,
@ -28,22 +27,6 @@ const props = defineProps({
type: Object,
default: () => ({}),
},
subjectsToDelete: {
type: Array<Subject>,
default: [],
}
});
const emit = defineEmits(['update:subjectsToDelete']);
// Create a computed property for subjectsToDelete with getter and setter
const deletetSubjects = computed({
get: () => props.subjectsToDelete,
set: (values: Array<Subject>) => {
props.subjectsToDelete.length = 0;
props.subjectsToDelete.push(...values);
emit('update:subjectsToDelete', values);
}
});
const styleService = StyleService();
@ -75,45 +58,21 @@ const pagesList = computed(() => {
});
const removeItem = (key: number) => {
// items.value.splice(key, 1);
const item = items.value[key];
// If the item has an ID, add it to the delete list
if (item.id) {
addToDeleteList(item);
}
// Remove from the visible list
items.value.splice(key, 1);
};
// Helper function to add a subject to the delete list
const addToDeleteList = (subject: Subject) => {
if (subject.id) {
const newList = [...props.subjectsToDelete, subject];
deletetSubjects.value = newList;
}
};
// Helper function to reactivate a subject (remove from delete list)
const reactivateSubject = (index: number) => {
const newList = [...props.subjectsToDelete];
const removedSubject = newList.splice(index, 1)[0];
deletetSubjects.value = newList;
// Add the subject back to the keywords list if it's not already there
if (removedSubject && !props.keywords.some(k => k.id === removedSubject.id)) {
props.keywords.push(removedSubject);
}
};
const isKeywordReadOnly = (item: Subject) => {
return (item.dataset_count ?? 0) > 1 || item.type !== 'uncontrolled';
};
</script>
<template>
<!-- <CardBoxModal v-model="isModalActive" title="Sample modal">
<p>Lorem ipsum dolor sit amet <b>adipiscing elit</b></p>
<p>This is sample modal</p>
</CardBoxModal>
<CardBoxModal v-model="isModalDangerActive" large-title="Please confirm" button="danger" has-cancel>
<p>Lorem ipsum dolor sit amet <b>adipiscing elit</b></p>
<p>This is sample modal</p>
</CardBoxModal> -->
<!-- <div v-if="checkedRows.length" class="p-3 bg-gray-100/50 dark:bg-slate-800">
<span v-for="checkedRow in checkedRows" :key="checkedRow.id"
@ -128,34 +87,17 @@ const isKeywordReadOnly = (item: Subject) => {
<!-- <th v-if="checkable" /> -->
<!-- <th class="hidden lg:table-cell"></th> -->
<th scope="col">Type</th>
<th scope="col" class="relative">
Value
<div class="inline-block relative ml-1 group">
<button
class="w-4 h-4 rounded-full bg-gray-200 text-gray-600 text-xs flex items-center justify-center focus:outline-none hover:bg-gray-300">
i
</button>
<div
class="absolute left-0 top-full mt-1 w-64 bg-white shadow-lg rounded-md p-3 text-xs text-left z-50 transform scale-0 origin-top-left transition-transform duration-100 group-hover:scale-100">
<p class="text-gray-700">
Keywords are only editable if they are used by a single dataset (Usage Count = 1)".
</p>
<div class="absolute -top-1 left-1 w-2 h-2 bg-white transform rotate-45"></div>
</div>
</div>
</th>
<th scope="col">Value</th>
<th scope="col">Language</th>
<th scope="col">Usage Count</th>
<th scope="col" />
</tr>
</thead>
<tbody>
<tr v-for="(item, index) in itemsPaginated" :key="index">
<td data-label="Type" scope="row">
<FormControl required v-model="item.type"
@update:modelValue="() => { item.value = ''; }" :type="'select'"
placeholder="[Enter Language]" :options="props.subjectTypes">
<FormControl required v-model="item.type" @update:modelValue="() => {item.external_key = undefined; item.value= '';}" :type="'select'" placeholder="[Enter Language]" :options="props.subjectTypes">
<div class="text-red-400 text-sm" v-if="errors[`subjects.${index}.type`]">
{{ errors[`subjects.${index}.type`].join(', ') }}
</div>
@ -163,19 +105,22 @@ const isKeywordReadOnly = (item: Subject) => {
</td>
<td data-label="Value" scope="row">
<SearchCategoryAutocomplete v-if="item.type !== 'uncontrolled'" v-model="item.value" @subject="
(result) => {
item.language = result.language;
item.external_key = result.uri;
}
" :is-read-only="item.dataset_count > 1">
<SearchCategoryAutocomplete
v-if="item.type !== 'uncontrolled'"
v-model="item.value"
@subject="
(result) => {
item.language = result.language;
item.external_key = result.uri;
}
"
>
<div class="text-red-400 text-sm" v-if="errors[`subjects.${index}.value`]">
{{ errors[`subjects.${index}.value`].join(', ') }}
</div>
</SearchCategoryAutocomplete>
<FormControl v-else required v-model="item.value" type="text" placeholder="[enter keyword value]"
:borderless="true" :is-read-only="item.dataset_count > 1">
<FormControl v-else required v-model="item.value" type="text" placeholder="[enter keyword value]" :borderless="true">
<div class="text-red-400 text-sm" v-if="errors[`subjects.${index}.value`]">
{{ errors[`subjects.${index}.value`].join(', ') }}
</div>
@ -183,24 +128,23 @@ const isKeywordReadOnly = (item: Subject) => {
</td>
<td data-label="Language" scope="row">
<FormControl required v-model="item.language" :type="'select'" placeholder="[Enter Lang]"
:options="{ de: 'de', en: 'en' }" :is-read-only="isKeywordReadOnly(item)">
<FormControl
required
v-model="item.language"
:type="'select'"
placeholder="[Enter Lang]"
:options="{ de: 'de', en: 'en' }"
:is-read-only="item.type != 'uncontrolled'"
>
<div class="text-red-400 text-sm" v-if="errors[`subjects.${index}.language`]">
{{ errors[`subjects.${index}.language`].join(', ') }}
</div>
</FormControl>
</td>
<td data-label="Usage Count" scope="row">
<div class="text-center">
{{ item.dataset_count || 1 }}
</div>
</td>
<td class="before:hidden lg:w-1 whitespace-nowrap" scope="row">
<BaseButtons type="justify-start lg:justify-end" no-wrap>
<!-- <BaseButton color="info" :icon="mdiEye" small @click="isModalActive = true" /> -->
<BaseButton color="danger" :icon="mdiTrashCan" small @click.prevent="removeItem(index)" />
<BaseButton v-if="index > 2" color="danger" :icon="mdiTrashCan" small @click.prevent="removeItem(index)" />
</BaseButtons>
</td>
</tr>
@ -211,8 +155,15 @@ const isKeywordReadOnly = (item: Subject) => {
<div class="p-3 lg:px-6 border-t border-gray-100 dark:border-slate-800">
<BaseLevel>
<BaseButtons>
<BaseButton v-for="page in pagesList" :key="page" :active="page === currentPage" :label="page + 1" small
:outline="styleService.darkMode" @click="currentPage = page" />
<BaseButton
v-for="page in pagesList"
:key="page"
:active="page === currentPage"
:label="page + 1"
small
:outline="styleService.darkMode"
@click="currentPage = page"
/>
</BaseButtons>
<small>Page {{ currentPageHuman }} of {{ numPages }}</small>
</BaseLevel>
@ -221,47 +172,6 @@ const isKeywordReadOnly = (item: Subject) => {
<div class="text-red-400 text-sm" v-if="errors.subjects && Array.isArray(errors.subjects)">
{{ errors.subjects.join(', ') }}
</div>
<!-- Subjects to delete section -->
<div v-if="deletetSubjects.length > 0" class="mt-8">
<h1 class="pt-8 pb-3 font-semibold sm:text-lg text-gray-900">Keywords To Delete</h1>
<ul id="deleteSubjects" tag="ul" class="flex flex-1 flex-wrap -m-1">
<li v-for="(element, index) in deletetSubjects" :key="index"
class="block p-1 w-1/2 sm:w-1/3 md:w-1/4 lg:w-1/6 xl:w-1/8 h-32">
<article tabindex="0"
class="bg-red-100 group w-full h-full rounded-md cursor-pointer relative shadow-sm overflow-hidden">
<section
class="flex flex-col rounded-md text-xs break-words w-full h-full z-20 absolute top-0 py-2 px-3">
<h1 class="flex-1 text-gray-700 group-hover:text-blue-800 font-medium text-sm mb-1">{{
element.value }}</h1>
<div class="flex items-center justify-between mt-auto">
<div class="flex flex-col">
<p class="p-1 size text-xs text-gray-700">
<span class="font-semibold">Type:</span> {{ element.type }}
</p>
<p class="p-1 size text-xs text-gray-700" v-if="element.dataset_count">
<span class="font-semibold">Used by:</span>
<span
class="inline-flex items-center justify-center bg-gray-200 text-gray-800 rounded-full w-5 h-5 text-xs">
{{ element.dataset_count }}
</span> datasets
</p>
</div>
<button
class="delete ml-auto focus:outline-none hover:bg-gray-300 p-1 rounded-md text-gray-800"
@click.prevent="reactivateSubject(index)">
<svg viewBox="0 0 24 24" class="w-5 h-5">
<path fill="currentColor" :d="mdiRefresh"></path>
</svg>
</button>
</div>
</section>
</article>
</li>
</ul>
</div>
</template>
<style scoped>

View file

@ -1,598 +1,273 @@
<script setup lang="ts">
import { computed, ref, watch } from 'vue';
import { computed, ref } from 'vue';
// import { MainService } from '@/Stores/main';
// import { StyleService } from '@/Stores/style.service';
import { mdiTrashCan } from '@mdi/js';
import { mdiDragVariant, mdiChevronLeft, mdiChevronRight } from '@mdi/js';
import { mdiAccount, mdiDomain } from '@mdi/js';
import { mdiDragVariant } from '@mdi/js';
import BaseIcon from '@/Components/BaseIcon.vue';
// import CardBoxModal from '@/Components/CardBoxModal.vue';
// import TableCheckboxCell from '@/Components/TableCheckboxCell.vue';
// import BaseLevel from '@/Components/BaseLevel.vue';
import BaseButtons from '@/Components/BaseButtons.vue';
import BaseButton from '@/Components/BaseButton.vue';
// import UserAvatar from '@/Components/UserAvatar.vue';
// import Person from 'App/Models/Person';
import { Person } from '@/Dataset';
import Draggable from 'vuedraggable';
import FormControl from '@/Components/FormControl.vue';
interface Props {
checkable?: boolean;
persons?: Person[];
relation: string;
contributortypes?: Record<string, string>;
errors?: Record<string, string[]>;
isLoading?: boolean;
canDelete?: boolean;
canEdit?: boolean;
canReorder?: boolean;
}
const props = withDefaults(defineProps<Props>(), {
checkable: false,
persons: () => [],
contributortypes: () => ({}),
errors: () => ({}),
isLoading: false,
canDelete: true,
canEdit: true,
canReorder: true,
const props = defineProps({
checkable: Boolean,
persons: {
type: Array<Person>,
default: () => [],
},
relation: {
type: String,
required: true,
},
contributortypes: {
type: Object,
default: () => ({}),
},
errors: {
type: Object,
default: () => ({}),
},
});
const emit = defineEmits<{
'update:persons': [value: Person[]];
'remove-person': [index: number, person: Person];
'person-updated': [index: number, person: Person];
'reorder': [oldIndex: number, newIndex: number];
}>();
// const styleService = StyleService();
// const mainService = MainService();
// const items = computed(() => props.persons);
// Local state
const perPage = ref(5);
const currentPage = ref(0);
const dragEnabled = ref(props.canReorder);
// Name type options
const nameTypeOptions = {
Personal: 'Personal',
Organizational: 'Org',
};
// Computed properties
const items = computed({
get() {
return props.persons;
},
// setter
set(value) {
// Note: we are using destructuring assignment syntax here.
props.persons.length = 0;
props.persons.push(...value);
},
});
const itemsPaginated = computed(() => {
const start = perPage.value * currentPage.value;
const end = perPage.value * (currentPage.value + 1);
return items.value.slice(start, end);
// const isModalActive = ref(false);
// const isModalDangerActive = ref(false);
const perPage = ref(5);
const currentPage = ref(0);
// const checkedRows = ref([]);
const itemsPaginated = computed({
get() {
return items.value.slice(perPage.value * currentPage.value, perPage.value * (currentPage.value + 1));
},
// setter
set(value) {
// Note: we are using destructuring assignment syntax here.
props.persons.length = 0;
props.persons.push(...value);
},
});
const numPages = computed(() => Math.ceil(items.value.length / perPage.value));
const currentPageHuman = computed(() => currentPage.value + 1);
const hasMultiplePages = computed(() => numPages.value > 1);
const showContributorTypes = computed(() => Object.keys(props.contributortypes).length > 0);
const pagesList = computed(() => {
const pages: number[] = [];
const maxVisible = 10;
const pagesList: Array<number> = [];
if (numPages.value <= maxVisible) {
for (let i = 0; i < numPages.value; i++) {
pages.push(i);
}
} else {
// Smart pagination with ellipsis
if (currentPage.value <= 2) {
for (let i = 0; i < 4; i++) pages.push(i);
pages.push(-1); // Ellipsis marker
pages.push(numPages.value - 1);
} else if (currentPage.value >= numPages.value - 3) {
pages.push(0);
pages.push(-1);
for (let i = numPages.value - 4; i < numPages.value; i++) {
pages.push(i);
}
} else {
pages.push(0);
pages.push(-1);
for (let i = currentPage.value - 1; i <= currentPage.value + 1; i++) {
pages.push(i);
}
pages.push(-1);
pages.push(numPages.value - 1);
}
for (let i = 0; i < numPages.value; i++) {
pagesList.push(i);
}
return pages;
return pagesList;
});
// Methods
const removeAuthor = (index: number) => {
const actualIndex = perPage.value * currentPage.value + index;
const person = items.value[actualIndex];
const displayName =
person.name_type === 'Organizational'
? person.last_name || person.email
: `${person.first_name || ''} ${person.last_name || person.email}`.trim();
if (confirm(`Are you sure you want to remove ${displayName}?`)) {
items.value.splice(actualIndex, 1);
emit('remove-person', actualIndex, person);
if (itemsPaginated.value.length === 0 && currentPage.value > 0) {
currentPage.value--;
}
}
const removeAuthor = (key: number) => {
items.value.splice(key, 1);
};
const updatePerson = (index: number, field: keyof Person, value: any) => {
const actualIndex = perPage.value * currentPage.value + index;
const person = items.value[actualIndex];
// const remove = (arr, cb) => {
// const newArr = [];
// Handle name_type change - clear first_name if switching to Organizational
if (field === 'name_type' && value === 'Organizational') {
person.first_name = '';
}
// arr.forEach((item) => {
// if (!cb(item)) {
// newArr.push(item);
// }
// });
(person as any)[field] = value;
emit('person-updated', actualIndex, person);
};
// return newArr;
// };
const goToPage = (page: number) => {
if (page >= 0 && page < numPages.value) {
currentPage.value = page;
}
};
const getFieldError = (index: number, field: string): string => {
const actualIndex = perPage.value * currentPage.value + index;
const errorKey = `${props.relation}.${actualIndex}.${field}`;
return props.errors[errorKey]?.join(', ') || '';
};
const handleDragEnd = (evt: any) => {
if (evt.oldIndex !== evt.newIndex) {
emit('reorder', evt.oldIndex, evt.newIndex);
}
};
// Watchers
watch(
() => props.persons.length,
() => {
if (currentPage.value >= numPages.value && numPages.value > 0) {
currentPage.value = numPages.value - 1;
}
},
);
// Pagination helper
const perPageOptions = [
{ value: 5, label: '5 per page' },
{ value: 10, label: '10 per page' },
{ value: 20, label: '20 per page' },
{ value: 50, label: '50 per page' },
];
// const checked = (isChecked, client) => {
// if (isChecked) {
// checkedRows.value.push(client);
// } else {
// checkedRows.value = remove(checkedRows.value, (row) => row.id === client.id);
// }
// };
</script>
<template>
<div class="card">
<!-- Table Controls -->
<div
v-if="hasMultiplePages"
class="flex justify-between items-center px-4 py-2.5 border-b border-gray-200 dark:border-slate-700 bg-gray-50 dark:bg-slate-800/50"
>
<div class="flex items-center gap-2">
<span class="text-xs text-gray-600 dark:text-gray-400">
{{ currentPage * perPage + 1 }}-{{ Math.min((currentPage + 1) * perPage, items.length) }} of {{ items.length }}
</span>
</div>
<select
v-model="perPage"
@change="currentPage = 0"
class="px-2 py-1 text-xs border rounded dark:bg-slate-800 dark:border-slate-600 focus:ring-2 focus:ring-blue-500 focus:border-transparent"
>
<option v-for="option in perPageOptions" :key="option.value" :value="option.value">
{{ option.label }}
</option>
</select>
</div>
<!-- <CardBoxModal v-model="isModalActive" title="Sample modal">
<p>Lorem ipsum dolor sit amet <b>adipiscing elit</b></p>
<p>This is sample modal</p>
</CardBoxModal>
<!-- Table -->
<div class="overflow-x-auto">
<table class="w-full table-compact">
<thead>
<tr class="bg-gray-50 dark:bg-slate-800/50 border-b border-gray-200 dark:border-slate-700">
<th v-if="canReorder" class="w-8 px-2 py-2" />
<th scope="col" class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 w-10">#</th>
<th class="text-left px-2 py-2 text-[10px] font-semibold text-gray-600 dark:text-gray-300 w-40">Type</th>
<th class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 min-w-[120px]">First Name</th>
<th class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 min-w-[160px]">
Last Name / Org
</th>
<th class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 min-w-[140px]">ORCID</th>
<th class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 min-w-[160px]">Email</th>
<th
v-if="showContributorTypes"
scope="col"
class="text-left px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300 w-32"
<CardBoxModal v-model="isModalDangerActive" large-title="Please confirm" button="danger" has-cancel>
<p>Lorem ipsum dolor sit amet <b>adipiscing elit</b></p>
<p>This is sample modal</p>
</CardBoxModal> -->
<!-- <div v-if="checkedRows.length" class="p-3 bg-gray-100/50 dark:bg-slate-800">
<span v-for="checkedRow in checkedRows" :key="checkedRow.id"
class="inline-block px-2 py-1 rounded-sm mr-2 text-sm bg-gray-100 dark:bg-slate-700">
{{ checkedRow.name }}
</span>
</div> -->
<table>
<thead>
<tr>
<!-- <th v-if="checkable" /> -->
<th />
<th scope="col">Sort</th>
<th scope="col">Id</th>
<!-- <th class="hidden lg:table-cell"></th> -->
<th>First Name</th>
<th>Last Name</th>
<th>Email</th>
<th scope="col" v-if="Object.keys(contributortypes).length">
<span>Type</span>
</th>
<!-- <th>Name Type</th> -->
<!-- <th>Progress</th> -->
<!-- <th>Created</th> -->
<th />
</tr>
</thead>
<!-- <tbody> -->
<!-- <tr v-for="(client, index) in itemsPaginated" :key="client.id"> -->
<draggable id="galliwasery" tag="tbody" v-model="items" item-key="id">
<template #item="{ index, element }">
<tr>
<td class="drag-icon">
<BaseIcon :path="mdiDragVariant" />
</td>
<td scope="row">{{ index + 1 }}</td>
<td data-label="Id">{{ element.id }}</td>
<!-- <TableCheckboxCell v-if="checkable" @checked="checked($event, client)" /> -->
<!-- <td v-if="element.name" class="border-b-0 lg:w-6 before:hidden hidden lg:table-cell">
<UserAvatar :username="element.name" class="w-24 h-24 mx-auto lg:w-6 lg:h-6" />
</td> -->
<td data-label="First Name">
<!-- {{ element.first_name }} -->
<FormControl
required
v-model="element.first_name"
type="text" :is-read-only="element.status==true"
placeholder="[FIRST NAME]"
>
Role
</th>
<th v-if="canDelete" class="w-16 px-2 py-2 text-xs font-semibold text-gray-600 dark:text-gray-300">Actions</th>
</tr>
</thead>
<!-- Draggable tbody for non-paginated view -->
<draggable
v-if="canReorder && !hasMultiplePages"
tag="tbody"
v-model="items"
item-key="id"
:disabled="!dragEnabled || isLoading"
@end="handleDragEnd"
handle=".drag-handle"
>
<template #item="{ index, element }">
<tr
class="border-b border-gray-100 dark:border-slate-800 hover:bg-blue-50 dark:hover:bg-slate-800/70 transition-colors"
<div
class="text-red-400 text-sm"
v-if="errors && Array.isArray(errors[`${relation}.${index}.first_name`])"
>
{{ errors[`${relation}.${index}.first_name`].join(', ') }}
</div>
</FormControl>
</td>
<td data-label="Last Name">
<FormControl
required
v-model="element.last_name"
type="text" :is-read-only="element.status==true"
placeholder="[LAST NAME]"
>
<td v-if="canReorder" class="px-2 py-2">
<div class="drag-handle cursor-move text-gray-400 hover:text-gray-600 dark:hover:text-gray-300">
<BaseIcon :path="mdiDragVariant" :size="18" />
</div>
</td>
<td class="px-2 py-2 text-xs text-gray-600 dark:text-gray-400">{{ index + 1 }}</td>
<!-- Name Type Selector -->
<td class="px-2 py-2">
<div class="flex items-center gap-1.5">
<BaseIcon
:path="element.name_type === 'Organizational' ? mdiDomain : mdiAccount"
:size="16"
:class="element.name_type === 'Organizational' ? 'text-purple-500' : 'text-blue-500'"
:title="element.name_type"
/>
<FormControl
required
v-model="element.name_type"
type="select"
:options="nameTypeOptions"
:is-read-only="element.status == true"
class="text-[8px] compact-select-mini flex-1"
/>
</div>
<div
class="text-red-500 text-[8px] mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.name_type`])"
>
{{ errors[`${relation}.${index}.name_type`][0] }}
</div>
</td>
<!-- First Name - Only shown for Personal type -->
<td class="px-2 py-2">
<FormControl
v-if="element.name_type !== 'Organizational'"
required
v-model="element.first_name"
type="text"
:is-read-only="element.status == true"
placeholder="First name"
class="text-xs compact-input"
/>
<span v-else class="text-gray-400 text-xs italic"></span>
<div
class="text-red-500 text-xs mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.first_name`])"
>
{{ errors[`${relation}.${index}.first_name`][0] }}
</div>
</td>
<!-- Last Name / Organization Name -->
<td class="px-2 py-2">
<FormControl
required
v-model="element.last_name"
type="text"
:is-read-only="element.status == true"
:placeholder="element.name_type === 'Organizational' ? 'Organization' : 'Last name'"
class="text-xs compact-input"
/>
<div
class="text-red-500 text-xs mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.last_name`])"
>
{{ errors[`${relation}.${index}.last_name`][0] }}
</div>
</td>
<!-- Orcid -->
<td class="px-2 py-2">
<FormControl
v-model="element.identifier_orcid"
type="text"
:is-read-only="element.status == true"
placeholder="0000-0000-0000-0000"
class="text-xs compact-input font-mono"
/>
<div
class="text-red-500 text-xs mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.identifier_orcid`])"
>
{{ errors[`${relation}.${index}.identifier_orcid`][0] }}
</div>
</td>
<!-- Email -->
<td class="px-2 py-2">
<FormControl
required
v-model="element.email"
type="email"
:is-read-only="element.status == true"
placeholder="email@example.com"
class="text-xs compact-input"
/>
<div
class="text-red-500 text-xs mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.email`])"
>
{{ errors[`${relation}.${index}.email`][0] }}
</div>
</td>
<!-- Contributor Type -->
<td v-if="Object.keys(contributortypes).length" class="px-2 py-2">
<FormControl
required
v-model="element.pivot_contributor_type"
type="select"
:options="contributortypes"
placeholder="Role"
class="text-xs compact-select"
/>
<div
class="text-red-500 text-xs mt-0.5"
v-if="errors && Array.isArray(errors[`${relation}.${index}.pivot_contributor_type`])"
>
{{ errors[`${relation}.${index}.pivot_contributor_type`][0] }}
</div>
</td>
<!-- Actions -->
<td class="px-2 py-2 whitespace-nowrap">
<BaseButton
color="danger"
:icon="mdiTrashCan"
small
@click.prevent="removeAuthor(index)"
class="compact-button"
/>
</td>
</tr>
</template>
</draggable>
<!-- Non-draggable tbody for paginated view -->
<tbody v-else>
<tr
v-for="(element, index) in itemsPaginated"
:key="element.id || index"
class="border-b border-gray-100 dark:border-slate-800 hover:bg-blue-50 dark:hover:bg-slate-800/70 transition-colors"
>
<td class="px-2 py-2 text-gray-400">
<BaseIcon v-if="canReorder && !hasMultiplePages" :path="mdiDragVariant" :size="18" />
</td>
<td class="px-2 py-2 text-xs text-gray-600 dark:text-gray-400">{{ currentPage * perPage + index + 1 }}</td>
<!-- Name Type Selector -->
<td class="px-2 py-2">
<div class="flex items-center gap-1.5">
<BaseIcon
:path="element.name_type === 'Organizational' ? mdiDomain : mdiAccount"
:size="16"
:class="element.name_type === 'Organizational' ? 'text-purple-500' : 'text-blue-500'"
:title="element.name_type"
/>
<FormControl
required
v-model="element.name_type"
type="select"
:options="nameTypeOptions"
:is-read-only="element.status == true"
class="text-xs compact-select"
:error="getFieldError(index, 'name_type')"
/>
<div
class="text-red-400 text-sm"
v-if="errors && Array.isArray(errors[`${relation}.${index}.last_name`])"
>
{{ errors[`${relation}.${index}.last_name`].join(', ') }}
</div>
<div v-if="getFieldError(index, 'name_type')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'name_type') }}
</FormControl>
</td>
<td data-label="Email">
<FormControl
required
v-model="element.email"
type="text" :is-read-only="element.status==true"
placeholder="[EMAIL]"
>
<div
class="text-red-400 text-sm"
v-if="errors && Array.isArray(errors[`${relation}.${index}.email`])"
>
{{ errors[`${relation}.${index}.email`].join(', ') }}
</div>
</td>
<!-- First Name -->
<td class="px-2 py-2">
<FormControl
v-if="element.name_type !== 'Organizational'"
required
:model-value="element.first_name"
@update:model-value="updatePerson(index, 'first_name', $event)"
type="text"
:is-read-only="element.status || !canEdit"
placeholder="First name"
class="text-xs compact-input"
:error="getFieldError(index, 'first_name')"
/>
<span v-else class="text-gray-400 text-xs italic"></span>
<div v-if="getFieldError(index, 'first_name')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'first_name') }}
</FormControl>
</td>
<td v-if="Object.keys(contributortypes).length">
<!-- <select type="text" v-model="element.pivot.contributor_type">
<option v-for="(option, i) in contributortypes" :value="option" :key="i">
{{ option }}
</option>
</select> -->
<FormControl
required
v-model="element.pivot_contributor_type"
type="select"
:options="contributortypes"
placeholder="[relation type]"
>
<div
class="text-red-400 text-sm"
v-if="errors && Array.isArray(errors[`${relation}.${index}.pivot_contributor_type`])"
>
{{ errors[`${relation}.${index}.pivot_contributor_type`].join(', ') }}
</div>
</td>
<!-- Last Name / Organization -->
<td class="px-2 py-2">
<FormControl
required
:model-value="element.last_name"
@update:model-value="updatePerson(index, 'last_name', $event)"
type="text"
:is-read-only="element.status || !canEdit"
:placeholder="element.name_type === 'Organizational' ? 'Organization' : 'Last name'"
class="text-xs compact-input"
:error="getFieldError(index, 'last_name')"
/>
<div v-if="getFieldError(index, 'last_name')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'last_name') }}
</div>
</td>
<!-- Orcid -->
<td class="px-2 py-2">
<FormControl
:model-value="element.identifier_orcid"
@update:model-value="updatePerson(index, 'identifier_orcid', $event)"
type="text"
:is-read-only="element.status || !canEdit"
placeholder="0000-0000-0000-0000"
class="text-xs compact-input font-mono"
:error="getFieldError(index, 'identifier_orcid')"
/>
<div v-if="getFieldError(index, 'identifier_orcid')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'identifier_orcid') }}
</div>
</td>
<!-- Email -->
<td class="px-2 py-2">
<FormControl
required
:model-value="element.email"
@update:model-value="updatePerson(index, 'email', $event)"
type="email"
:is-read-only="element.status || !canEdit"
placeholder="email@example.com"
class="text-xs compact-input"
:error="getFieldError(index, 'email')"
/>
<div v-if="getFieldError(index, 'email')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'email') }}
</div>
</td>
<!-- Contributor Type -->
<td v-if="showContributorTypes" class="px-2 py-2">
<FormControl
required
:model-value="element.pivot_contributor_type"
@update:model-value="updatePerson(index, 'pivot_contributor_type', $event)"
type="select"
:options="contributortypes"
:is-read-only="!canEdit"
placeholder="Role"
class="text-xs compact-select"
:error="getFieldError(index, 'pivot_contributor_type')"
/>
<div v-if="getFieldError(index, 'pivot_contributor_type')" class="text-red-500 text-xs mt-0.5">
{{ getFieldError(index, 'pivot_contributor_type') }}
</div>
</td>
<!-- Actions -->
<td v-if="canDelete" class="px-2 py-2 whitespace-nowrap">
<BaseButton
color="danger"
:icon="mdiTrashCan"
small
@click.prevent="removeAuthor(index)"
title="Remove person"
class="compact-button"
/>
</td>
</tr>
<!-- Empty State -->
<!-- <tr v-if="items.length === 0">
<td :colspan="showContributorTypes ? 9 : 8" class="text-center py-12 text-gray-400">
<div class="flex flex-col items-center gap-2">
<BaseIcon :path="mdiBookOpenPageVariant" :size="32" class="text-gray-300" />
<span class="text-sm">No persons added yet</span>
</div>
</td>
</tr>
</tbody>-if="canDelete" class="p-3">
<BaseButtons type="justify-start lg:justify-end" no-wrap>
<BaseButton
color="danger"
:icon="mdiTrashCan"
small
@click="removeAuthor(index)"
:disabled="element.status || !canEdit"
title="Remove person"
/>
</BaseButtons>
</td>
</tr>
<tr v-if="items.length === 0">
<td :colspan="showContributorTypes ? 10 : 9" class="text-center p-8 text-gray-500">
No persons added yet
</td>
</tr> -->
</tbody>
</table>
</div>
<!-- Pagination -->
<div v-if="hasMultiplePages" class="flex justify-between items-center p-3 border-t border-gray-200 dark:border-slate-700">
<div class="flex gap-1">
<BaseButton :disabled="currentPage === 0" @click="goToPage(currentPage - 1)" :icon="mdiChevronLeft" small outline />
<template v-for="(page, i) in pagesList" :key="i">
<span v-if="page === -1" class="px-3 py-1">...</span>
<BaseButton
v-else
@click="goToPage(page)"
:label="String(page + 1)"
:color="page === currentPage ? 'info' : 'whiteDark'"
small
:outline="page !== currentPage"
/>
</template>
</FormControl>
</td>
<!-- <td data-label="Name Type">
{{ client.name_type }}
</td> -->
<!-- <td data-label="Orcid">
{{ client.identifier_orcid }}
</td> -->
<!-- <td data-label="Progress" class="lg:w-32">
<progress class="flex w-2/5 self-center lg:w-full" max="100" v-bind:value="client.progress">
{{ client.progress }}
</progress>
</td> -->
<td class="before:hidden lg:w-1 whitespace-nowrap">
<BaseButtons type="justify-start lg:justify-end" no-wrap>
<!-- <BaseButton color="info" :icon="mdiEye" small @click="isModalActive = true" /> -->
<BaseButton color="danger" :icon="mdiTrashCan" small @click.prevent="removeAuthor(index)" />
</BaseButtons>
</td>
</tr>
</template>
</draggable>
<!-- </tbody> -->
</table>
<!-- :class="[ pagesList.length > 1 ? 'block' : 'hidden']" -->
<div class="p-3 lg:px-6 border-t border-gray-100 dark:border-slate-800">
<!-- <BaseLevel>
<BaseButtons>
<BaseButton
:disabled="currentPage >= numPages - 1"
@click="goToPage(currentPage + 1)"
:icon="mdiChevronRight"
v-for="page in pagesList"
:key="page"
:active="page === currentPage"
:label="page + 1"
small
outline
:outline="styleService.darkMode"
@click="currentPage = page"
/>
</div>
<span class="text-sm text-gray-600 dark:text-gray-400"> Page {{ currentPageHuman }} of {{ numPages }} </span>
</div>
</BaseButtons>
<small>Page {{ currentPageHuman }} of {{ numPages }}</small>
</BaseLevel> -->
</div>
</template>
<style lang="postcss" scoped>
.drag-handle {
transition: color 0.2s;
}
.card {
@apply bg-white dark:bg-slate-900 rounded-lg shadow-sm;
}
@media (max-width: 768px) {
table {
font-size: 0.875rem;
}
th,
td {
padding: 0.5rem !important;
}
}
</style>
</template>

View file

@ -1,287 +0,0 @@
<template>
<!-- Dismissible "unsaved changes" banner: title/message, optional change
details, optional Save/Dismiss actions, and an auto-save countdown bar. -->
<Transition
enter-active-class="transition ease-out duration-300"
enter-from-class="opacity-0 transform -translate-y-2"
enter-to-class="opacity-100 transform translate-y-0"
leave-active-class="transition ease-in duration-200"
leave-from-class="opacity-100 transform translate-y-0"
leave-to-class="opacity-0 transform -translate-y-2"
>
<div v-if="show" class="mb-4 p-4 bg-amber-50 border border-amber-200 rounded-lg shadow-sm" role="alert" aria-live="polite">
<div class="flex items-start">
<div class="flex-shrink-0">
<WarningTriangleIcon class="h-5 w-5 text-amber-500" aria-hidden="true" />
</div>
<div class="ml-3 flex-1">
<h3 class="text-sm font-medium text-amber-800">
{{ title }}
</h3>
<div class="mt-1 text-sm text-amber-700">
<p>{{ message }}</p>
<!-- Optional detailed list of changes -->
<div v-if="showDetails && changesSummary.length > 0" class="mt-2">
<button
type="button"
@click.stop="toggleDetails"
class="text-amber-800 underline hover:text-amber-900 focus:outline-none focus:ring-2 focus:ring-amber-500 focus:ring-offset-2 focus:ring-offset-amber-50 rounded"
>
{{ detailsVisible ? 'Hide details' : 'Show details' }}
</button>
<!-- Collapsible change list (max-height animated via scoped styles) -->
<Transition
enter-active-class="transition ease-out duration-200"
enter-from-class="opacity-0 max-h-0"
enter-to-class="opacity-100 max-h-40"
leave-active-class="transition ease-in duration-150"
leave-from-class="opacity-100 max-h-40"
leave-to-class="opacity-0 max-h-0"
>
<div v-if="detailsVisible" class="mt-2 overflow-hidden">
<ul class="text-xs text-amber-600 space-y-1">
<li v-for="change in changesSummary" :key="change" class="flex items-center">
<div class="w-1 h-1 bg-amber-400 rounded-full mr-2"></div>
{{ change }}
</li>
</ul>
</div>
</Transition>
</div>
</div>
</div>
<!-- Action buttons -->
<div v-if="showActions" class="ml-4 flex-shrink-0 flex space-x-2">
<button
v-if="onSave"
type="button"
@click.stop="handleSave"
:disabled="isSaving"
class="bg-amber-100 text-amber-800 px-3 py-1 rounded text-sm font-medium hover:bg-amber-200 focus:outline-none focus:ring-2 focus:ring-amber-500 focus:ring-offset-2 focus:ring-offset-amber-50 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
>
<span v-if="!isSaving">Save Now</span>
<span v-else class="flex items-center">
<LoadingSpinner class="w-3 h-3 mr-1" />
Saving...
</span>
</button>
<button
v-if="onDismiss"
type="button"
@click="handleDismiss"
class="text-amber-600 hover:text-amber-700 focus:outline-none focus:ring-2 focus:ring-amber-500 focus:ring-offset-2 focus:ring-offset-amber-50 rounded p-1"
:title="dismissLabel"
>
<XMarkIcon class="h-4 w-4" aria-hidden="true" />
<span class="sr-only">{{ dismissLabel }}</span>
</button>
</div>
</div>
<!-- Progress indicator for auto-save -->
<div v-if="showAutoSaveProgress && autoSaveCountdown > 0" class="mt-3">
<div class="flex items-center justify-between text-xs text-amber-600">
<span>Auto-save in {{ autoSaveCountdown }}s</span>
<button @click="cancelAutoSave" class="underline hover:text-amber-700">Cancel</button>
</div>
<div class="mt-1 w-full bg-amber-200 rounded-full h-1">
<div
class="bg-amber-500 h-1 rounded-full transition-all duration-1000 ease-linear"
:style="{ width: `${((initialCountdown - autoSaveCountdown) / initialCountdown) * 100}%` }"
></div>
</div>
</div>
</div>
</Transition>
</template>
<script setup lang="ts">
import { ref, computed, onMounted, onUnmounted, watch, defineComponent } from 'vue';
// Icons - you can replace these with your preferred icon library
// Inline warning-triangle glyph, defined locally so this component has no
// icon-library dependency.
const WarningTriangleIcon = defineComponent({
template: `
<svg viewBox="0 0 20 20" fill="currentColor">
<path fill-rule="evenodd" d="M8.257 3.099c.765-1.36 2.722-1.36 3.486 0l5.58 9.92c.75 1.334-.213 2.98-1.742 2.98H4.42c-1.53 0-2.493-1.646-1.743-2.98l5.58-9.92zM11 13a1 1 0 11-2 0 1 1 0 012 0zm-1-8a1 1 0 00-1 1v3a1 1 0 002 0V6a1 1 0 00-1-1z" clip-rule="evenodd" />
</svg>
`,
});
// Inline close ("x") glyph used by the dismiss button.
const XMarkIcon = defineComponent({
template: `
<svg viewBox="0 0 20 20" fill="currentColor">
<path d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z" />
</svg>
`,
});
// Inline animated spinner shown on the Save button while a save is in flight.
const LoadingSpinner = defineComponent({
template: `
<svg class="animate-spin" fill="none" viewBox="0 0 24 24">
<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
<path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
</svg>
`,
});
// Public contract of the unsaved-changes warning component.
interface Props {
// Control visibility
show?: boolean;
// Content
title?: string;
message?: string;
changesSummary?: string[]; // one human-readable line per pending change
// Behavior
showDetails?: boolean; // offer the expandable change-list section
showActions?: boolean; // render the Save Now / dismiss buttons
showAutoSaveProgress?: boolean; // render the countdown progress bar
autoSaveDelay?: number; // seconds
// Callbacks
onSave?: () => Promise<void> | void;
onDismiss?: () => void;
onAutoSave?: () => Promise<void> | void;
// Labels
dismissLabel?: string;
}
const props = withDefaults(defineProps<Props>(), {
show: true,
title: 'You have unsaved changes',
message: 'Your changes will be lost if you leave this page without saving.',
changesSummary: () => [],
showDetails: false,
showActions: true,
showAutoSaveProgress: false,
autoSaveDelay: 30,
dismissLabel: 'Dismiss warning',
});
// Events mirror the callback props so consumers may use either mechanism.
const emit = defineEmits<{
save: [];
dismiss: [];
autoSave: [];
}>();
// Local state
const detailsVisible = ref(false); // change-list expanded?
const isSaving = ref(false); // a manual save is in flight
const autoSaveCountdown = ref(0); // seconds remaining until auto-save
const initialCountdown = ref(0); // countdown start value (drives the bar width)
// NOTE(review): autoSaveTimer is never assigned anywhere in this component —
// countdownTimer alone drives the auto-save; confirm whether this is dead state.
let autoSaveTimer: NodeJS.Timeout | null = null;
let countdownTimer: NodeJS.Timeout | null = null;
// Methods
// Expand or collapse the detailed list of pending changes.
const toggleDetails = () => {
    const expanded = detailsVisible.value;
    detailsVisible.value = !expanded;
};
// Run the consumer-supplied save callback, tracking progress via `isSaving`
// and announcing completion through the `save` event. A click while a save
// is already running is ignored.
const handleSave = async () => {
    if (isSaving.value) return;
    isSaving.value = true;
    try {
        await props.onSave?.();
        emit('save');
    } catch (error) {
        console.error('Save failed:', error);
        // You might want to emit an error event here
    } finally {
        isSaving.value = false;
    }
};
// Dismiss the warning: notify the consumer (callback then event) and halt
// any pending auto-save countdown.
const handleDismiss = () => {
    if (props.onDismiss) {
        props.onDismiss();
    }
    emit('dismiss');
    stopAutoSave();
};
// Begin the auto-save countdown. No-ops when no auto-save callback was
// provided or when a countdown is already running.
//
// Fix: the guard previously tested `autoSaveTimer`, which is never assigned
// anywhere in this component, so a second call (e.g. the `show` watcher firing
// after onMounted already started the countdown) silently overwrote
// `countdownTimer` and leaked an orphaned setInterval that could never be
// cleared. Guarding on `countdownTimer` — the interval actually created
// here — makes the re-entry check effective.
const startAutoSave = () => {
    if (!props.onAutoSave || countdownTimer) return;
    autoSaveCountdown.value = props.autoSaveDelay;
    initialCountdown.value = props.autoSaveDelay;
    // Tick once per second; when the countdown reaches zero, trigger the save.
    countdownTimer = setInterval(() => {
        autoSaveCountdown.value--;
        if (autoSaveCountdown.value <= 0) {
            executeAutoSave();
        }
    }, 1000);
};
// Fire the auto-save callback once the countdown elapses. Timers are torn
// down first so the save cannot overlap another countdown; on failure the
// countdown is restarted while the warning is still visible.
const executeAutoSave = async () => {
    stopAutoSave();
    try {
        if (props.onAutoSave) {
            await props.onAutoSave();
        }
        emit('autoSave');
    } catch (err) {
        console.error('Auto-save failed:', err);
        // Retry transient failures, but only while the warning is shown.
        if (props.show) {
            startAutoSave();
        }
    }
};
// User-initiated cancel of the auto-save countdown (the "Cancel" link).
const cancelAutoSave = () => stopAutoSave();
// Tear down any running timers and reset the visible countdown to zero.
const stopAutoSave = () => {
    if (autoSaveTimer !== null) {
        clearTimeout(autoSaveTimer);
        autoSaveTimer = null;
    }
    if (countdownTimer !== null) {
        clearInterval(countdownTimer);
        countdownTimer = null;
    }
    autoSaveCountdown.value = 0;
};
// Watchers
// Start the countdown whenever the warning becomes visible (and auto-save is
// configured); stop it whenever the warning is hidden.
watch(
() => props.show,
(newShow) => {
if (newShow && props.showAutoSaveProgress && props.onAutoSave) {
startAutoSave();
} else if (!newShow) {
stopAutoSave();
}
},
);
// Lifecycle
// The watcher above does not fire for the initial prop value, so mirror the
// same condition on mount.
onMounted(() => {
if (props.show && props.showAutoSaveProgress && props.onAutoSave) {
startAutoSave();
}
});
// Always clear timers on teardown so no interval outlives the component.
onUnmounted(() => {
stopAutoSave();
});
</script>
<style scoped>
/* Additional custom styles if needed */
/* Max-height utilities referenced by the details <Transition> classes. */
.max-h-0 {
max-height: 0;
}
.max-h-40 {
max-height: 10rem;
}
</style>

View file

@ -132,25 +132,13 @@ export interface Description {
export interface Person {
id?: number;
// Name fields
first_name?: string;
last_name?: string; // Also used for organization name
name?: string; // Alternative full name field
name?: string;
email: string;
name_type?: string;
// Additional identifiers
identifier_orcid?: string;
// Status and metadata
status: boolean; // true = read-only/locked, false = editable
created_at?: string;
updated_at?: string;
// Statistics
datasetCount?: string;
// Relationship data (for many-to-many relationships)
pivot_contributor_type?: string; // Type of contribution (e.g., 'Author', 'Editor', 'Contributor')
created_at?: string;
status: boolean;
}
interface IErrorMessage {

View file

@ -14,11 +14,11 @@ const props = defineProps({
showAsideMenu: {
type: Boolean,
default: true // Set default value to true
},
hasProgressBar: {
type: Boolean,
default: false // New prop to indicate if progress bar is shown
}
// user: {
// type: Object,
// default: () => ({}),
// }
});
</script>
@ -29,18 +29,9 @@ const props = defineProps({
}">
<div :class="{
'ml-60 lg:ml-0': layoutService.isAsideMobileExpanded,
'xl:pl-60': props.showAsideMenu==true,
'pt-14': !props.hasProgressBar,
'pt-24': props.hasProgressBar // Increased padding when progress bar is present (pt-14 + height of progress bar)
}"
class="min-h-screen w-screen transition-position lg:w-auto bg-gray-50 dark:bg-slate-800 dark:text-slate-100">
<NavBar
:class="{
'ml-60 lg:ml-0': layoutService.isAsideMobileExpanded,
'top-10': props.hasProgressBar // Push NavBar down when progress bar is present
}"
:showBurger="props.showAsideMenu"
/>
'xl:pl-60': props.showAsideMenu==true }"
class="pt-14 min-h-screen w-screen transition-position lg:w-auto bg-gray-50 dark:bg-slate-800 dark:text-slate-100">
<NavBar :class="{ 'ml-60 lg:ml-0': layoutService.isAsideMobileExpanded }" :showBurger="props.showAsideMenu" />
<!-- Conditionally render AsideMenu based on showAsideMenu prop -->
<template v-if="showAsideMenu">
<AsideMenu />

View file

@ -1,6 +1,6 @@
<script lang="ts" setup>
import { Head, usePage } from '@inertiajs/vue3';
import { mdiLicense, mdiCheckCircle, mdiCloseCircle, mdiAlertBoxOutline } from '@mdi/js';
import { usePage } from '@inertiajs/vue3';
import { mdiAccountKey, mdiSquareEditOutline, mdiAlertBoxOutline } from '@mdi/js';
import { computed, ComputedRef } from 'vue';
import LayoutAuthenticated from '@/Layouts/LayoutAuthenticated.vue';
import SectionMain from '@/Components/SectionMain.vue';
@ -9,150 +9,107 @@ import BaseButton from '@/Components/BaseButton.vue';
import CardBox from '@/Components/CardBox.vue';
import BaseButtons from '@/Components/BaseButtons.vue';
import NotificationBar from '@/Components/NotificationBar.vue';
// import Pagination from '@/Components/Admin/Pagination.vue';
// import Sort from '@/Components/Admin/Sort.vue';
import { stardust } from '@eidellev/adonis-stardust/client';
// import CardBoxModal from '@/Components/CardBoxModal.vue';
interface License {
id: number;
name: string;
sort_order: number;
active: boolean;
}
// const isModalDangerActive = ref(false);
// const deleteId = ref();
const props = defineProps({
defineProps({
licenses: {
type: Array<License>,
default: () => [],
type: Object,
default: () => ({}),
},
// filters: {
// type: Object,
// default: () => ({}),
// },
can: {
type: Object,
default: () => ({}),
},
});
const flash: ComputedRef<any> = computed(() => usePage().props.flash);
const flash: ComputedRef<any> = computed(() => {
// let test = usePage();
// console.log(test);
return usePage().props.flash;
});
const licenseCount = computed(() => props.licenses.length);
const getLicenseColor = (index: number) => {
const colors = [
'bg-emerald-100 text-emerald-800 dark:bg-emerald-900 dark:text-emerald-300',
'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-300',
'bg-violet-100 text-violet-800 dark:bg-violet-900 dark:text-violet-300',
'bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-300',
'bg-rose-100 text-rose-800 dark:bg-rose-900 dark:text-rose-300',
'bg-cyan-100 text-cyan-800 dark:bg-cyan-900 dark:text-cyan-300',
];
return colors[index % colors.length];
};
</script>
<template>
<LayoutAuthenticated>
<Head title="Licenses" />
<SectionMain>
<SectionTitleLineWithButton :icon="mdiLicense" title="Licenses" main>
<div class="flex items-center gap-3">
<span class="text-sm text-gray-500 dark:text-gray-400 font-medium">
{{ licenseCount }} {{ licenseCount === 1 ? 'license' : 'licenses' }}
</span>
</div>
<SectionTitleLineWithButton :icon="mdiAccountKey" title="Licenses" main>
<!-- <BaseButton
v-if="can.create"
:route-name="stardust.route('settings.role.create')"
:icon="mdiPlus"
label="Add"
color="info"
rounded-full
small
/> -->
</SectionTitleLineWithButton>
<NotificationBar v-if="flash.message" color="success" :icon="mdiAlertBoxOutline">
{{ flash.message }}
</NotificationBar>
<CardBox class="mb-6" has-table>
</CardBox>
<CardBox class="mb-6" has-form-data>
<table>
<thead>
<tr>
<th>Name</th>
<th>Sort Order</th>
<th>Status</th>
<th>
<!-- <Sort label="Name" attribute="name" /> -->
Name
</th>
<th>
<!-- <Sort label="Sort Order" attribute="sort_order" /> -->
Sort Order
</th>
<th v-if="can.edit">Actions</th>
</tr>
</thead>
<tbody>
<tr v-if="licenses.length === 0">
<td colspan="4" class="text-center py-12">
<div class="flex flex-col items-center justify-center text-gray-500 dark:text-gray-400">
<p class="text-lg font-medium mb-2">No licenses found</p>
<p class="text-sm">Licenses will appear here once configured</p>
</div>
</td>
</tr>
<tr
v-for="(license, index) in licenses"
:key="license.id"
class="hover:bg-gray-50 dark:hover:bg-slate-800 transition-colors"
>
<tr v-for="license in licenses" :key="license.id">
<td data-label="Name">
<span
class="inline-flex items-center px-3 py-1 rounded-full text-sm font-medium transition-all hover:shadow-md"
:class="getLicenseColor(index)"
<!-- <Link
:href="stardust.route('settings.role.show', [role.id])"
class="no-underline hover:underline text-cyan-600 dark:text-cyan-400"
>
{{ license.name }}
</span>
</Link> -->
{{ license.name }}
</td>
<td data-label="Sort Order">
<span
class="inline-flex items-center justify-center w-8 h-8 rounded-full bg-gray-100 dark:bg-slate-700 text-gray-700 dark:text-gray-300 font-semibold text-sm"
>
{{ license.sort_order }}
</span>
</td>
<td data-label="Status">
<span
v-if="license.active"
class="inline-flex items-center gap-1 px-2.5 py-1 rounded-full text-xs font-medium bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-300"
>
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 20 20">
<path
fill-rule="evenodd"
d="M10 18a8 8 0 100-16 8 8 0 000 16zm3.707-9.293a1 1 0 00-1.414-1.414L9 10.586 7.707 9.293a1 1 0 00-1.414 1.414l2 2a1 1 0 001.414 0l4-4z"
clip-rule="evenodd"
/>
</svg>
Active
</span>
<span
v-else
class="inline-flex items-center gap-1 px-2.5 py-1 rounded-full text-xs font-medium bg-gray-100 text-gray-800 dark:bg-gray-800 dark:text-gray-300"
>
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 20 20">
<path
fill-rule="evenodd"
d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z"
clip-rule="evenodd"
/>
</svg>
Inactive
</span>
<td data-label="Description">
{{ license.sort_order }}
</td>
<td v-if="can.edit" class="before:hidden lg:w-1 whitespace-nowrap">
<BaseButtons type="justify-start lg:justify-end" no-wrap>
<BaseButton
v-if="license.active"
<BaseButton v-if="license.active"
:route-name="stardust.route('settings.license.down', [license.id])"
color="warning"
:icon="mdiCloseCircle"
label="Deactivate"
small
/>
<BaseButton
v-else
:route-name="stardust.route('settings.license.up', [license.id])"
color="success"
:icon="mdiCheckCircle"
label="Activate"
small
/>
color="warning" :icon="mdiSquareEditOutline" label="deactivate" small />
<BaseButton v-else :route-name="stardust.route('settings.license.up', [license.id])"
color="success" :icon="mdiSquareEditOutline" label="activate" small />
</BaseButtons>
</td>
</tr>
</tbody>
</table>
<!-- <div class="py-4">
<Pagination v-bind:data="roles.meta" />
</div> -->
</CardBox>
</SectionMain>
</LayoutAuthenticated>

Some files were not shown because too many files have changed in this diff Show more