feat: Enhance Person data structure and improve TablePersons component

- Updated the Person interface to include first_name and last_name fields for better clarity and organization handling (see the sketch below).
- Modified TablePersons.vue to support new fields, including improved pagination and drag-and-drop functionality.
- Added loading states and error handling for form controls within the table.
- Enhanced the visual layout of the table with responsive design adjustments.
- Updated solr.xslt to correctly reference ServerDateModified and EmbargoDate attributes.
- Updated AvatarController: added input validation, graceful Redis cache fallbacks with a 24-hour TTL, and XML-escaped SVG output.
- Improved the download methods for the editor and reviewer controllers (consistent headers and correct file-name extensions).
- Improved security for the official file download API: access is now filtered by server_state.
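
For orientation, a minimal sketch of the reshaped Person interface; the field list is inferred from the select() call in AuthorsController below, so names and types are assumptions rather than the actual frontend definition:

// Hypothetical shape only — inferred from the AuthorsController query below.
interface Person {
    id: number;
    academic_title: string | null;
    first_name: string;              // new field
    last_name: string;               // new field
    identifier_orcid: string | null;
    status: boolean;                 // type assumed
    name_type: 'Personal' | 'Organizational'; // 'Organizational' value assumed
    created_at: string;
    datasets_count?: number;         // added via withCount('datasets')
    // 'email' is intentionally omitted from API responses
}
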
Kaimbacher 2025-09-08 12:28:26 +02:00
parent e1ccf0ddc8
commit 06ed2f3625
12 changed files with 3143 additions and 1387 deletions

View file

@@ -4,20 +4,29 @@ import Person from '#models/person';
// node ace make:controller Author
export default class AuthorsController {
public async index({}: HttpContext) {
// select * from gba.persons
// where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
// where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
public async index({}: HttpContext) {
const authors = await Person.query()
.preload('datasets')
.where('name_type', 'Personal')
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.orderBy('datasets_count', 'desc');
.select([
'id',
'academic_title',
'first_name',
'last_name',
'identifier_orcid',
'status',
'name_type',
'created_at'
// Note: 'email' is omitted
])
.preload('datasets')
.where('name_type', 'Personal')
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.orderBy('datasets_count', 'desc');
return authors;
}

View file

@@ -2,26 +2,46 @@ import type { HttpContext } from '@adonisjs/core/http';
import { StatusCodes } from 'http-status-codes';
import redis from '@adonisjs/redis/services/main';
const PREFIXES = ['von', 'van'];
const PREFIXES = ['von', 'van', 'de', 'del', 'della', 'di', 'da', 'dos', 'du', 'le', 'la'];
const DEFAULT_SIZE = 50;
const MIN_SIZE = 16;
const MAX_SIZE = 512;
const FONT_SIZE_RATIO = 0.4;
const COLOR_LIGHTENING_PERCENT = 60;
const COLOR_DARKENING_FACTOR = 0.6;
const CACHE_TTL = 24 * 60 * 60; // 24 hours instead of 1 hour
export default class AvatarController {
public async generateAvatar({ request, response }: HttpContext) {
try {
const { name, size = DEFAULT_SIZE } = request.only(['name', 'size']);
if (!name) {
return response.status(StatusCodes.BAD_REQUEST).json({ error: 'Name is required' });
// Enhanced validation
if (!name || typeof name !== 'string' || name.trim().length === 0) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: 'Name is required and must be a non-empty string',
});
}
const parsedSize = this.validateSize(size);
if (!parsedSize.isValid) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: parsedSize.error,
});
}
// Build a unique cache key for the given name and size
const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
const cachedSvg = await redis.get(cacheKey);
if (cachedSvg) {
this.setResponseHeaders(response);
return response.send(cachedSvg);
const cacheKey = `avatar:${this.sanitizeName(name)}-${parsedSize.value}`;
// const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
try {
const cachedSvg = await redis.get(cacheKey);
if (cachedSvg) {
this.setResponseHeaders(response);
return response.send(cachedSvg);
}
} catch (redisError) {
// Log redis error but continue without cache
console.warn('Redis cache read failed:', redisError);
}
const initials = this.getInitials(name);
@@ -29,41 +49,85 @@ export default class AvatarController {
const svgContent = this.createSvg(size, colors, initials);
// // Cache the generated avatar for future use, e.g. 1 hour expiry
await redis.setex(cacheKey, 3600, svgContent);
try {
await redis.setex(cacheKey, CACHE_TTL, svgContent);
} catch (redisError) {
// Log but don't fail the request
console.warn('Redis cache write failed:', redisError);
}
this.setResponseHeaders(response);
return response.send(svgContent);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ error: error.message });
console.error('Avatar generation error:', error);
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
error: 'Failed to generate avatar',
});
}
}
private getInitials(name: string): string {
const parts = name
private validateSize(size: any): { isValid: boolean; value?: number; error?: string } {
const numSize = Number(size);
if (isNaN(numSize)) {
return { isValid: false, error: 'Size must be a valid number' };
}
if (numSize < MIN_SIZE || numSize > MAX_SIZE) {
return {
isValid: false,
error: `Size must be between ${MIN_SIZE} and ${MAX_SIZE}`,
};
}
return { isValid: true, value: Math.floor(numSize) };
}
private sanitizeName(name: string): string {
return name
.trim()
.toLowerCase()
.replace(/[^a-z0-9\s-]/gi, '');
}
private getInitials(name: string): string {
const sanitized = name.trim().replace(/\s+/g, ' '); // normalize whitespace
const parts = sanitized
.split(' ')
.filter((part) => part.length > 0);
.filter((part) => part.length > 0)
.map((part) => part.trim());
if (parts.length === 0) {
return 'NA';
}
if (parts.length >= 2) {
return this.getMultiWordInitials(parts);
if (parts.length === 1) {
// For single word, take first 2 characters or first char if only 1 char
return parts[0].substring(0, Math.min(2, parts[0].length)).toUpperCase();
}
return parts[0].substring(0, 2).toUpperCase();
return this.getMultiWordInitials(parts);
}
private getMultiWordInitials(parts: string[]): string {
const firstName = parts[0];
const lastName = parts[parts.length - 1];
const firstInitial = firstName.charAt(0).toUpperCase();
const lastInitial = lastName.charAt(0).toUpperCase();
// Filter out prefixes and short words
const significantParts = parts.filter((part) => !PREFIXES.includes(part.toLowerCase()) && part.length > 1);
if (PREFIXES.includes(lastName.toLowerCase()) && lastName === lastName.toUpperCase()) {
return firstInitial + lastName.charAt(1).toUpperCase();
if (significantParts.length === 0) {
// Fallback to first and last regardless of prefixes
const firstName = parts[0];
const lastName = parts[parts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
}
return firstInitial + lastInitial;
if (significantParts.length === 1) {
return significantParts[0].substring(0, 2).toUpperCase();
}
// Take first and last significant parts
const firstName = significantParts[0];
const lastName = significantParts[significantParts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
}
private generateColors(name: string): { background: string; text: string } {
@@ -75,31 +139,44 @@ export default class AvatarController {
}
private createSvg(size: number, colors: { background: string; text: string }, initials: string): string {
const fontSize = size * FONT_SIZE_RATIO;
return `
<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="#${colors.background}"/>
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-weight="bold" font-family="Arial, sans-serif" font-size="${fontSize}" fill="#${colors.text}">${initials}</text>
</svg>
`;
const fontSize = Math.max(12, Math.floor(size * FONT_SIZE_RATIO)); // Ensure readable font size
// Escape any potential HTML/XML characters in initials
const escapedInitials = this.escapeXml(initials);
return `<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${size} ${size}">
<rect width="100%" height="100%" fill="#${colors.background}" rx="${size * 0.1}"/>
<text x="50%" y="50%" dominant-baseline="central" text-anchor="middle"
font-weight="600" font-family="-apple-system, BlinkMacSystemFont, 'Segoe UI', system-ui, sans-serif"
font-size="${fontSize}" fill="#${colors.text}">${escapedInitials}</text>
</svg>`;
}
private escapeXml(text: string): string {
return text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&apos;');
}
private setResponseHeaders(response: HttpContext['response']): void {
response.header('Content-type', 'image/svg+xml');
response.header('Cache-Control', 'no-cache');
response.header('Pragma', 'no-cache');
response.header('Expires', '0');
response.header('Content-Type', 'image/svg+xml');
response.header('Cache-Control', 'public, max-age=86400'); // Cache for 1 day
response.header('ETag', `"${Date.now()}"`); // Simple ETag
}
private getColorFromName(name: string): string {
let hash = 0;
for (let i = 0; i < name.length; i++) {
hash = name.charCodeAt(i) + ((hash << 5) - hash);
const normalizedName = name.toLowerCase().trim();
for (let i = 0; i < normalizedName.length; i++) {
hash = normalizedName.charCodeAt(i) + ((hash << 5) - hash);
hash = hash & hash; // Convert to 32-bit integer
}
// Ensure we get vibrant colors by constraining the color space
const colorParts = [];
for (let i = 0; i < 3; i++) {
const value = (hash >> (i * 8)) & 0xff;
let value = (hash >> (i * 8)) & 0xff;
// Ensure minimum color intensity for better contrast
value = Math.max(50, value);
colorParts.push(value.toString(16).padStart(2, '0'));
}
return colorParts.join('');
@@ -110,7 +187,7 @@ export default class AvatarController {
const g = parseInt(hexColor.substring(2, 4), 16);
const b = parseInt(hexColor.substring(4, 6), 16);
const lightenValue = (value: number) => Math.min(255, Math.floor((value * (100 + percent)) / 100));
const lightenValue = (value: number) => Math.min(255, Math.floor(value + (255 - value) * (percent / 100)));
const newR = lightenValue(r);
const newG = lightenValue(g);
@@ -124,7 +201,7 @@ export default class AvatarController {
const g = parseInt(hexColor.slice(2, 4), 16);
const b = parseInt(hexColor.slice(4, 6), 16);
const darkenValue = (value: number) => Math.round(value * COLOR_DARKENING_FACTOR);
const darkenValue = (value: number) => Math.max(0, Math.floor(value * COLOR_DARKENING_FACTOR));
const darkerR = darkenValue(r);
const darkerG = darkenValue(g);
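
For reference, the prefix-aware initials logic above behaves like this (illustrative expectations derived from the code in this hunk, not part of the diff):

// getInitials('Ludwig van Beethoven') -> 'LB'  ('van' is filtered out as a prefix)
// getInitials('de la Cruz')           -> 'CR'  (only one significant part remains)
// getInitials('Plato')                -> 'PL'  (single word: first two characters)
// getInitials('')                     -> 'NA'  (empty-input fallback)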

View file

@@ -9,8 +9,7 @@ export default class DatasetController {
// Select datasets with server_state 'published' or 'deleted' and sort by the last published date
const datasets = await Dataset.query()
.where(function (query) {
query.where('server_state', 'published')
.orWhere('server_state', 'deleted');
query.where('server_state', 'published').orWhere('server_state', 'deleted');
})
.preload('titles')
.preload('identifier')
@@ -39,7 +38,9 @@ export default class DatasetController {
.where('publish_id', params.publish_id)
.preload('titles')
.preload('descriptions')
.preload('user')
.preload('user', (builder) => {
builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
})
.preload('authors', (builder) => {
builder.orderBy('pivot_sort_order', 'asc');
})

View file

@@ -2,7 +2,6 @@ import type { HttpContext } from '@adonisjs/core/http';
import File from '#models/file';
import { StatusCodes } from 'http-status-codes';
import * as fs from 'fs';
import * as path from 'path';
import { DateTime } from 'luxon';
// node ace make:controller Author
@@ -23,8 +22,13 @@ export default class FileController {
});
}
// Check embargo date
const dataset = file.dataset; // or file.dataset
const dataset = file.dataset;
// Files from unpublished datasets are now blocked
if (dataset.server_state !== 'published') {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File access denied: Dataset is not published.`,
});
}
if (dataset && this.isUnderEmbargo(dataset.embargo_date)) {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File is under embargo until ${dataset.embargo_date?.toFormat('yyyy-MM-dd')}`,
@@ -32,13 +36,16 @@ }
}
// Proceed with file download
const filePath = '/storage/app/data/' + file.pathName;
const ext = path.extname(filePath);
const fileName = file.label + ext;
const filePath = '/storage/app/data/' + file.pathName;
const fileExt = file.filePath.split('.').pop() || '';
// const fileName = file.label + fileExt;
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`)
? file.label
: `${file.label}.${fileExt}`;
try {
fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
// console.log("can read/write:", path);
// console.log("can read/write:", filePath);
response
.header('Cache-Control', 'no-cache private')
@@ -47,7 +54,7 @@
.header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET,POST');
.header('Access-Control-Allow-Methods', 'GET');
response.status(StatusCodes.OK).download(filePath);
} catch (err) {

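The embargo check above calls isUnderEmbargo, which is not part of this hunk. A minimal sketch of what such a helper could look like, assuming embargo_date is a nullable luxon DateTime (the actual implementation in FileController may differ):

// Hypothetical helper — not taken from the repository; relies on the existing luxon import.
private isUnderEmbargo(embargoDate: DateTime | null | undefined): boolean {
    if (!embargoDate) {
        return false; // no embargo set
    }
    // Still embargoed while the embargo date lies in the future (inclusive of that day).
    return embargoDate.endOf('day') > DateTime.now();
}
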
View file

@@ -252,7 +252,6 @@ export default class DatasetsController {
dataset.reject_editor_note = null;
}
//save main and additional titles
const reviewer_id = request.input('reviewer_id', null);
dataset.reviewer_id = reviewer_id;
@@ -290,8 +289,6 @@
});
}
public async rejectUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user!;
@@ -402,12 +399,10 @@
.back();
}
return inertia.render('Editor/Dataset/Publish', {
dataset,
can: {
reject: await auth.user?.can(['dataset-editor-reject']),
can: {
reject: await auth.user?.can(['dataset-editor-reject']),
publish: await auth.user?.can(['dataset-publish']),
},
});
@@ -454,7 +449,7 @@
public async rejectToReviewer({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
.where('id', id)
.preload('reviewer', (builder) => {
builder.select('id', 'login', 'email');
})
@@ -555,7 +550,6 @@
}
}
return response
.flash(
`You have successfully rejected dataset ${dataset.id} reviewed by ${dataset.reviewer.login}.${emailStatusMessage}`,
@@ -605,11 +599,10 @@
doiIdentifier.dataset_id = dataset.id;
doiIdentifier.type = 'doi';
doiIdentifier.status = 'findable';
// save updated dataset to db an index to OpenSearch
try {
// save modified date of datset for re-caching model in db an update the search index
// save modified date of datset for re-caching model in db an update the search index
dataset.server_date_modified = DateTime.now();
// autoUpdate: true only triggers when dataset.save() is called, not when saving a related model like below
await dataset.save();
@@ -1125,9 +1118,20 @@
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response.header('Content-Type', file.mime_type || 'application/octet-stream');
response.attachment(`${file.label}.${fileExt}`);
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}

View file

@@ -107,13 +107,12 @@
}
return inertia.render('Reviewer/Dataset/Review', {
dataset,
dataset,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
public async review_old({ request, inertia, response, auth }: HttpContext) {
@@ -370,6 +369,19 @@
.flash(`You have rejected dataset ${dataset.id}! to editor ${dataset.editor.login}`, 'message');
}
// public async download({ params, response }: HttpContext) {
// const id = params.id;
// // Find the file by ID
// const file = await File.findOrFail(id);
// // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
// const filePath = file.filePath;
// const fileExt = file.filePath.split('.').pop() || '';
// // Set the response headers and download the file
// response.header('Content-Type', file.mime_type || 'application/octet-stream');
// response.attachment(`${file.label}.${fileExt}`);
// return response.download(filePath);
// }
public async download({ params, response }: HttpContext) {
const id = params.id;
// Find the file by ID
@@ -377,9 +389,20 @@
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response.header('Content-Type', file.mime_type || 'application/octet-stream');
response.attachment(`${file.label}.${fileExt}`);
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}
}