- validate all file uploads via clamdscan (ClamAV); throw a ValidationException if scanning fails (a hedged scanning sketch follows the change list below)
All checks were successful
CI Pipeline / japa-tests (push) Successful in 50s
- add @types/clamscan and clamscan for Node
- package clamav-daemon and clamav-freshclam for Docker
- add API controller HomeController.ts for /api/years and /api/sitelinks/{year}
- change root path of file storage from '/storage/app/public/files' to '/storage/app/public'
- adapt Dockerfile to use node:18-bookworm-slim
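The upload validation itself is not part of the diff shown below. The following is a minimal sketch of how the clamdscan check mentioned above could be built with the clamscan package added in this commit, assuming a clamav-daemon reachable over its default Unix socket; the assertFileIsClean helper name and the socket path are assumptions, not code from this commit, and a plain Error stands in for Adonis' ValidationException.

import NodeClam from 'clamscan';

// Hypothetical helper (not part of this diff): scan an uploaded file against clamd
// and reject it if ClamAV reports an infection.
export async function assertFileIsClean(filePath: string): Promise<void> {
    // init() connects to the running clamav-daemon; the socket path is an assumption
    const clamscan = await new NodeClam().init({
        clamdscan: {
            socket: '/var/run/clamav/clamd.ctl',
            timeout: 60000,
        },
    });

    const { isInfected, viruses } = await clamscan.isInfected(filePath);
    if (isInfected) {
        // the commit throws a ValidationException here; a plain Error stands in for it
        throw new Error(`upload rejected, ClamAV reported: ${viruses.join(', ')}`);
    }
}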
This commit is contained in:
parent 5f8fe1c16d
commit b6b1c90ff8
20 changed files with 941 additions and 278 deletions
64  app/Controllers/Http/Api/HomeController.ts  Normal file
@@ -0,0 +1,64 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Database from '@ioc:Adonis/Lucid/Database';
import { StatusCodes } from 'http-status-codes';

export default class HomeController {
    public async findDocumentsPerYear({ response, params }: HttpContextContract) {
        const year = params.year;
        const from = parseInt(year, 10);
        const serverState = 'published';
        try {
            // Alternative ORM approach, kept for reference:
            // const datasets = await Dataset.query()
            //     .select(['id', 'publish_id', 'server_date_published'])
            //     .where('server_state', serverState)
            //     .andWhereRaw(`date_part('year', server_date_published) = ?`, [from])
            //     .preload('titles')
            //     .preload('authors')
            //     .orderBy('server_date_published');

            // Select all published documents of the requested year via the query builder
            const datasets = await Database.from('documents as doc')
                .select([
                    'publish_id',
                    'server_date_published',
                    Database.raw(`date_part('year', server_date_published) as pub_year`),
                ])
                .where('server_state', serverState)
                .innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
                .andWhereRaw(`date_part('year', server_date_published) = ?`, [from])
                .orderBy('server_date_published');

            return response.json(datasets);
        } catch (error) {
            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
                message: error.message || 'Some error occurred while retrieving datasets.',
            });
        }
    }

    public async findYears({ response }: HttpContextContract) {
        const serverState = 'published';
        // Use a raw SQL query to select the distinct publication years of all published documents
        try {
            const datasets = await Database.rawQuery(
                'SELECT distinct EXTRACT(YEAR FROM server_date_published) as published_date FROM gba.documents WHERE server_state = ?',
                [serverState],
            );

            // Pluck the publication years from the result rows
            const years = datasets.rows.map((dataset) => dataset.published_date);

            return response.status(StatusCodes.OK).json(years);
        } catch (error) {
            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
                message: 'An error occurred while retrieving the list of publication years from the Tethys repository.',
            });
        }
    }
}
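The route registration for the two endpoints named in the commit message (/api/years and /api/sitelinks/{year}) is among the 20 changed files but not shown in this diff. A plausible wiring in start/routes.ts could look like the sketch below; the prefix and handler strings are assumptions based on the controller path above.

// start/routes.ts (sketch, not taken from this diff)
import Route from '@ioc:Adonis/Core/Route';

Route.group(() => {
    // GET /api/years -> HomeController.findYears
    Route.get('/years', 'Api/HomeController.findYears');
    // GET /api/sitelinks/:year -> HomeController.findDocumentsPerYear
    Route.get('/sitelinks/:year', 'Api/HomeController.findDocumentsPerYear');
}).prefix('/api');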