- added npm package dotenv-webpack for using env variables on the client side (see the config sketch below)
All checks were successful
CI Pipeline / japa-tests (push) Successful in 53s

- added API File Controller for downloading files, e.g. /api/download/1022 (see the route sketch after the controller code below)
- also creates hash values when submitting a new dataset
- added dataset editing functionality for the submitter role
- added the following route for the submitter role: '/dataset/:id/update', 'DatasetController.update'
- created extra UpdateDatasetValidator.ts for validating the updated dataset (validator sketch after the controller code below)
- npm updates
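A minimal sketch of how dotenv-webpack is typically enabled, assuming a plain webpack config (the repo's actual bundler setup is not shown in this commit and may differ):

// webpack.config.js - sketch only; file layout and plugin options are assumptions
const Dotenv = require('dotenv-webpack');

module.exports = {
    plugins: [
        // copies variables from .env into the client bundle at build time,
        // so browser code can read process.env.* without shipping the .env file
        new Dotenv(),
    ],
};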
Kaimbacher 2023-11-22 17:06:55 +01:00
parent a7142f694f
commit d8bdce1369
23 changed files with 2181 additions and 853 deletions

View file

@@ -0,0 +1,54 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import File from 'App/Models/File';
import { StatusCodes } from 'http-status-codes';
import * as fs from 'fs';
import * as path from 'path';
// node ace make:controller File
export default class FileController {
// @Get("download/:id")
public async findOne({ response, params }: HttpContextContract) {
const id = params.id;
const file = await File.findOrFail(id);
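        // note: findOrFail() already throws if no row matches, so the else branch below is defensive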
// const file = await File.findOne({
// where: { id: id },
// });
if (file) {
const filePath = '/storage/app/public/' + file.pathName;
const ext = path.extname(filePath);
const fileName = file.label + ext;
try {
fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
// console.log("can read/write:", path);
response
                    .header('Cache-Control', 'no-cache, private')
                    .header('Content-Description', 'File Transfer')
                    .header('Content-Type', file.mimeType)
                    .header('Content-Disposition', 'inline; filename="' + fileName + '"')
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET,POST');
response.status(StatusCodes.OK).download(filePath);
} catch (err) {
// console.log("no access:", path);
response.status(StatusCodes.NOT_FOUND).send({
message: `File with id ${id} doesn't exist on file server`,
});
}
// res.status(StatusCodes.OK).sendFile(filePath, (err) => {
// // res.setHeader("Content-Type", "application/json");
// // res.removeHeader("Content-Disposition");
// res.status(StatusCodes.NOT_FOUND).send({
// message: `File with id ${id} doesn't exist on file server`,
// });
// });
} else {
response.status(StatusCodes.NOT_FOUND).send({
message: `Cannot find File with id=${id}.`,
});
}
}
}
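For context, the endpoints from the commit message would presumably be registered along these lines in start/routes.ts (a sketch; the HTTP verbs, middleware, and route names are assumptions, since the routes file is not part of this excerpt):

// start/routes.ts - sketch only
import Route from '@ioc:Adonis/Core/Route';

// e.g. GET /api/download/1022 streams the file with id 1022
Route.get('/api/download/:id', 'FileController.findOne');

// edit/update flow for the submitter role
Route.get('/dataset/:id/edit', 'DatasetController.edit');
Route.post('/dataset/:id/update', 'DatasetController.update');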
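Likewise, the new UpdateDatasetValidator.ts is not shown in this diff; it presumably follows the standard AdonisJS 5 validator shape, roughly like this (the fields are illustrative, not taken from the real file):

// app/Validators/UpdateDatasetValidator.ts - illustrative sketch only
import { schema } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';

export default class UpdateDatasetValidator {
    constructor(protected ctx: HttpContextContract) {}

    // hypothetical fields; the real schema validates titles, descriptions, files, etc.
    public schema = schema.create({
        language: schema.string(),
        licenses: schema.array().members(schema.number()),
    });

    public messages = {};
}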

View file

@@ -15,6 +15,7 @@ import Database from '@ioc:Adonis/Lucid/Database';
import { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';
import Subject from 'App/Models/Subject';
import CreateDatasetValidator from 'App/Validators/CreateDatasetValidator';
import UpdateDatasetValidator from 'App/Validators/UpdateDatasetValidator';
import {
TitleTypes,
DescriptionTypes,
@@ -33,8 +34,6 @@ import ClamScan from 'clamscan';
import { ValidationException } from '@ioc:Adonis/Core/Validator';
import Drive from '@ioc:Adonis/Core/Drive';
import { Exception } from '@adonisjs/core/build/standalone';
// import XmlModel from 'App/Library/XmlModel';
// import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
export default class DatasetController {
public async index({ auth, request, inertia }: HttpContextContract) {
@@ -355,7 +354,7 @@ export default class DatasetController {
//store licenses:
const licenses: number[] = request.input('licenses', []);
-        dataset.useTransaction(trx).related('licenses').sync(licenses);
+        await dataset.useTransaction(trx).related('licenses').sync(licenses);
// save authors and contributors
await this.savePersons(dataset, request.input('authors', []), 'author', trx);
@@ -456,7 +455,7 @@
newFile.visibleInOai = true;
// let path = coverImage.filePath;
await dataset.useTransaction(trx).related('files').save(newFile);
-            // await newFile.createHashValues();
+            await newFile.createHashValues(trx);
}
}
@@ -682,29 +681,39 @@
// throw new GeneralException(trans('exceptions.publish.release.update_error'));
}
-    public async edit({ params, inertia }) {
-        const datasetQuery = Dataset.query().where('id', params.id);
-        datasetQuery.preload('titles').preload('descriptions').preload('coverage');
-        const dataset = await datasetQuery.firstOrFail();
+    public async edit({ request, inertia, response }) {
+        const id = request.param('id');
+        const datasetQuery = Dataset.query().where('id', id);
+        datasetQuery
+            .preload('titles', (query) => query.orderBy('id', 'asc'))
+            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
+            .preload('coverage')
+            .preload('licenses')
+            .preload('authors')
+            .preload('contributors')
+            .preload('subjects')
+            .preload('references')
+            .preload('files');
// await dataset.loadMany([
// 'licenses',
// 'authors',
// 'contributors',
// 'titles',
// 'abstracts',
// 'files',
// 'coverage',
// 'subjects',
// 'references',
// ]);
const dataset = await datasetQuery.firstOrFail();
const validStates = ['inprogress', 'rejected_editor'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('dataset.list');
}
const titleTypes = Object.entries(TitleTypes)
-        // .filter(([value]) => value !== 'Main')
+        .filter(([value]) => value !== 'Main')
.map(([key, value]) => ({ value: key, label: value }));
const descriptionTypes = Object.entries(DescriptionTypes)
-        // .filter(([value]) => value !== 'Abstract')
+        .filter(([value]) => value !== 'Abstract')
.map(([key, value]) => ({ value: key, label: value }));
const languages = await Language.query().where('active', true).pluck('part1', 'part1');
@@ -724,33 +733,11 @@
const currentYear = currentDate.getFullYear();
const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);
// const licenses = await License.query()
// .select('id', 'name_long', 'link_licence')
// .orderBy('sort_order')
// .fetch();
-        const licenses = await License.query().select('id', 'name_long', 'link_licence').orderBy('sort_order');
+        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
// const userHasRoles = user.roles;
// const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
// const checkeds = dataset.licenses.first().id;
const keywordTypes = {
uncontrolled: 'uncontrolled',
swd: 'swd',
};
const referenceTypes = ['DOI', 'Handle', 'ISBN', 'ISSN', 'URL', 'URN'];
const relationTypes = [
'IsSupplementTo',
'IsSupplementedBy',
'IsContinuedBy',
'Continues',
'IsNewVersionOf',
'IsPartOf',
'HasPart',
'Compiles',
'IsVariantFormOf',
];
const doctypes = {
analysisdata: { label: 'Analysis', value: 'analysisdata' },
measurementdata: { label: 'Measurements', value: 'measurementdata' },
@@ -771,16 +758,164 @@
// messages,
projects,
licenses,
// datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
// checkeds,
years,
// languages,
keywordTypes,
-            referenceTypes,
-            relationTypes,
            subjectTypes: SubjectTypes,
+            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
+            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
doctypes,
});
}
public async update({ request, response, session }: HttpContextContract) {
try {
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
await request.validate(UpdateDatasetValidator);
} catch (error) {
// - Handle errors
// return response.badRequest(error.messages);
throw error;
}
const id = request.param('id');
let trx: TransactionClientContract | null = null;
try {
trx = await Database.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
const dataset = await Dataset.findOrFail(id);
// save the licenses
const licenses: number[] = request.input('licenses', []);
// await dataset.useTransaction(trx).related('licenses').sync(licenses);
await dataset.useTransaction(trx).related('licenses').sync(licenses);
// save authors and contributors
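            // note: sync([]) first detaches every existing author/contributor link so savePersons() can rebuild both lists from the request payload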
await dataset.useTransaction(trx).related('authors').sync([]);
await dataset.useTransaction(trx).related('contributors').sync([]);
await this.savePersons(dataset, request.input('authors', []), 'author', trx);
await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);
//save the titles:
const titles = request.input('titles', []);
// const savedTitles:Array<Title> = [];
for (const titleData of titles) {
if (titleData.id) {
const title = await Title.findOrFail(titleData.id);
title.value = titleData.value;
title.language = titleData.language;
title.type = titleData.type;
if (title.$isDirty) {
await title.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const title = new Title();
title.fill(titleData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('titles').save(title);
}
}
// save the abstracts
const descriptions = request.input('descriptions', []);
// const savedTitles:Array<Title> = [];
for (const descriptionData of descriptions) {
if (descriptionData.id) {
const description = await Description.findOrFail(descriptionData.id);
description.value = descriptionData.value;
description.language = descriptionData.language;
description.type = descriptionData.type;
if (description.$isDirty) {
await description.useTransaction(trx).save();
// await dataset.useTransaction(trx).related('titles').save(title);
// savedTitles.push(title);
}
} else {
const description = new Description();
description.fill(descriptionData);
// savedTitles.push(title);
await dataset.useTransaction(trx).related('descriptions').save(description);
}
}
// Save already existing files
const files = request.input('fileInputs', []);
for (const fileData of files) {
if (fileData.id) {
const file = await File.findOrFail(fileData.id);
file.label = fileData.label;
file.sortOrder = fileData.sort_order;
if (file.$isDirty) {
await file.useTransaction(trx).save();
}
}
}
// handle new uploaded files:
const uploadedFiles = request.files('files');
if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
// let index = 1;
// for (const key in files) {
// const formFile = files[key]
// for (const fileData of files) {
for (const [index, fileData] of uploadedFiles.entries()) {
// const uploads = request.file('uploads');
// const fileIndex = formFile.file;
// const file = uploads[fileIndex];
const fileName = `file-${cuid()}.${fileData.extname}`;
const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
const datasetFolder = `files/${dataset.id}`;
await fileData.moveToDisk(
datasetFolder,
{
name: fileName,
overwrite: true, // overwrite in case of conflict
},
'local',
);
// save file metadata into db
const newFile = new File();
newFile.pathName = `${datasetFolder}/${fileName}`;
newFile.fileSize = fileData.size;
newFile.mimeType = mimeType;
newFile.label = fileData.clientName;
newFile.sortOrder = index;
newFile.visibleInFrontdoor = true;
newFile.visibleInOai = true;
// let path = coverImage.filePath;
await dataset.useTransaction(trx).related('files').save(newFile);
                    await newFile.createHashValues(trx);
}
}
const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
// dataset.type = request.input('type');
dataset.merge(input);
// let test: boolean = dataset.$isDirty;
await dataset.useTransaction(trx).save();
await trx.commit();
            console.log('Dataset and related models updated successfully');
} catch (error) {
if (trx !== null) {
await trx.rollback();
}
            console.error('Failed to update dataset and related models:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
        session.flash('message', 'Dataset has been updated successfully');
// return response.redirect().toRoute('user.index');
return response.redirect().back();
}
public async delete({ request, inertia, response, session }) {
const id = request.param('id');
try {