- adds @adonisjs/redis for saving sessions into Redis, with redis.ts contract and config
Some checks failed
CI Pipeline / japa-tests (push) Failing after 52s
- npm updated
- added createHashValues and delete inside File.ts
- added dataset_count property inside Subject.ts
- corrected routes.ts with correct permissions
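The redis.ts contract and config named in the first bullet are part of this commit but not shown in the excerpt below. For orientation, here is a minimal sketch of the usual @adonisjs/redis wiring in an AdonisJS v5 app, assuming a single connection named 'local'; the env keys and connection name are illustrative, not taken from this commit:

    // contracts/redis.ts - declare the connections the application may use
    declare module '@ioc:Adonis/Addons/Redis' {
        interface RedisConnectionsList {
            local: RedisConnectionConfig;
        }
    }

    // config/redis.ts - define the 'local' connection from environment variables
    import Env from '@ioc:Adonis/Core/Env';
    import { redisConfig } from '@adonisjs/redis/build/config';

    export default redisConfig({
        connection: Env.get('REDIS_CONNECTION'),
        connections: {
            local: {
                host: Env.get('REDIS_HOST'),
                port: Env.get('REDIS_PORT'),
                password: Env.get('REDIS_PASSWORD', ''),
                db: 0,
                keyPrefix: '',
            },
        },
    });

    // config/session.ts then points the session driver at this connection,
    // e.g. driver: 'redis' together with redisConnection: 'local'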
This commit is contained in:
parent d8bdce1369, commit b6fdfbff41
29 changed files with 496 additions and 201 deletions
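The routes.ts permission fix from the last bullet lives in another file of this commit. As a sketch only: route-level permission checks in an AdonisJS v5 app of this shape typically attach named middleware per route; the 'can:' middleware and the ability names below are assumptions, not code from this diff:

    // start/routes.ts (illustrative sketch, not from this commit)
    import Route from '@ioc:Adonis/Core/Route';

    Route.group(() => {
        // listing is gated by a read permission, deleting by a stricter one
        Route.get('/dataset', 'DatasetController.index').as('dataset.list').middleware(['can:dataset-list']);
        Route.delete('/dataset/:id', 'DatasetController.delete').as('dataset.delete').middleware(['can:dataset-delete']);
    })
        .middleware(['auth'])
        .prefix('/submitter');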
@ -34,6 +34,7 @@ import ClamScan from 'clamscan';
 import { ValidationException } from '@ioc:Adonis/Core/Validator';
 import Drive from '@ioc:Adonis/Core/Drive';
 import { Exception } from '@adonisjs/core/build/standalone';
+import { MultipartFileContract } from '@ioc:Adonis/Core/BodyParser';
 
 export default class DatasetController {
     public async index({ auth, request, inertia }: HttpContextContract) {
@ -335,8 +336,8 @@ export default class DatasetController {
         }
 
         session.flash('message', 'Dataset has been created successfully');
-        // return response.redirect().toRoute('user.index');
-        return response.redirect().back();
+        return response.redirect().toRoute('user.index');
+        // return response.redirect().back();
     }
 
     private async createDatasetAndAssociations(user: User, request: HttpContextContract['request'], trx: TransactionClientContract) {
@ -691,7 +692,10 @@ export default class DatasetController {
             .preload('licenses')
             .preload('authors')
             .preload('contributors')
-            .preload('subjects')
+            // .preload('subjects')
+            .preload('subjects', (builder) => {
+                builder.orderBy('id', 'asc').withCount('datasets');
+            })
             .preload('references')
             .preload('files');
 
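The new withCount('datasets') aggregate is what backs the dataset_count property the commit message mentions for Subject.ts. A sketch of how such a property is typically exposed on a Lucid model; the pivot table name and the column set are inferred, not taken from this diff:

    // app/Models/Subject.ts (sketch)
    import { BaseModel, column, computed, manyToMany, ManyToMany } from '@ioc:Adonis/Lucid/Orm';
    import Dataset from 'App/Models/Dataset';

    export default class Subject extends BaseModel {
        @column({ isPrimary: true })
        public id: number;

        @column()
        public value: string;

        @column()
        public type: string;

        @column()
        public external_key: string | null;

        @manyToMany(() => Dataset, { pivotTable: 'link_dataset_subjects' }) // pivot table name is assumed
        public datasets: ManyToMany<typeof Dataset>;

        // Lucid stores the withCount('datasets') aggregate in $extras as 'datasets_count'
        @computed()
        public get dataset_count() {
            return Number(this.$extras.datasets_count ?? 0);
        }
    }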
@ -779,6 +783,7 @@ export default class DatasetController {
             throw error;
             // return response.badRequest(error.messages);
         }
+        // await request.validate(UpdateDatasetValidator);
         const id = request.param('id');
 
         let trx: TransactionClientContract | null = null;
@ -843,6 +848,25 @@ export default class DatasetController {
             }
         }
 
+        // await dataset.useTransaction(trx).related('subjects').sync([]);
+        const keywords = request.input('subjects');
+        for (const keywordData of keywords) {
+            if (keywordData.id) {
+                const subject = await Subject.findOrFail(keywordData.id);
+                // await dataset.useTransaction(trx).related('subjects').attach([keywordData.id]);
+                subject.value = keywordData.value;
+                subject.type = keywordData.type;
+                subject.external_key = keywordData.external_key;
+                if (subject.$isDirty) {
+                    await subject.save();
+                }
+            } else {
+                const keyword = new Subject();
+                keyword.fill(keywordData);
+                await dataset.useTransaction(trx).related('subjects').save(keyword, false);
+            }
+        }
+
         // Save already existing files
         const files = request.input('fileInputs', []);
         for (const fileData of files) {
@ -857,43 +881,57 @@ export default class DatasetController {
         }
 
         // handle new uploaded files:
-        const uploadedFiles = request.files('files');
+        const uploadedFiles: MultipartFileContract[] = request.files('files');
         if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
             // let index = 1;
             // for (const key in files) {
             //     const formFile = files[key]
             // for (const fileData of files) {
             for (const [index, fileData] of uploadedFiles.entries()) {
                 // const uploads = request.file('uploads');
                 // const fileIndex = formFile.file;
                 // const file = uploads[fileIndex];
 
                 const fileName = `file-${cuid()}.${fileData.extname}`;
-                const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
                 const datasetFolder = `files/${dataset.id}`;
-                await fileData.moveToDisk(
-                    datasetFolder,
-                    {
-                        name: fileName,
-                        overwrite: true, // overwrite in case of conflict
-                    },
-                    'local',
-                );
-                // save file metadata into db
-                const newFile = new File();
-                newFile.pathName = `${datasetFolder}/${fileName}`;
-                newFile.fileSize = fileData.size;
-                newFile.mimeType = mimeType;
-                newFile.label = fileData.clientName;
-                newFile.sortOrder = index;
-                newFile.visibleInFrontdoor = true;
-                newFile.visibleInOai = true;
 
+                await fileData.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
-                // let path = coverImage.filePath;
-                await dataset.useTransaction(trx).related('files').save(newFile);
-                await newFile.createHashValues();
 
+                const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(fileData.clientName);
+                const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
+                // save file metadata into db
+                // const newFile = new File();
+                // newFile.pathName = `${datasetFolder}/${fileName}`;
+                // newFile.fileSize = fileData.size;
+                // newFile.mimeType = mimeType;
+                // newFile.label = clientFileName;
+                // newFile.sortOrder = sortOrder ? sortOrder : index;
+                // newFile.visibleInFrontdoor = true;
+                // newFile.visibleInOai = true;
 
+                const newFile = await dataset
+                    .useTransaction(trx)
+                    .related('files')
+                    .create({
+                        pathName: `${datasetFolder}/${fileName}`,
+                        fileSize: fileData.size,
+                        mimeType,
+                        label: clientFileName,
+                        sortOrder: sortOrder || index,
+                        visibleInFrontdoor: true,
+                        visibleInOai: true,
+                    });
 
+                // save many related HashValue instances to the file:
+                await newFile.createHashValues(trx);
             }
         }
 
         // save collection
         // const collection: Collection | null = await Collection.query().where('id', 21).first();
         // collection && (await dataset.useTransaction(trx).related('collections').attach([collection.id]));
 
         // // Save coverage
         // if (data.coverage && !this.containsOnlyNull(data.coverage)) {
         //     const formCoverage = request.input('coverage');
         //     const coverage = await dataset.related('coverage').updateOrCreate({ dataset_id: dataset.id }, formCoverage);
         // } else if (data.coverage && this.containsOnlyNull(data.coverage) && !dataset.coverage) {
         //     await dataset.coverage().delete();
         // }
 
         const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
         // dataset.type = request.input('type');
         dataset.merge(input);
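newFile.createHashValues(trx) is one of the File.ts additions named in the commit message but not shown in this hunk. A minimal sketch of what such a method could look like, assuming a HashValue model related via hasMany and an md5/sha512 digest set; all names here are inferred, not taken from the commit:

    // app/Models/File.ts (sketch)
    import { createHash } from 'crypto';
    import Drive from '@ioc:Adonis/Core/Drive';
    import type { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';
    import { BaseModel, column, hasMany, HasMany } from '@ioc:Adonis/Lucid/Orm';
    import HashValue from 'App/Models/HashValue'; // assumed model

    export default class File extends BaseModel {
        @column({ isPrimary: true })
        public id: number;

        @column()
        public pathName: string;

        @hasMany(() => HashValue, { foreignKey: 'file_id' }) // key name assumed
        public hashvalues: HasMany<typeof HashValue>;

        // compute one digest per algorithm over the stored file and persist it
        public async createHashValues(trx?: TransactionClientContract) {
            const contents = await Drive.get(this.pathName); // read the file back from the disk
            for (const type of ['md5', 'sha512']) { // digest set is an assumption
                const hash = new HashValue();
                hash.type = type;
                hash.value = createHash(type).update(contents).digest('hex');
                if (trx) {
                    this.useTransaction(trx); // reuse the caller's transaction
                }
                await this.related('hashvalues').save(hash);
            }
        }
    }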
@ -911,11 +949,30 @@ export default class DatasetController {
             throw error;
         }
 
-        session.flash('message', 'Dataset has been created successfully');
+        session.flash('message', 'Dataset has been updated successfully');
         // return response.redirect().toRoute('user.index');
         return response.redirect().back();
     }
 
+    private extractVariableNameAndSortOrder(inputString: string): { clientFileName: string; sortOrder?: number } {
+        const regex = /^([^?]+)(?:\?([^=]+)=([^&]+))?/;
+        const match = inputString.match(regex);
+
+        if (match) {
+            const clientFileName = match[1];
+
+            const param = match[2];
+            let sortOrder;
+            if (param && param.toLowerCase() === 'sortorder') {
+                sortOrder = parseInt(match[3], 10);
+            }
+
+            return { clientFileName, sortOrder };
+        } else {
+            return { clientFileName: '', sortOrder: undefined }; // or handle the no-match case as needed
+        }
+    }
+
     public async delete({ request, inertia, response, session }) {
         const id = request.param('id');
         try {
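The regex above strips an optional ?sortOrder=n suffix that the upload form appends to the client file name. Hypothetical calls illustrating the return values, derived from the regex itself:

    // expected behaviour of extractVariableNameAndSortOrder (illustrative)
    this.extractVariableNameAndSortOrder('survey.csv?sortOrder=2'); // { clientFileName: 'survey.csv', sortOrder: 2 }
    this.extractVariableNameAndSortOrder('readme.txt');             // { clientFileName: 'readme.txt', sortOrder: undefined }
    this.extractVariableNameAndSortOrder('?sortOrder=2');           // { clientFileName: '', sortOrder: undefined } (regex does not match)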
@ -923,8 +980,8 @@ export default class DatasetController {
             .preload('user', (builder) => {
                 builder.select('id', 'login');
             })
-            .preload('files')
             .where('id', id)
+            .preload('files')
             .firstOrFail();
         const validStates = ['inprogress', 'rejected_editor'];
         if (!validStates.includes(dataset.server_state)) {
@ -958,21 +1015,27 @@ export default class DatasetController {
             if (validStates.includes(dataset.server_state)) {
                 if (dataset.files && dataset.files.length > 0) {
                     for (const file of dataset.files) {
                         if (file.pathName) {
                             // delete file from filesystem
                             await Drive.delete(file.pathName);
                         }
                         // the overwritten delete method also deletes the file on the filespace
                         await file.delete();
                     }
                 }
-                // delete dataset with relations from db
-                await dataset.delete();
-                session.flash({ message: 'You have deleted 1 dataset!' });
-                return response.redirect().toRoute('dataset.list');
-            } else {
-                session.flash({
-                    warning: `You cannot delete this dataset! The status of this dataset is "${dataset.server_state}"!`,
-                });
-                return response.redirect().back();
+                const datasetFolder = `files/${id}`;
+                const folderExists = await Drive.exists(datasetFolder);
+                if (folderExists) {
+                    const folderContents = await Drive.list(datasetFolder).toArray();
+                    if (folderContents.length === 0) {
+                        await Drive.delete(datasetFolder);
+                    }
+                    // delete dataset with relations from db
+                    await dataset.delete();
+                    session.flash({ message: 'You have deleted 1 dataset!' });
+                    return response.redirect().toRoute('dataset.list');
+                } else {
+                    session.flash({
+                        warning: `You cannot delete this dataset! Invalid server_state: "${dataset.server_state}"!`,
+                    });
+                    return response.status(400).redirect().back();
+                }
             }
         } catch (error) {
             if (error instanceof ValidationException) {
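The comment "the overwritten delete method also deletes the file on the filespace" refers to the other File.ts addition from the commit message. Continuing the File model sketch given earlier, an override along these lines would produce that behaviour; again an inferred sketch, not the committed code:

    // app/Models/File.ts (sketch, a method inside the File class above)
    public async delete() {
        // remove the blob from the Drive disk before deleting the database row
        if (this.pathName && (await Drive.exists(this.pathName))) {
            await Drive.delete(this.pathName);
        }
        await super.delete();
    }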