forked from geolba/tethys.backend
- added npm package dotenv-webpack for using env variables on the client side
- added an API FileController for downloading files, e.g. /api/download/1022
- hash values are now also created when submitting a new dataset
- added edit-dataset functionality for the role submitter
- added the following route for the role submitter: '/dataset/:id/update', 'DatasetController.update'
- created a separate UpdateDatasetValidator.ts for validating the updated dataset
- npm updates
This commit is contained in:
parent a7142f694f
commit d8bdce1369

23 changed files with 2181 additions and 853 deletions
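For orientation, the endpoints this commit relies on would be registered in start/routes.ts roughly as follows. This is a hedged sketch: only the paths and handler names come from the commit message; the HTTP verbs and the middleware name are assumptions.

    // start/routes.ts: hypothetical registration of the routes used in this commit
    import Route from '@ioc:Adonis/Core/Route';

    // file download endpoint, e.g. GET /api/download/1022
    Route.get('/api/download/:id', 'Api/FileController.findOne');

    // edit/update routes for the submitter role ('auth' middleware is an assumption)
    Route.group(() => {
        Route.get('/dataset/:id/edit', 'DatasetController.edit');
        Route.post('/dataset/:id/update', 'DatasetController.update');
    }).middleware(['auth']);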
app/Controllers/Http/Api/FileController.ts (new file, 54 lines)

@@ -0,0 +1,54 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import File from 'App/Models/File';
import { StatusCodes } from 'http-status-codes';
import * as fs from 'fs';
import * as path from 'path';

// node ace make:controller Author
export default class FileController {
    // @Get("download/:id")
    public async findOne({ response, params }: HttpContextContract) {
        const id = params.id;
        const file = await File.findOrFail(id);
        // const file = await File.findOne({
        //     where: { id: id },
        // });
        if (file) {
            const filePath = '/storage/app/public/' + file.pathName;
            const ext = path.extname(filePath);
            const fileName = file.label + ext;
            try {
                fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
                // console.log("can read/write:", path);

                response
                    .header('Cache-Control', 'no-cache private')
                    .header('Content-Description', 'File Transfer')
                    .header('Content-Type', file.mimeType)
                    .header('Content-Disposition', 'inline; filename=' + fileName)
                    .header('Content-Transfer-Encoding', 'binary')
                    .header('Access-Control-Allow-Origin', '*')
                    .header('Access-Control-Allow-Methods', 'GET,POST');

                response.status(StatusCodes.OK).download(filePath);
            } catch (err) {
                // console.log("no access:", path);
                response.status(StatusCodes.NOT_FOUND).send({
                    message: `File with id ${id} doesn't exist on file server`,
                });
            }

            // res.status(StatusCodes.OK).sendFile(filePath, (err) => {
            //     // res.setHeader("Content-Type", "application/json");
            //     // res.removeHeader("Content-Disposition");
            //     res.status(StatusCodes.NOT_FOUND).send({
            //         message: `File with id ${id} doesn't exist on file server`,
            //     });
            // });
        } else {
            response.status(StatusCodes.NOT_FOUND).send({
                message: `Cannot find File with id=${id}.`,
            });
        }
    }
}
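As a quick usage sketch (not part of the commit), the endpoint can be exercised from the client with a plain fetch call; the id 1022 is the example from the commit message:

    // hypothetical client-side usage of the new download endpoint
    async function downloadFile(id: number): Promise<Blob> {
        const res = await fetch(`/api/download/${id}`);
        if (!res.ok) {
            throw new Error(`Download failed with status ${res.status}`);
        }
        return await res.blob(); // file content, typed by the Content-Type header
    }

    downloadFile(1022).then((blob) => console.log('received', blob.size, 'bytes'));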
app/Controllers/Http/DatasetController.ts

@@ -15,6 +15,7 @@ import Database from '@ioc:Adonis/Lucid/Database';
 import { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';
 import Subject from 'App/Models/Subject';
 import CreateDatasetValidator from 'App/Validators/CreateDatasetValidator';
+import UpdateDatasetValidator from 'App/Validators/UpdateDatasetValidator';
 import {
     TitleTypes,
     DescriptionTypes,
@@ -33,8 +34,6 @@ import ClamScan from 'clamscan';
 import { ValidationException } from '@ioc:Adonis/Core/Validator';
 import Drive from '@ioc:Adonis/Core/Drive';
 import { Exception } from '@adonisjs/core/build/standalone';
-// import XmlModel from 'App/Library/XmlModel';
-// import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';

 export default class DatasetController {
     public async index({ auth, request, inertia }: HttpContextContract) {
@@ -355,7 +354,7 @@ export default class DatasetController {

         //store licenses:
         const licenses: number[] = request.input('licenses', []);
-        dataset.useTransaction(trx).related('licenses').sync(licenses);
+        await dataset.useTransaction(trx).related('licenses').sync(licenses);

         // save authors and contributors
         await this.savePersons(dataset, request.input('authors', []), 'author', trx);
@@ -456,7 +455,7 @@ export default class DatasetController {
             newFile.visibleInOai = true;
             // let path = coverImage.filePath;
             await dataset.useTransaction(trx).related('files').save(newFile);
-            // await newFile.createHashValues();
+            await newFile.createHashValues(trx);
         }
     }
@@ -682,29 +681,39 @@ export default class DatasetController {
         // throw new GeneralException(trans('exceptions.publish.release.update_error'));
     }

-    public async edit({ params, inertia }) {
-        const datasetQuery = Dataset.query().where('id', params.id);
-        datasetQuery.preload('titles').preload('descriptions').preload('coverage');
-        const dataset = await datasetQuery.firstOrFail();
+    public async edit({ request, inertia, response }) {
+        const id = request.param('id');
+        const datasetQuery = Dataset.query().where('id', id);
+        datasetQuery
+            .preload('titles', (query) => query.orderBy('id', 'asc'))
+            .preload('descriptions', (query) => query.orderBy('id', 'asc'))
+            .preload('coverage')
+            .preload('licenses')
+            .preload('authors')
+            .preload('contributors')
+            .preload('subjects')
+            .preload('references')
+            .preload('files');

-        // await dataset.loadMany([
-        //     'licenses',
-        //     'authors',
-        //     'contributors',
-        //     'titles',
-        //     'abstracts',
-        //     'files',
-        //     'coverage',
-        //     'subjects',
-        //     'references',
-        // ]);
+        const dataset = await datasetQuery.firstOrFail();
+        const validStates = ['inprogress', 'rejected_editor'];
+        if (!validStates.includes(dataset.server_state)) {
+            // session.flash('errors', 'Invalid server state!');
+            return response
+                .flash(
+                    'warning',
+                    `Invalid server state. Dataset with id ${id} cannot be edited. Dataset has server state ${dataset.server_state}.`,
+                )
+                .redirect()
+                .toRoute('dataset.list');
+        }

         const titleTypes = Object.entries(TitleTypes)
-            // .filter(([value]) => value !== 'Main')
+            .filter(([value]) => value !== 'Main')
             .map(([key, value]) => ({ value: key, label: value }));

         const descriptionTypes = Object.entries(DescriptionTypes)
-            // .filter(([value]) => value !== 'Abstract')
+            .filter(([value]) => value !== 'Abstract')
             .map(([key, value]) => ({ value: key, label: value }));

         const languages = await Language.query().where('active', true).pluck('part1', 'part1');
@@ -724,33 +733,11 @@ export default class DatasetController {
         const currentYear = currentDate.getFullYear();
         const years = Array.from({ length: currentYear - 1990 + 1 }, (_, index) => 1990 + index);

-        // const licenses = await License.query()
-        //     .select('id', 'name_long', 'link_licence')
-        //     .orderBy('sort_order')
-        //     .fetch();
-        const licenses = await License.query().select('id', 'name_long', 'link_licence').orderBy('sort_order');
+        const licenses = await License.query().select('id', 'name_long').where('active', 'true').pluck('name_long', 'id');
+        // const userHasRoles = user.roles;
+        // const datasetHasLicenses = await dataset.related('licenses').query().pluck('id');
+        // const checkeds = dataset.licenses.first().id;

         const keywordTypes = {
             uncontrolled: 'uncontrolled',
             swd: 'swd',
         };

-        const referenceTypes = ['DOI', 'Handle', 'ISBN', 'ISSN', 'URL', 'URN'];
-
-        const relationTypes = [
-            'IsSupplementTo',
-            'IsSupplementedBy',
-            'IsContinuedBy',
-            'Continues',
-            'IsNewVersionOf',
-            'IsPartOf',
-            'HasPart',
-            'Compiles',
-            'IsVariantFormOf',
-        ];

         const doctypes = {
             analysisdata: { label: 'Analysis', value: 'analysisdata' },
             measurementdata: { label: 'Measurements', value: 'measurementdata' },
@@ -771,16 +758,164 @@ export default class DatasetController {
             // messages,
             projects,
             licenses,
+            // datasetHasLicenses: Object.keys(datasetHasLicenses).map((key) => datasetHasLicenses[key]), //convert object to array with license ids
+            // checkeds,
             years,
             // languages,
             keywordTypes,
-            referenceTypes,
-            relationTypes,
+            subjectTypes: SubjectTypes,
+            referenceIdentifierTypes: Object.entries(ReferenceIdentifierTypes).map(([key, value]) => ({ value: key, label: value })),
+            relationTypes: Object.entries(RelationTypes).map(([key, value]) => ({ value: key, label: value })),
             doctypes,
         });
     }
+
+    public async update({ request, response, session }: HttpContextContract) {
+        try {
+            // await request.validate({ schema: newDatasetSchema, messages: this.messages });
+            await request.validate(UpdateDatasetValidator);
+        } catch (error) {
+            // - Handle errors
+            // return response.badRequest(error.messages);
+            throw error;
+        }
+        const id = request.param('id');
+
+        let trx: TransactionClientContract | null = null;
+        try {
+            trx = await Database.transaction();
+            // const user = (await User.find(auth.user?.id)) as User;
+            // await this.createDatasetAndAssociations(user, request, trx);
+            const dataset = await Dataset.findOrFail(id);
+
+            // save the licenses
+            const licenses: number[] = request.input('licenses', []);
+            await dataset.useTransaction(trx).related('licenses').sync(licenses);
+
+            // save authors and contributors
+            await dataset.useTransaction(trx).related('authors').sync([]);
+            await dataset.useTransaction(trx).related('contributors').sync([]);
+            await this.savePersons(dataset, request.input('authors', []), 'author', trx);
+            await this.savePersons(dataset, request.input('contributors', []), 'contributor', trx);
+
+            // save the titles:
+            const titles = request.input('titles', []);
+            for (const titleData of titles) {
+                if (titleData.id) {
+                    const title = await Title.findOrFail(titleData.id);
+                    title.value = titleData.value;
+                    title.language = titleData.language;
+                    title.type = titleData.type;
+                    if (title.$isDirty) {
+                        await title.useTransaction(trx).save();
+                    }
+                } else {
+                    const title = new Title();
+                    title.fill(titleData);
+                    await dataset.useTransaction(trx).related('titles').save(title);
+                }
+            }
+
+            // save the abstracts
+            const descriptions = request.input('descriptions', []);
+            for (const descriptionData of descriptions) {
+                if (descriptionData.id) {
+                    const description = await Description.findOrFail(descriptionData.id);
+                    description.value = descriptionData.value;
+                    description.language = descriptionData.language;
+                    description.type = descriptionData.type;
+                    if (description.$isDirty) {
+                        await description.useTransaction(trx).save();
+                    }
+                } else {
+                    const description = new Description();
+                    description.fill(descriptionData);
+                    await dataset.useTransaction(trx).related('descriptions').save(description);
+                }
+            }
+
+            // save already existing files
+            const files = request.input('fileInputs', []);
+            for (const fileData of files) {
+                if (fileData.id) {
+                    const file = await File.findOrFail(fileData.id);
+                    file.label = fileData.label;
+                    file.sortOrder = fileData.sort_order;
+                    if (file.$isDirty) {
+                        await file.useTransaction(trx).save();
+                    }
+                }
+            }
+
+            // handle newly uploaded files:
+            const uploadedFiles = request.files('files');
+            if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
+                for (const [index, fileData] of uploadedFiles.entries()) {
+                    const fileName = `file-${cuid()}.${fileData.extname}`;
+                    const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // fallback to a default MIME type
+                    const datasetFolder = `files/${dataset.id}`;
+                    await fileData.moveToDisk(
+                        datasetFolder,
+                        {
+                            name: fileName,
+                            overwrite: true, // overwrite in case of conflict
+                        },
+                        'local',
+                    );
+                    // save file metadata into db
+                    const newFile = new File();
+                    newFile.pathName = `${datasetFolder}/${fileName}`;
+                    newFile.fileSize = fileData.size;
+                    newFile.mimeType = mimeType;
+                    newFile.label = fileData.clientName;
+                    newFile.sortOrder = index;
+                    newFile.visibleInFrontdoor = true;
+                    newFile.visibleInOai = true;
+                    await dataset.useTransaction(trx).related('files').save(newFile);
+                    await newFile.createHashValues(trx);
+                }
+            }
+
+            const input = request.only(['project_id', 'embargo_date', 'language', 'type', 'creating_corporation']);
+            dataset.merge(input);
+            await dataset.useTransaction(trx).save();
+
+            await trx.commit();
+            console.log('Dataset and related models updated successfully');
+        } catch (error) {
+            if (trx !== null) {
+                await trx.rollback();
+            }
+            console.error('Failed to update dataset and related models:', error);
+            // throw new ValidationException(true, { 'upload error': `failed to update dataset and related models. ${error}` });
+            throw error;
+        }
+
+        session.flash('message', 'Dataset has been updated successfully');
+        // return response.redirect().toRoute('user.index');
+        return response.redirect().back();
+    }
+
     public async delete({ request, inertia, response, session }) {
         const id = request.param('id');
         try {
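For context, the update action above consumes the form fields that UpdateDatasetValidator (further down) validates. A hypothetical, abridged client-side submission sketch; the route verb, URL, and concrete values are assumptions, not taken from this commit:

    // hypothetical submission against POST /dataset/:id/update (abridged payload)
    const form = new FormData();
    form.append('language', 'en');
    form.append('type', 'measurementdata');
    form.append('creating_corporation', 'Tethys RDR');
    form.append('rights', 'true');
    form.append('licenses[0]', '1');
    form.append('titles[0][value]', 'Updated main title');
    form.append('titles[0][type]', 'Main');
    form.append('titles[0][language]', 'en');
    // new uploads arrive in the controller via request.files('files')
    // form.append('files[]', fileInput.files[0]);

    await fetch('/dataset/42/update', { method: 'POST', body: form });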
app/Models/Description.ts

@@ -9,6 +9,11 @@ export default class Description extends BaseModel {
     public static timestamps = false;
+    public static fillable: string[] = ['value', 'type', 'language'];

     @column({
         isPrimary: true,
     })
     public id: number;

     @column({})
     public document_id: number;
app/Models/File.ts

@@ -1,19 +1,18 @@
 import { DateTime } from 'luxon';
-import {
-    column,
-    hasMany,
-    HasMany,
-    belongsTo,
-    BelongsTo,
-    // manyToMany,
-    // ManyToMany,
-    SnakeCaseNamingStrategy,
-} from '@ioc:Adonis/Lucid/Orm';
+import { column, hasMany, HasMany, belongsTo, BelongsTo, SnakeCaseNamingStrategy, computed } from '@ioc:Adonis/Lucid/Orm';
 import HashValue from './HashValue';
 import Dataset from './Dataset';
 import BaseModel from './BaseModel';
 // import { Buffer } from 'buffer';
+import * as fs from 'fs';
+import crypto from 'crypto';
+import { TransactionClientContract } from '@ioc:Adonis/Lucid/Database';

 export default class File extends BaseModel {
     // private readonly _data: Uint8Array;
     // private readonly _type: string;
     // private readonly _size: number;

     public static namingStrategy = new SnakeCaseNamingStrategy();
     public static primaryKey = 'id';
     public static table = 'document_files';
@@ -73,4 +72,93 @@ export default class File extends BaseModel {
         foreignKey: 'file_id',
     })
     public hashvalues: HasMany<typeof HashValue>;
+
+    @computed({
+        serializeAs: 'filePath',
+    })
+    public get filePath() {
+        return `/storage/app/public/${this.pathName}`;
+    }
+
+    @computed({
+        serializeAs: 'size',
+    })
+    public get size() {
+        return this.fileSize;
+    }
+
+    @computed({
+        serializeAs: 'type',
+    })
+    public get type() {
+        return this.mimeType;
+    }
+
+    @computed({
+        serializeAs: 'name',
+    })
+    get name(): string {
+        return this.label;
+    }
+
+    @computed({
+        serializeAs: 'lastModified',
+    })
+    get lastModified(): number {
+        return this.updatedAt.toUnixInteger(); //.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+    }
+
+    readonly webkitRelativePath: string = '';
+
+    @computed({
+        serializeAs: 'fileData',
+    })
+    public get fileData(): string {
+        // const fileData = fs.readFileSync(this.filePath);
+        const fileContent: Buffer = fs.readFileSync(this.filePath);
+        // create a JSON string that carries the file content in the property "blob"
+        const json = JSON.stringify({ blob: fileContent.toString('base64') });
+        return json;
+    }
+
+    public async createHashValues(trx?: TransactionClientContract) {
+        const hashtypes: string[] = ['md5', 'sha512'];
+
+        for (const type of hashtypes) {
+            const hash = new HashValue();
+            hash.type = type;
+            const hashString = await this.checksumFile(this.filePath, type);
+            hash.value = hashString;
+
+            // https://github.com/adonisjs/core/discussions/1872#discussioncomment-132289
+            const file: File = this;
+            if (trx) {
+                await file.useTransaction(trx).related('hashvalues').save(hash); // save the hash value inside the transaction
+            } else {
+                await file.related('hashvalues').save(hash); // save the hash value to the database
+            }
+        }
+    }
+
+    private async checksumFile(path: string, hashName = 'md5'): Promise<string> {
+        return new Promise((resolve, reject) => {
+            const hash = crypto.createHash(hashName);
+            const stream = fs.createReadStream(path);
+            stream.on('error', (err) => reject(err));
+            stream.on('data', (chunk) => hash.update(chunk));
+            stream.on('end', () => resolve(hash.digest('hex')));
+        });
+    }
+}
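The optional trx parameter lets callers keep the hash-row writes inside the same transaction as the file row, which is how DatasetController uses it. A minimal illustrative call site (the function name and id are made up):

    // hypothetical call site: recompute hashes for a stored file inside a transaction
    import Database from '@ioc:Adonis/Lucid/Database';
    import File from 'App/Models/File';

    async function rehashFile(fileId: number): Promise<void> {
        const trx = await Database.transaction();
        try {
            const file = await File.findOrFail(fileId);
            // writes one md5 and one sha512 row to file_hashvalues via the trx
            await file.createHashValues(trx);
            await trx.commit();
        } catch (error) {
            await trx.rollback();
            throw error;
        }
    }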
app/Models/HashValue.ts

@@ -3,7 +3,7 @@ import File from './File';

 export default class HashValue extends BaseModel {
     public static namingStrategy = new SnakeCaseNamingStrategy();
-    public static primaryKey = 'file_id, type';
+    // public static primaryKey = 'file_id,type';
     public static table = 'file_hashvalues';

     // static get primaryKey () {
@@ -20,10 +20,10 @@ export default class HashValue extends BaseModel {
     // public id: number;

     // Foreign key is still on the same model
-    @column({})
+    @column({ isPrimary: true })
     public file_id: number;

-    @column({})
+    @column({ isPrimary: true })
     public type: string;

     @column()
app/Models/Title.ts

@@ -10,6 +10,11 @@ export default class Title extends BaseModel {
     public static timestamps = false;
+    public static fillable: string[] = ['value', 'type', 'language'];

     @column({
         isPrimary: true,
     })
     public id: number;

     @column({})
     public document_id: number;
app/Validators/CreateDatasetValidator.ts

@@ -136,7 +136,7 @@ export default class CreateDatasetValidator {
     'unique': '{{ field }} must be unique, and this value is already taken',
     // 'confirmed': '{{ field }} is not correct',
     'licenses.minLength': 'at least {{ options.minLength }} license must be defined',
-    'licenses.*.number': 'Define roles as valid numbers',
+    'licenses.*.number': 'Define licences as valid numbers',
     'rights.equalTo': 'you must agree to continue',

     'titles.0.value.minLength': 'Main Title must be at least {{ options.minLength }} characters long',
app/Validators/UpdateDatasetValidator.ts (new file, 179 lines)

@@ -0,0 +1,179 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import dayjs from 'dayjs';
import { TitleTypes, DescriptionTypes, RelationTypes, ReferenceIdentifierTypes, ContributorTypes } from 'Contracts/enums';

export default class UpdateDatasetValidator {
    constructor(protected ctx: HttpContextContract) {}

    /*
     * Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
     *
     * For example:
     * 1. The username must be of data type string. But then also, it should
     *    not contain special characters or numbers.
     *    schema.string({}, [ rules.alpha() ])
     *
     * 2. The email must be of data type string, formatted as a valid
     *    email. But also, not used by any other user.
     *    schema.string({}, [
     *        rules.email(),
     *        rules.unique({ table: 'users', column: 'email' }),
     *    ])
     */
    public schema = schema.create({
        // first step
        language: schema.string({ trim: true }, [
            rules.regex(/^[a-zA-Z0-9-_]+$/), // must be alphanumeric with hyphens or underscores
        ]),
        licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the dataset
        rights: schema.string([rules.equalTo('true')]),
        // second step
        type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
        creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
        titles: schema.array([rules.minLength(1)]).members(
            schema.object().members({
                value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                type: schema.enum(Object.values(TitleTypes)),
                language: schema.string({ trim: true }, [
                    rules.minLength(2),
                    rules.maxLength(255),
                    rules.translatedLanguage('/language', 'type'),
                ]),
            }),
        ),
        descriptions: schema.array([rules.minLength(1)]).members(
            schema.object().members({
                value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                type: schema.enum(Object.values(DescriptionTypes)),
                language: schema.string({ trim: true }, [
                    rules.minLength(2),
                    rules.maxLength(255),
                    rules.translatedLanguage('/language', 'type'),
                ]),
            }),
        ),
        authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
        contributors: schema.array.optional().members(
            schema.object().members({
                email: schema.string({ trim: true }),
                pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
            }),
        ),
        // third step
        project_id: schema.number.optional(),
        embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
        coverage: schema.object().members({
            x_min: schema.number(),
            x_max: schema.number(),
            y_min: schema.number(),
            y_max: schema.number(),
            elevation_absolut: schema.number.optional(),
            elevation_min: schema.number.optional([rules.requiredIfExists('elevation_max')]),
            elevation_max: schema.number.optional([rules.requiredIfExists('elevation_min')]),
            depth_absolut: schema.number.optional(),
            depth_min: schema.number.optional([rules.requiredIfExists('depth_max')]),
            depth_max: schema.number.optional([rules.requiredIfExists('depth_min')]),
        }),
        references: schema.array.optional([rules.uniqueArray('value')]).members(
            schema.object().members({
                value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
                type: schema.enum(Object.values(ReferenceIdentifierTypes)),
                relation: schema.enum(Object.values(RelationTypes)),
                label: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
            }),
        ),
        subjects: schema.array([rules.minLength(3), rules.uniqueArray('value')]).members(
            schema.object().members({
                value: schema.string({ trim: true }, [
                    rules.minLength(3),
                    rules.maxLength(255),
                    // rules.unique({ table: 'dataset_subjects', column: 'value' }),
                ]),
                // type: schema.enum(Object.values(TitleTypes)),
                language: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
            }),
        ),
        // file: schema.file({
        //     size: '100mb',
        //     extnames: ['jpg', 'gif', 'png'],
        // }),
        files: schema.array.optional().members(
            schema.file({
                size: '100mb',
                extnames: ['jpg', 'gif', 'png', 'tif', 'pdf'],
            }),
        ),
    });

    /**
     * Custom messages for validation failures. You can make use of dot notation `(.)`
     * for targeting nested fields and array expressions `(*)` for targeting all
     * children of an array. For example:
     *
     * {
     *     'profile.username.required': 'Username is required',
     *     'scores.*.number': 'Define scores as valid numbers'
     * }
     */
    public messages: CustomMessages = {
        'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
        'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
        'required': '{{ field }} is required',
        'unique': '{{ field }} must be unique, and this value is already taken',
        // 'confirmed': '{{ field }} is not correct',
        'licenses.minLength': 'at least {{ options.minLength }} license must be defined',
        'licenses.*.number': 'Define licences as valid numbers',
        'rights.equalTo': 'you must agree to continue',

        'titles.0.value.minLength': 'Main Title must be at least {{ options.minLength }} characters long',
        'titles.0.value.required': 'Main Title is required',
        'titles.*.value.required': 'Additional title is required, if defined',
        'titles.*.type.required': 'Additional title type is required',
        'titles.*.language.required': 'Additional title language is required',
        'titles.*.language.translatedLanguage': 'The language of the translated title must be different from the language of the dataset',

        'descriptions.0.value.minLength': 'Main Abstract must be at least {{ options.minLength }} characters long',
        'descriptions.0.value.required': 'Main Abstract is required',
        'descriptions.*.value.required': 'Additional description is required, if defined',
        'descriptions.*.type.required': 'Additional description type is required',
        'descriptions.*.language.required': 'Additional description language is required',
        'descriptions.*.language.translatedLanguage':
            'The language of the translated description must be different from the language of the dataset',

        'authors.minLength': 'at least {{ options.minLength }} author must be defined',
        'contributors.*.pivot_contributor_type.required': 'contributor type is required, if defined',

        'after': `{{ field }} must be after ${dayjs().add(10, 'day')}`,

        'subjects.minLength': 'at least {{ options.minLength }} keywords must be defined',
        'subjects.uniqueArray': 'The {{ options.array }} array must have unique values based on the {{ options.field }} attribute.',
        'subjects.*.value.required': 'keyword value is required',
        'subjects.*.value.minLength': 'keyword value must be at least {{ options.minLength }} characters long',
        'subjects.*.type.required': 'keyword type is required',
        'subjects.*.language.required': 'language of keyword is required',

        'references.*.value.required': 'Additional reference value is required, if defined',
        'references.*.type.required': 'Additional reference identifier type is required',
        'references.*.relation.required': 'Additional reference relation type is required',
        'references.*.label.required': 'Additional reference label is required',

        'files.minLength': 'At least {{ options.minLength }} file upload is required.',
        'files.*.size': 'file size is too big',
        'files.extnames': 'file extension is not supported',
    };
}