hotfix-feat(dataset): implement file upload with validation and error handling

- Implemented file upload functionality for datasets using multipart requests.
- Added file size and type validation using VineJS (a sketch follows this list).
- Added file name length validation.
- Added a file scan that removes infected files.
- Implemented an aggregated upload limit to prevent exceeding the server's capacity.
- Added error handling for file upload failures, including temporary file cleanup.
- Updated the `DatasetController` to handle file uploads, validation, and database transactions.
- Updated the `bodyparser.ts` config to process the file upload manually (see the config sketch below).
- Updated the `api.ts` routes to fetch the statistics data.
- Updated the `main.ts` store to fetch the statistics data.
- Updated `Dashboard.vue` to display the submitters only for the administrator role.
- Updated `CardBoxWidget.vue` to display the submitters.
- Updated `ServerError.vue` to use `LayoutGuest.vue`.
- Updated `AuthController.ts` and `start/routes.ts` to handle database connection errors.
- Updated `app/exceptions/handler.ts` to handle database connection errors.
- Updated `package.json` to use the correct version of `@adonisjs/bodyparser`.
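
Two short sketches of the central pieces follow; names and values not shown in the diff are assumptions. First, the file validation idea with VineJS (AdonisJS registers a `vine.file` rule for multipart uploads; the size cap and extension allow-list here are illustrative, not the project's actual values):

import vine from '@vinejs/vine';

export const createDatasetValidator = vine.compile(
    vine.object({
        // ...other dataset fields...
        files: vine
            .array(
                vine.file({
                    size: '512mb', // illustrative per-file cap
                    extnames: ['zip', 'csv', 'txt', 'pdf'], // illustrative allow-list
                }),
            )
            .minLength(1),
    }),
);

Second, the spirit of the `bodyparser.ts` change: keep auto-processing on globally but exclude the dataset submission route, so `DatasetController.store()` can drive `request.multipart` itself (the route pattern is an assumption):

import { defineConfig } from '@adonisjs/core/bodyparser';

export default defineConfig({
    // ...defaults for form, json, raw...
    multipart: {
        autoProcess: true,
        // processed manually in DatasetController.store()
        processManually: ['/submitter/dataset/submit'], // assumed route
        limit: '512mb', // aggregated limit, read back via config.get('bodyparser')
    },
});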
Kaimbacher 2025-03-26 14:19:06 +01:00
parent a25f8bf6f7
commit b93e46207f
15 changed files with 637 additions and 200 deletions

AuthController.ts

@@ -5,7 +5,7 @@ import BackupCode from '#models/backup_code';
// import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException';
import { authValidator } from '#validators/auth';
import hash from '@adonisjs/core/services/hash';
import db from '@adonisjs/lucid/services/db';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
// import { Authenticator } from '@adonisjs/auth';
// import { LoginState } from 'Contracts/enums';
@@ -29,6 +29,10 @@ export default class AuthController {
const { email, password } = request.only(['email', 'password']);
try {
await db.connection().rawQuery('SELECT 1'); // probe the DB connection before attempting login
// // attempt to verify credential and login user
// await auth.use('web').attempt(email, plainPassword);
@@ -51,6 +55,9 @@ export default class AuthController {
await auth.use('web').login(user);
} catch (error) {
if (error.code === 'ECONNREFUSED') {
// bubble database-connection failures up to the global exception handler
throw error;
}
// if login fails, return vague form message and redirect back
session.flash('message', 'Your username, email, or password is incorrect');
return response.redirect().back();
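
The rethrown `ECONNREFUSED` is meant to reach the global handler from `app/exceptions/handler.ts`; a minimal sketch of that idea, assuming an Inertia page component named `Error/ServerError` (the page name and the 503 status are assumptions):

import { ExceptionHandler } from '@adonisjs/core/http';
import type { HttpContext } from '@adonisjs/core/http';

export default class HttpExceptionHandler extends ExceptionHandler {
    async handle(error: unknown, ctx: HttpContext) {
        // database unreachable: render the guest-layout error page instead of failing opaquely
        if ((error as { code?: string })?.code === 'ECONNREFUSED') {
            ctx.response.status(503);
            return ctx.inertia.render('Error/ServerError'); // assumed page component
        }
        return super.handle(error, ctx);
    }
}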

DatasetController.ts

@@ -40,12 +40,28 @@ import path from 'path';
import { Exception } from '@adonisjs/core/exceptions';
import { MultipartFile } from '@adonisjs/core/types/bodyparser';
import * as crypto from 'crypto';
// import MimeType from '#models/mime_type';
import { pipeline } from 'node:stream/promises';
import { createWriteStream } from 'node:fs';
import type { Multipart } from '@adonisjs/bodyparser';
import * as fs from 'fs';
import { join, isAbsolute } from 'node:path';
import type { BodyParserConfig } from '#models/types';
import { createId } from '@paralleldrive/cuid2';
import { tmpdir } from 'node:os';
import config from '@adonisjs/core/services/config';
interface Dictionary {
[index: string]: string;
}
import vine, { SimpleMessagesProvider, errors } from '@vinejs/vine';
export default class DatasetController {
/**
* Bodyparser config
*/
config: BodyParserConfig = config.get('bodyparser');
public async index({ auth, request, inertia }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User;
const page = request.input('page', 1);
@@ -401,22 +417,140 @@ export default class DatasetController {
return response.redirect().back();
}
/**
* Returns the tmp path for storing the files temporarily
*/
private getTmpPath(config: BodyParserConfig['multipart']): string {
if (typeof config.tmpFileName === 'function') {
const tmpPath = config.tmpFileName();
return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
}
return join(tmpdir(), createId());
}
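// With the default config this resolves to something like '<os tmp dir>/<cuid>'
// via createId(); a tmpFileName function in the config overrides that behavior.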
/**
* Returns config for a given type
*/
private getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
const config = this.config[type];
return config;
}
private parseBytesSize(size: string): number {
const units = {
kb: 1024,
mb: 1024 * 1024,
gb: 1024 * 1024 * 1024,
tb: 1024 * 1024 * 1024 * 1024,
};
// Match size strings such as '100mb' or '2GB' (case-insensitive)
const match = size.match(/^(\d+)(kb|mb|gb|tb)$/i);
if (!match) {
throw new Error('Invalid size format');
}
const [, value, unit] = match;
return parseInt(value, 10) * units[unit.toLowerCase() as keyof typeof units];
}
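// Illustrative check: parseBytesSize('100mb') === 104857600, so a `limit: '100mb'`
// entry in config/bodyparser.ts caps the aggregated upload size at 100 MB.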
public async store({ auth, request, response, session }: HttpContext) {
// node ace make:validator CreateDataset
// At the top of the store() method, declare an array to hold temporary file paths
const uploadedTmpFiles: string[] = [];
// Aggregated limit example (adjust as needed)
const multipartConfig = this.getConfigFor('multipart');
const aggregatedLimit = multipartConfig.limit ? this.parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
// const aggregatedLimit = 200 * 1024 * 1024;
let totalUploadedSize = 0;
// Helper function to format bytes as human-readable text
function formatBytes(bytes: number): string {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
// const enabledExtensions = await this.getEnabledExtensions();
const multipart: Multipart = request.multipart;
multipart.onFile('files', { deferValidations: true }, async (part) => {
// Attach an individual file size accumulator if needed
let fileUploadedSize = 0;
// Simply accumulate the size in on('data') without performing the expensive check per chunk
part.on('data', (chunk) => {
// reporter(chunk);
// Increase counters using the chunk length
fileUploadedSize += chunk.length;
});
// After the file is completely read, update the global counter and check aggregated limit
part.on('end', () => {
totalUploadedSize += fileUploadedSize;
part.file.size = fileUploadedSize;
// Record the temporary file path
if (part.file.tmpPath) {
uploadedTmpFiles.push(part.file.tmpPath);
}
if (totalUploadedSize > aggregatedLimit) {
// Clean up all temporary files if aggregate limit is exceeded
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
const error = new errors.E_VALIDATION_ERROR({
'upload error': `Aggregated upload limit of ${formatBytes(aggregatedLimit)} exceeded. The total size of files being uploaded would exceed the limit.`,
});
request.multipart.abort(error);
}
});
part.on('error', (error) => {
// fileUploadError = error;
request.multipart.abort(error);
});
// await pipeline(part, createWriteStream(filePath));
// return { filePath };
// Process file with error handling
try {
// Extract extension from the client file name, e.g. "Tethys 5 - Ampflwang_dataset.zip"
const ext = path.extname(part.file.clientName).replace('.', '');
// Attach the extracted extension to the file object for later use
part.file.extname = ext;
const tmpPath = this.getTmpPath(multipartConfig);
(part.file as any).tmpPath = tmpPath;
const writeStream = createWriteStream(tmpPath);
await pipeline(part, writeStream);
} catch (error) {
request.multipart.abort(new errors.E_VALIDATION_ERROR({ 'upload error': error.message }));
}
});
try {
// Step 2 - Validate request body against the schema
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
// await request.validate(CreateDatasetValidator);
await request.validateUsing(createDatasetValidator);
// console.log({ payload });
await multipart.process();
// // Instead of letting an error abort the controller, check if any error occurred
// if (fileUploadError) {
// // Flash the error and return an inertia view that shows the error message.
// session.flash('errors', { 'upload error': [fileUploadError.message] });
// return response.redirect().back();
// }
} catch (error) {
// Step 3 - Handle errors: flash the validation messages and redirect back
// (previously: return response.badRequest(error.messages);)
session.flash('errors', error.messages);
return response.redirect().back();
}
let trx: TransactionClientContract | null = null;
try {
// Validate again now that the multipart stream has been processed and file metadata is available
await request.validateUsing(createDatasetValidator);
trx = await db.transaction();
const user = (await User.find(auth.user?.id)) as User;
@@ -425,6 +559,14 @@ export default class DatasetController {
await trx.commit();
console.log('Dataset and related models created successfully');
} catch (error) {
// Clean up temporary files if validation or later steps fail
uploadedTmpFiles.forEach((tmpPath) => {
try {
fs.unlinkSync(tmpPath);
} catch (cleanupError) {
console.error('Error cleaning up temporary file:', cleanupError);
}
});
if (trx !== null) {
await trx.rollback();
}
@@ -437,8 +579,12 @@ export default class DatasetController {
return response.redirect().toRoute('dataset.list');
// return response.redirect().back();
}
private async createDatasetAndAssociations(
user: User,
request: HttpContext['request'],
trx: TransactionClientContract,
// uploadedFiles: Array<MultipartFile>,
) {
// Create a new instance of the Dataset model:
const dataset = new Dataset();
dataset.type = request.input('type');
@@ -1275,7 +1421,7 @@ export default class DatasetController {
// This should be an array of collection ids.
const collections: number[] = request.input('collections', []);
// Synchronize the dataset collections using the transaction.
await dataset.useTransaction(trx).related('collections').sync(collections);
// Commit the transaction.
await trx.commit();