hotfix-feat(dataset): implement file upload with validation and error handling
- Implemented file upload functionality for datasets using multipart requests. - Added file size and type validation using VineJS. - Added file name length validation. - Added file scan to remove infected files. - Implemented aggregated upload limit to prevent exceeding the server's capacity. - Added error handling for file upload failures, including temporary file cleanup. - Updated the `DatasetController` to handle file uploads, validation, and database transactions. - Updated the `bodyparser.ts` config to process the file upload manually. - Updated the `api.ts` routes to fetch the statistic data. - Updated the `main.ts` store to fetch the statistic data. - Updated the `Dashboard.vue` to display the submitters only for administrator role. - Updated the `CardBoxWidget.vue` to display the submitters. - Updated the `ServerError.vue` to use the LayoutGuest.vue. - Updated the `AuthController.ts` and `start/routes.ts` to handle the database connection errors. - Updated the `app/exceptions/handler.ts` to handle the database connection errors. - Updated the `package.json` to use the correct version of the `@adonisjs/bodyparser`.
This commit is contained in:
parent
a25f8bf6f7
commit
b93e46207f
15 changed files with 637 additions and 200 deletions
|
@ -5,7 +5,7 @@ import BackupCode from '#models/backup_code';
|
|||
// import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException';
|
||||
import { authValidator } from '#validators/auth';
|
||||
import hash from '@adonisjs/core/services/hash';
|
||||
|
||||
import db from '@adonisjs/lucid/services/db';
|
||||
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
|
||||
// import { Authenticator } from '@adonisjs/auth';
|
||||
// import { LoginState } from 'Contracts/enums';
|
||||
|
@ -29,6 +29,10 @@ export default class AuthController {
|
|||
const { email, password } = request.only(['email', 'password']);
|
||||
|
||||
try {
|
||||
|
||||
await db.connection().rawQuery('SELECT 1')
|
||||
|
||||
|
||||
// // attempt to verify credential and login user
|
||||
// await auth.use('web').attempt(email, plainPassword);
|
||||
|
||||
|
@ -51,6 +55,9 @@ export default class AuthController {
|
|||
|
||||
await auth.use('web').login(user);
|
||||
} catch (error) {
|
||||
if (error.code === 'ECONNREFUSED') {
|
||||
throw error
|
||||
}
|
||||
// if login fails, return vague form message and redirect back
|
||||
session.flash('message', 'Your username, email, or password is incorrect');
|
||||
return response.redirect().back();
|
||||
|
|
|
@ -40,12 +40,28 @@ import path from 'path';
|
|||
import { Exception } from '@adonisjs/core/exceptions';
|
||||
import { MultipartFile } from '@adonisjs/core/types/bodyparser';
|
||||
import * as crypto from 'crypto';
|
||||
// import MimeType from '#models/mime_type';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { createWriteStream } from 'node:fs';
|
||||
import type { Multipart } from '@adonisjs/bodyparser';
|
||||
import * as fs from 'fs';
|
||||
import { join, isAbsolute } from 'node:path';
|
||||
import type { BodyParserConfig } from '#models/types';
|
||||
import { createId } from '@paralleldrive/cuid2';
|
||||
import { tmpdir } from 'node:os';
|
||||
import config from '@adonisjs/core/services/config';
|
||||
|
||||
interface Dictionary {
|
||||
[index: string]: string;
|
||||
}
|
||||
import vine, { SimpleMessagesProvider, errors } from '@vinejs/vine';
|
||||
|
||||
export default class DatasetController {
|
||||
/**
|
||||
* Bodyparser config
|
||||
*/
|
||||
config: BodyParserConfig = config.get('bodyparser');
|
||||
|
||||
public async index({ auth, request, inertia }: HttpContext) {
|
||||
const user = (await User.find(auth.user?.id)) as User;
|
||||
const page = request.input('page', 1);
|
||||
|
@ -401,22 +417,140 @@ export default class DatasetController {
|
|||
return response.redirect().back();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the tmp path for storing the files temporarly
|
||||
*/
|
||||
private getTmpPath(config: BodyParserConfig['multipart']): string {
|
||||
if (typeof config.tmpFileName === 'function') {
|
||||
const tmpPath = config.tmpFileName();
|
||||
return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
|
||||
}
|
||||
|
||||
return join(tmpdir(), createId());
|
||||
}
|
||||
/**
|
||||
* Returns config for a given type
|
||||
*/
|
||||
private getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
|
||||
const config = this.config[type];
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
 * Converts a human-readable size string (e.g. "100mb", "2GB", "512 kb") into
 * a byte count.
 *
 * @param size - integer value followed by a kb/mb/gb/tb suffix (case-insensitive,
 *               optional whitespace before the unit)
 * @returns the size in bytes
 * @throws Error when the string does not match the expected `<digits><unit>` format
 */
private parseBytesSize(size: string): number {
    // `as const` keeps the keys as literal types so the indexed lookup below
    // is fully typed (no implicit-any index under `strict`).
    const units = {
        kb: 1024,
        mb: 1024 ** 2,
        gb: 1024 ** 3,
        tb: 1024 ** 4,
    } as const;

    // Integer value + unit suffix; `\s*` tolerates "512 kb" style input.
    const match = size.match(/^(\d+)\s*(kb|mb|gb|tb)$/i);
    if (!match) {
        throw new Error('Invalid size format');
    }

    const [, value, unit] = match;
    // The regex guarantees the lowercased unit is a key of `units`.
    return parseInt(value, 10) * units[unit.toLowerCase() as keyof typeof units];
}
|
||||
|
||||
public async store({ auth, request, response, session }: HttpContext) {
|
||||
// node ace make:validator CreateDataset
|
||||
// At the top of the store() method, declare an array to hold temporary file paths
|
||||
const uploadedTmpFiles: string[] = [];
|
||||
// Aggregated limit example (adjust as needed)
|
||||
const multipartConfig = this.getConfigFor('multipart');
|
||||
const aggregatedLimit = multipartConfig.limit ? this.parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
|
||||
// const aggregatedLimit = 200 * 1024 * 1024;
|
||||
let totalUploadedSize = 0;
|
||||
|
||||
// Helper function to format bytes as human-readable text
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 Bytes';
|
||||
const k = 1024;
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
// const enabledExtensions = await this.getEnabledExtensions();
|
||||
const multipart: Multipart = request.multipart;
|
||||
|
||||
multipart.onFile('files', { deferValidations: true }, async (part) => {
|
||||
// Attach an individual file size accumulator if needed
|
||||
let fileUploadedSize = 0;
|
||||
|
||||
// Simply accumulate the size in on('data') without performing the expensive check per chunk
|
||||
part.on('data', (chunk) => {
|
||||
// reporter(chunk);
|
||||
// Increase counters using the chunk length
|
||||
fileUploadedSize += chunk.length;
|
||||
});
|
||||
|
||||
// After the file is completely read, update the global counter and check aggregated limit
|
||||
part.on('end', () => {
|
||||
totalUploadedSize += fileUploadedSize;
|
||||
part.file.size = fileUploadedSize;
|
||||
// Record the temporary file path
|
||||
if (part.file.tmpPath) {
|
||||
uploadedTmpFiles.push(part.file.tmpPath);
|
||||
}
|
||||
|
||||
if (totalUploadedSize > aggregatedLimit) {
|
||||
// Clean up all temporary files if aggregate limit is exceeded
|
||||
uploadedTmpFiles.forEach((tmpPath) => {
|
||||
try {
|
||||
fs.unlinkSync(tmpPath);
|
||||
} catch (cleanupError) {
|
||||
console.error('Error cleaning up temporary file:', cleanupError);
|
||||
}
|
||||
});
|
||||
const error = new errors.E_VALIDATION_ERROR({
|
||||
'upload error': `Aggregated upload limit of ${formatBytes(aggregatedLimit)} exceeded. The total size of files being uploaded would exceed the limit.`,
|
||||
});
|
||||
request.multipart.abort(error);
|
||||
}
|
||||
});
|
||||
|
||||
part.on('error', (error) => {
|
||||
// fileUploadError = error;
|
||||
request.multipart.abort(error);
|
||||
});
|
||||
|
||||
// await pipeline(part, createWriteStream(filePath));
|
||||
// return { filePath };
|
||||
// Process file with error handling
|
||||
try {
|
||||
// Extract extension from the client file name, e.g. "Tethys 5 - Ampflwang_dataset.zip"
|
||||
const ext = path.extname(part.file.clientName).replace('.', '');
|
||||
// Attach the extracted extension to the file object for later use
|
||||
part.file.extname = ext;
|
||||
|
||||
const tmpPath = this.getTmpPath(multipartConfig);
|
||||
(part.file as any).tmpPath = tmpPath;
|
||||
|
||||
const writeStream = createWriteStream(tmpPath);
|
||||
await pipeline(part, writeStream);
|
||||
} catch (error) {
|
||||
request.multipart.abort(new errors.E_VALIDATION_ERROR({ 'upload error': error.message }));
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
// Step 2 - Validate request body against the schema
|
||||
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
|
||||
// await request.validate(CreateDatasetValidator);
|
||||
await request.validateUsing(createDatasetValidator);
|
||||
// console.log({ payload });
|
||||
await multipart.process();
|
||||
// // Instead of letting an error abort the controller, check if any error occurred
|
||||
// if (fileUploadError) {
|
||||
// // Flash the error and return an inertia view that shows the error message.
|
||||
// session.flash('errors', { 'upload error': [fileUploadError.message] });
|
||||
// return response.redirect().back();
|
||||
// }
|
||||
} catch (error) {
|
||||
// Step 3 - Handle errors
|
||||
// return response.badRequest(error.messages);
|
||||
throw error;
|
||||
// This is where you'd expect to catch any errors.
|
||||
session.flash('errors', error.messages);
|
||||
return response.redirect().back();
|
||||
}
|
||||
|
||||
let trx: TransactionClientContract | null = null;
|
||||
try {
|
||||
await request.validateUsing(createDatasetValidator);
|
||||
trx = await db.transaction();
|
||||
const user = (await User.find(auth.user?.id)) as User;
|
||||
|
||||
|
@ -425,6 +559,14 @@ export default class DatasetController {
|
|||
await trx.commit();
|
||||
console.log('Dataset and related models created successfully');
|
||||
} catch (error) {
|
||||
// Clean up temporary files if validation or later steps fail
|
||||
uploadedTmpFiles.forEach((tmpPath) => {
|
||||
try {
|
||||
fs.unlinkSync(tmpPath);
|
||||
} catch (cleanupError) {
|
||||
console.error('Error cleaning up temporary file:', cleanupError);
|
||||
}
|
||||
});
|
||||
if (trx !== null) {
|
||||
await trx.rollback();
|
||||
}
|
||||
|
@ -437,8 +579,12 @@ export default class DatasetController {
|
|||
return response.redirect().toRoute('dataset.list');
|
||||
// return response.redirect().back();
|
||||
}
|
||||
|
||||
private async createDatasetAndAssociations(user: User, request: HttpContext['request'], trx: TransactionClientContract) {
|
||||
private async createDatasetAndAssociations(
|
||||
user: User,
|
||||
request: HttpContext['request'],
|
||||
trx: TransactionClientContract,
|
||||
// uploadedFiles: Array<MultipartFile>,
|
||||
) {
|
||||
// Create a new instance of the Dataset model:
|
||||
const dataset = new Dataset();
|
||||
dataset.type = request.input('type');
|
||||
|
@ -1275,7 +1421,7 @@ export default class DatasetController {
|
|||
// This should be an array of collection ids.
|
||||
const collections: number[] = request.input('collections', []);
|
||||
|
||||
// Synchronize the dataset collections using the transaction.
|
||||
// Synchronize the dataset collections using the transaction.
|
||||
await dataset.useTransaction(trx).related('collections').sync(collections);
|
||||
|
||||
// Commit the transaction.await trx.commit()
|
||||
|
|
43
app/exceptions/db_handler.ts
Normal file
43
app/exceptions/db_handler.ts
Normal file
|
@ -0,0 +1,43 @@
|
|||
// import { Exception } from '@adonisjs/core/exceptions'
|
||||
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http';
|
||||
|
||||
export default class DbHandlerException extends ExceptionHandler {
|
||||
// constructor() {
|
||||
// super(Logger)
|
||||
// }
|
||||
|
||||
async handle(error: any, ctx: HttpContext) {
|
||||
// Check for AggregateError type
|
||||
if (error.type === 'AggregateError' && error.aggregateErrors) {
|
||||
const dbErrors = error.aggregateErrors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
|
||||
|
||||
if (dbErrors) {
|
||||
return ctx.response.status(503).json({
|
||||
status: 'error',
|
||||
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
|
||||
details: {
|
||||
code: error.code,
|
||||
type: error.type,
|
||||
ports: error.aggregateErrors.map((err: any) => ({
|
||||
port: err.port,
|
||||
address: err.address,
|
||||
})),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Handle simple ECONNREFUSED errors
|
||||
if (error.code === 'ECONNREFUSED') {
|
||||
return ctx.response.status(503).json({
|
||||
status: 'error',
|
||||
message: 'Database connection failed. Please ensure PostgreSQL is running.',
|
||||
code: error.code,
|
||||
});
|
||||
}
|
||||
|
||||
return super.handle(error, ctx);
|
||||
}
|
||||
|
||||
static status = 500;
|
||||
}
|
|
@ -46,6 +46,7 @@ export default class HttpExceptionHandler extends ExceptionHandler {
|
|||
// return view.render('./errors/server-error', { error });
|
||||
// },
|
||||
// };
|
||||
|
||||
protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
|
||||
'404': (error, { inertia }) => {
|
||||
return inertia.render('Errors/ServerError', {
|
||||
|
@ -58,9 +59,47 @@ export default class HttpExceptionHandler extends ExceptionHandler {
|
|||
return inertia.render('Errors/ServerError', {
|
||||
error: error.message,
|
||||
code: error.status,
|
||||
});
|
||||
});
|
||||
},
|
||||
// '500': (error, { inertia }) => {
|
||||
// return inertia.render('Errors/postgres_error', {
|
||||
// status: 'error',
|
||||
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
|
||||
// details: {
|
||||
// code: error.code,
|
||||
// type: error.status,
|
||||
// ports: error.errors.map((err: any) => ({
|
||||
// port: err.port,
|
||||
// address: err.address,
|
||||
// })),
|
||||
// },
|
||||
// });
|
||||
// },
|
||||
'500..599': (error, { inertia }) => {
|
||||
if (error.code === 'ECONNREFUSED') {
|
||||
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
|
||||
|
||||
if (dbErrors) {
|
||||
return inertia.render('Errors/postgres_error', {
|
||||
status: 'error',
|
||||
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
|
||||
details: {
|
||||
code: error.code,
|
||||
type: error.status,
|
||||
ports: error.errors.map((err: any) => ({
|
||||
port: err.port,
|
||||
address: err.address,
|
||||
})),
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
return inertia.render('Errors/ServerError', {
|
||||
error: error.message,
|
||||
code: error.status,
|
||||
});
|
||||
}
|
||||
},
|
||||
'500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
|
||||
};
|
||||
|
||||
// constructor() {
|
||||
|
@ -68,7 +107,7 @@ export default class HttpExceptionHandler extends ExceptionHandler {
|
|||
// }
|
||||
|
||||
public async handle(error: any, ctx: HttpContext) {
|
||||
const { response, request, session } = ctx;
|
||||
const { response, request, session, inertia } = ctx;
|
||||
|
||||
/**
|
||||
* Handle failed authentication attempt
|
||||
|
@ -82,6 +121,47 @@ export default class HttpExceptionHandler extends ExceptionHandler {
|
|||
// return response.redirect('/dashboard');
|
||||
// }
|
||||
|
||||
// Handle Axios errors
|
||||
if (error.code === 'ECONNREFUSED') {
|
||||
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
|
||||
|
||||
if (dbErrors) {
|
||||
// return ctx.response.status(503).json({
|
||||
// status: 'error',
|
||||
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
|
||||
// details: {
|
||||
// code: error.code,
|
||||
// type: error.status,
|
||||
// ports: error.errors.map((err: any) => ({
|
||||
// port: err.port,
|
||||
// address: err.address,
|
||||
// })),
|
||||
// },
|
||||
// });
|
||||
// return inertia.render('Errors/postgres_error', {
|
||||
// status: 'error',
|
||||
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
|
||||
// details: {
|
||||
// code: error.code,
|
||||
// type: error.status,
|
||||
// ports: error.errors.map((err: any) => ({
|
||||
// port: err.port,
|
||||
// address: err.address,
|
||||
// })),
|
||||
// },
|
||||
// });
|
||||
}
|
||||
}
|
||||
|
||||
// Handle simple ECONNREFUSED errors
|
||||
// if (error.code === 'ECONNREFUSED') {
|
||||
// return ctx.response.status(503).json({
|
||||
// status: 'error',
|
||||
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
|
||||
// code: error.code,
|
||||
// });
|
||||
// }
|
||||
|
||||
// https://github.com/inertiajs/inertia-laravel/issues/56
|
||||
// let test = response.getStatus(); //200
|
||||
// let header = request.header('X-Inertia'); // true
|
||||
|
@ -98,12 +178,21 @@ export default class HttpExceptionHandler extends ExceptionHandler {
|
|||
// ->toResponse($request)
|
||||
// ->setStatusCode($response->status());
|
||||
}
|
||||
|
||||
// Handle simple ECONNREFUSED errors
|
||||
// if (error.code === 'ECONNREFUSED') {
|
||||
// return ctx.response.status(503).json({
|
||||
// status: 'error',
|
||||
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
|
||||
// code: error.code,
|
||||
// });
|
||||
// }
|
||||
// Dynamically change the error templates based on the absence of X-Inertia header
|
||||
// if (!ctx.request.header('X-Inertia')) {
|
||||
// this.statusPages = {
|
||||
// '401..403': (error, { view }) => view.render('./errors/unauthorized', { error }),
|
||||
// '404': (error, { view }) => view.render('./errors/not-found', { error }),
|
||||
// '500..599': (error, { view }) => view.render('./errors/server-error', { error }),
|
||||
// '401..403': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
|
||||
// '404': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
|
||||
// '500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
|
||||
// };
|
||||
// }
|
||||
|
||||
|
|
57
app/models/types.ts
Normal file
57
app/models/types.ts
Normal file
|
@ -0,0 +1,57 @@
|
|||
/**
 * Qs module config
 *
 * Options forwarded to the `qs` query-string parser when decoding
 * url-encoded form bodies. All fields are optional; unset fields use
 * the `qs` defaults.
 */
type QueryStringConfig = {
  depth?: number
  allowPrototypes?: boolean
  plainObjects?: boolean
  parameterLimit?: number
  arrayLimit?: number
  ignoreQueryPrefix?: boolean
  delimiter?: RegExp | string
  allowDots?: boolean
  charset?: 'utf-8' | 'iso-8859-1' | undefined
  charsetSentinel?: boolean
  interpretNumericEntities?: boolean
  parseArrays?: boolean
  comma?: boolean
}

/**
 * Base config used by all types
 *
 * Shared by every parser section below: the request encoding, the maximum
 * accepted body size (either a byte count or a string like "100mb"), and
 * the content types the section should handle.
 */
type BodyParserBaseConfig = {
  encoding: string
  limit: string | number
  types: string[]
}

/**
 * Body parser config for parsing JSON requests
 */
export type BodyParserJSONConfig = BodyParserBaseConfig & {
  // When true, only accept top-level arrays/objects (strict JSON parsing).
  strict: boolean
  convertEmptyStringsToNull: boolean
}

/**
 * Parser config for parsing form data
 */
export type BodyParserFormConfig = BodyParserBaseConfig & {
  queryString: QueryStringConfig
  convertEmptyStringsToNull: boolean
}

/**
 * Parser config for parsing raw body (untouched)
 */
export type BodyParserRawConfig = BodyParserBaseConfig

/**
 * Body parser config for all supported form types
 *
 * Top-level shape consumed via `config.get('bodyparser')`.
 */
export type BodyParserConfig = {
  // HTTP methods whose bodies should be parsed (e.g. POST, PUT, PATCH).
  allowedMethods: string[]
  json: BodyParserJSONConfig
  form: BodyParserFormConfig
  raw: BodyParserRawConfig
  // NOTE(review): BodyParserMultipartConfig is referenced here but not visible
  // in this chunk — presumably declared further down in this file; confirm.
  multipart: BodyParserMultipartConfig
}
|
Loading…
Add table
editor.link_modal.header
Reference in a new issue