diff --git a/app/Controllers/Http/Submitter/DatasetController.ts b/app/Controllers/Http/Submitter/DatasetController.ts
index 42ae5c3..f11aaf6 100644
--- a/app/Controllers/Http/Submitter/DatasetController.ts
+++ b/app/Controllers/Http/Submitter/DatasetController.ts
@@ -45,11 +45,7 @@ import { pipeline } from 'node:stream/promises';
import { createWriteStream } from 'node:fs';
import type { Multipart } from '@adonisjs/bodyparser';
import * as fs from 'fs';
-import { join, isAbsolute } from 'node:path';
-import type { BodyParserConfig } from '#models/types';
-import { createId } from '@paralleldrive/cuid2';
-import { tmpdir } from 'node:os';
-import config from '@adonisjs/core/services/config';
+import { parseBytesSize, getConfigFor, getTmpPath, formatBytes } from '#app/utils/utility-functions';
interface Dictionary {
[index: string]: string;
@@ -60,7 +56,7 @@ export default class DatasetController {
/**
* Bodyparser config
*/
- config: BodyParserConfig = config.get('bodyparser');
+ // config: BodyParserConfig = config.get('bodyparser');
public async index({ auth, request, inertia }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User;
@@ -272,6 +268,7 @@ export default class DatasetController {
}
return response.redirect().back();
}
+
public async thirdStep({ request, response }: HttpContext) {
const newDatasetSchema = vine.object({
// first step
@@ -297,8 +294,8 @@ export default class DatasetController {
.translatedLanguage({ mainLanguageField: 'language', typeField: 'type' }),
}),
)
- // .minLength(2)
- .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
+ // .minLength(2)
+ .arrayContainsTypes({ typeA: 'main', typeB: 'translated' }),
descriptions: vine
.array(
vine.object({
@@ -420,60 +417,23 @@ export default class DatasetController {
return response.redirect().back();
}
- /**
- * Returns the tmp path for storing the files temporarly
- */
- private getTmpPath(config: BodyParserConfig['multipart']): string {
- if (typeof config.tmpFileName === 'function') {
- const tmpPath = config.tmpFileName();
- return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
- }
-
- return join(tmpdir(), createId());
- }
- /**
- * Returns config for a given type
- */
-  private getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
- const config = this.config[type];
- return config;
- }
-
- private parseBytesSize(size: string): number {
- const units = {
- kb: 1024,
- mb: 1024 * 1024,
- gb: 1024 * 1024 * 1024,
- tb: 1024 * 1024 * 1024 * 1024,
- };
-
- const match = size.match(/^(\d+)(kb|mb|gb|tb)$/i); // Regex to match size format
-
- if (!match) {
- throw new Error('Invalid size format');
- }
-
- const [, value, unit] = match;
- return parseInt(value) * units[unit.toLowerCase()];
- }
-
public async store({ auth, request, response, session }: HttpContext) {
// At the top of the store() method, declare an array to hold temporary file paths
const uploadedTmpFiles: string[] = [];
// Aggregated limit example (adjust as needed)
- const multipartConfig = this.getConfigFor('multipart');
- const aggregatedLimit = multipartConfig.limit ? this.parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
+ const multipartConfig = getConfigFor('multipart');
+ const aggregatedLimit = multipartConfig.limit ? parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
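+        // Falls back to 100 MiB (100 * 1024 * 1024 bytes) when no multipart limit is configured.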
// const aggregatedLimit = 200 * 1024 * 1024;
let totalUploadedSize = 0;
- // Helper function to format bytes as human-readable text
- function formatBytes(bytes: number): string {
- if (bytes === 0) return '0 Bytes';
- const k = 1024;
- const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
- const i = Math.floor(Math.log(bytes) / Math.log(k));
- return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
- }
+ // // Helper function to format bytes as human-readable text
+ // function formatBytes(bytes: number): string {
+ // if (bytes === 0) return '0 Bytes';
+ // const k = 1024;
+ // const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
+ // const i = Math.floor(Math.log(bytes) / Math.log(k));
+ // return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
+ // }
// const enabledExtensions = await this.getEnabledExtensions();
const multipart: Multipart = request.multipart;
@@ -529,7 +489,7 @@ export default class DatasetController {
// part.file.sortOrder = part.file.sortOrder;
- const tmpPath = this.getTmpPath(multipartConfig);
+ const tmpPath = getTmpPath(multipartConfig);
(part.file as any).tmpPath = tmpPath;
const writeStream = createWriteStream(tmpPath);
@@ -1054,20 +1014,82 @@ export default class DatasetController {
}
public async update({ request, response, session }: HttpContext) {
- try {
- // await request.validate(UpdateDatasetValidator);
- await request.validateUsing(updateDatasetValidator);
- } catch (error) {
- // - Handle errors
- // return response.badRequest(error.messages);
- throw error;
- // return response.badRequest(error.messages);
- }
- // await request.validate(UpdateDatasetValidator);
- const id = request.param('id');
+ // Get the dataset id from the route parameter
+ const datasetId = request.param('id');
+ // Retrieve the dataset and load its existing files
+ const dataset = await Dataset.findOrFail(datasetId);
+ await dataset.load('files');
+ // Accumulate the size of the already related files
+ const preExistingFileSize = dataset.files.reduce((acc, file) => acc + file.fileSize, 0);
+ const uploadedTmpFiles: string[] = [];
+ // Only process multipart if the request has a multipart content type
+ const contentType = request.request.headers['content-type'] || '';
+ if (contentType.includes('multipart/form-data')) {
+ const multipart: Multipart = request.multipart;
+ // Aggregated limit example (adjust as needed)
+ const multipartConfig = getConfigFor('multipart');
+ const aggregatedLimit = multipartConfig.limit ? parseBytesSize(multipartConfig.limit) : 100 * 1024 * 1024;
+ // Initialize totalUploadedSize with the size of existing files
+ let totalUploadedSize = preExistingFileSize;
+
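+            // Validations are deferred so the aggregated size limit can be enforced manually below.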
+ multipart.onFile('files', { deferValidations: true }, async (part) => {
+ let fileUploadedSize = 0;
+
+ part.on('data', (chunk) => {
+ fileUploadedSize += chunk.length;
+ });
+
+ part.on('end', () => {
+ totalUploadedSize += fileUploadedSize;
+ part.file.size = fileUploadedSize;
+ if (part.file.tmpPath) {
+ uploadedTmpFiles.push(part.file.tmpPath);
+ }
+ if (totalUploadedSize > aggregatedLimit) {
+ uploadedTmpFiles.forEach((tmpPath) => {
+ try {
+ fs.unlinkSync(tmpPath);
+ } catch (cleanupError) {
+ console.error('Error cleaning up temporary file:', cleanupError);
+ }
+ });
+ const error = new errors.E_VALIDATION_ERROR({
+ 'upload error': `Aggregated upload limit of ${formatBytes(aggregatedLimit)} exceeded. The total size of files being uploaded would exceed the limit.`,
+ });
+ request.multipart.abort(error);
+ }
+ });
+
+ part.on('error', (error) => {
+ request.multipart.abort(error);
+ });
+
+ try {
+ const fileNameWithoutParams = part.file.clientName.split('?')[0];
+ const ext = path.extname(fileNameWithoutParams).replace('.', '');
+ part.file.extname = ext;
+ const tmpPath = getTmpPath(multipartConfig);
+ (part.file as any).tmpPath = tmpPath;
+ const writeStream = createWriteStream(tmpPath);
+ await pipeline(part, writeStream);
+ } catch (error) {
+ request.multipart.abort(new errors.E_VALIDATION_ERROR({ 'upload error': error.message }));
+ }
+ });
+
+ try {
+ await multipart.process();
+ } catch (error) {
+ session.flash('errors', error.messages);
+ return response.redirect().back();
+ }
+ }
+
+ const id = request.param('id');
let trx: TransactionClientContract | null = null;
try {
+ await request.validateUsing(updateDatasetValidator);
trx = await db.transaction();
// const user = (await User.find(auth.user?.id)) as User;
// await this.createDatasetAndAssociations(user, request, trx);
@@ -1175,9 +1197,9 @@ export default class DatasetController {
// handle new uploaded files:
const uploadedFiles: MultipartFile[] = request.files('files');
if (Array.isArray(uploadedFiles) && uploadedFiles.length > 0) {
- for (const [index, fileData] of uploadedFiles.entries()) {
+ for (const [index, file] of uploadedFiles.entries()) {
try {
- await this.scanFileForViruses(fileData.tmpPath); //, 'gitea.lan', 3310);
+ await this.scanFileForViruses(file.tmpPath); //, 'gitea.lan', 3310);
// await this.scanFileForViruses("/tmp/testfile.txt");
} catch (error) {
// If the file is infected or there's an error scanning the file, throw a validation exception
@@ -1185,29 +1207,29 @@ export default class DatasetController {
}
// move to disk:
- const fileName = `file-${cuid()}.${fileData.extname}`; //'file-ls0jyb8xbzqtrclufu2z2e0c.pdf'
+ const fileName = this.generateFilename(file.extname as string);
const datasetFolder = `files/${dataset.id}`; // 'files/307'
const datasetFullPath = path.join(`${datasetFolder}`, fileName);
- // await fileData.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
- // await fileData.move(drive.makePath(datasetFolder), {
+ // await file.moveToDisk(datasetFolder, { name: fileName, overwrite: true }, 'local');
+ // await file.move(drive.makePath(datasetFolder), {
// name: fileName,
// overwrite: true, // overwrite in case of conflict
// });
- await fileData.moveToDisk(datasetFullPath, 'local', {
+ await file.moveToDisk(datasetFullPath, 'local', {
name: fileName,
overwrite: true, // overwrite in case of conflict
disk: 'local',
});
//save to db:
- const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(fileData.clientName);
- const mimeType = fileData.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
+ const { clientFileName, sortOrder } = this.extractVariableNameAndSortOrder(file.clientName);
+ const mimeType = file.headers['content-type'] || 'application/octet-stream'; // Fallback to a default MIME type
const newFile = await dataset
.useTransaction(trx)
.related('files')
.create({
pathName: `${datasetFolder}/${fileName}`,
- fileSize: fileData.size,
+ fileSize: file.size,
mimeType,
label: clientFileName,
sortOrder: sortOrder || index,
@@ -1253,10 +1275,18 @@ export default class DatasetController {
// return response.redirect().toRoute('user.index');
return response.redirect().toRoute('dataset.edit', [dataset.id]);
} catch (error) {
+ // Clean up temporary files if validation or later steps fail
+ uploadedTmpFiles.forEach((tmpPath) => {
+ try {
+ fs.unlinkSync(tmpPath);
+ } catch (cleanupError) {
+ console.error('Error cleaning up temporary file:', cleanupError);
+ }
+ });
if (trx !== null) {
await trx.rollback();
}
- console.error('Failed to create dataset and related models:', error);
+ console.error('Failed to update dataset and related models:', error);
// throw new ValidationException(true, { 'upload error': `failed to create dataset and related models. ${error}` });
throw error;
}
diff --git a/app/utils/utility-functions.ts b/app/utils/utility-functions.ts
index 6afb2a0..3e1608a 100644
--- a/app/utils/utility-functions.ts
+++ b/app/utils/utility-functions.ts
@@ -1,3 +1,9 @@
+import { join, isAbsolute } from 'node:path';
+import type { BodyParserConfig } from '#models/types';
+import { createId } from '@paralleldrive/cuid2';
+import { tmpdir } from 'node:os';
+import config from '@adonisjs/core/services/config';
+
export function sum(a: number, b: number): number {
return a + b;
}
@@ -24,3 +30,51 @@ export function preg_match(regex: RegExp, str: string) {
const result: boolean = regex.test(str);
return result;
}
+
+/**
+ * Returns the tmp path for storing files temporarily
+ */
+export function getTmpPath(config: BodyParserConfig['multipart']): string {
+ if (typeof config.tmpFileName === 'function') {
+ const tmpPath = config.tmpFileName();
+ return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
+ }
+
+ return join(tmpdir(), createId());
+}
+/**
+ * Returns the bodyparser config for a given section (e.g. 'multipart')
+ */
+export function getConfigFor<K extends keyof BodyParserConfig>(type: K): BodyParserConfig[K] {
+ const bodyParserConfig: BodyParserConfig = config.get('bodyparser');
+ const configType = bodyParserConfig[type];
+ return configType;
+}
+
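+/**
+ * Parses a human-readable size string into a byte count.
+ * Example: parseBytesSize('10kb') === 10240.
+ */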
+export function parseBytesSize(size: string): number {
+  const units: Record<string, number> = {
+ kb: 1024,
+ mb: 1024 * 1024,
+ gb: 1024 * 1024 * 1024,
+ tb: 1024 * 1024 * 1024 * 1024,
+ };
+
+  const match = size.match(/^(\d+)(kb|mb|gb|tb)$/i); // matches sizes like '100mb' or '2GB' (case-insensitive)
+
+ if (!match) {
+ throw new Error('Invalid size format');
+ }
+
+ const [, value, unit] = match;
+ return parseInt(value) * units[unit.toLowerCase()];
+}
+
+// Helper function to format bytes as human-readable text
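+// Example: formatBytes(1536) === '1.5 KB'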
+export function formatBytes(bytes: number): string {
+ if (bytes === 0) return '0 Bytes';
+ const k = 1024;
+ const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
+ const i = Math.floor(Math.log(bytes) / Math.log(k));
+ return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
+}
diff --git a/config/bodyparser.ts b/config/bodyparser.ts
index 7af1bba..b7c7d35 100644
--- a/config/bodyparser.ts
+++ b/config/bodyparser.ts
@@ -128,7 +128,7 @@ allowedMethods: ['POST', 'PUT', 'PATCH', 'DELETE'],
| projects/:id/file
| ```
*/
- processManually: ['/submitter/dataset/submit'],
+ processManually: ['/submitter/dataset/submit', '/submitter/dataset/:id/update'],
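+    // Both routes stream their file uploads manually in DatasetController.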
/*
|--------------------------------------------------------------------------
diff --git a/resources/js/Components/FileUpload.vue b/resources/js/Components/FileUpload.vue
index 3d4cbeb..c5ee2ce 100644
--- a/resources/js/Components/FileUpload.vue
+++ b/resources/js/Components/FileUpload.vue
@@ -42,7 +42,8 @@
Click to upload or drag and drop
-
+
@@ -190,7 +191,7 @@