- mail_settings_controller for setting smtp settings - added view for rejecting dataset for editor - added new model AppConfig for storing app-wide config values - better validate_checksum.ts command with process chunking - added vue3 apps 'BasicSettings' like email, profile settings - started with multilingual capabilities - npm updates
This commit is contained in:
parent
010bead723
commit
b06ccae603
67 changed files with 7820 additions and 1463 deletions
|
@ -1,70 +1,120 @@
|
|||
import { BaseCommand } from '@adonisjs/core/ace';
import { CommandOptions } from '@adonisjs/core/types/ace';
import logger from '@adonisjs/core/services/logger';
import crypto from 'crypto';
import dayjs from 'dayjs';
import fs from 'fs/promises'; // fs/promises for async file operations (readFile)
import AppConfig from '#models/appconfig';
import TethysFile from '#models/file';
// import Config from '@ioc:Adonis/Core/Config';
// import db from '@adonisjs/lucid/services/db'; // Import the DB service
||||
export default class ValidateChecksum extends BaseCommand {
|
||||
/**
|
||||
* Command name is used to run the command
|
||||
* Command name used to run the command
|
||||
*/
|
||||
public static commandName = 'validate:checksum';
|
||||
|
||||
/**
|
||||
* Command description is displayed in the "help" output
|
||||
* Command description displayed in the "help" output
|
||||
*/
|
||||
public static description = '';
|
||||
static options: CommandOptions = {
|
||||
loadApp: true,
|
||||
staysAlive: false,
|
||||
};
|
||||
|
||||
public static options: CommandOptions = {
|
||||
startApp: true,
|
||||
staysAlive: false,
|
||||
};
|
||||
|
||||
private chunkSize = 100; // Set chunk size for pagination
|
||||
|
||||
public async run() {
|
||||
// this.logger.info('Hello world!')
|
||||
const { default: File } = await import('#models/file');
|
||||
// const { default: HashValue } = await (await (import ('App/Models/HashValue')));
|
||||
let page = 1; // Start with the first page
|
||||
let hasMoreFiles = true; // Flag to check if there are more files to process
|
||||
|
||||
// query all published files from database:
|
||||
const files = await File.query()
|
||||
.whereHas('dataset', (dQuery) => {
|
||||
dQuery.where('server_state', 'published');
|
||||
})
|
||||
.preload('hashvalues');
|
||||
// Loop to process files in chunks
|
||||
while (hasMoreFiles) {
|
||||
// Query a chunk of published files from the database with pagination
|
||||
const files = await TethysFile.query()
|
||||
.whereHas('dataset', (dQuery) => {
|
||||
dQuery.where('server_state', 'published'); // Only get published datasets
|
||||
})
|
||||
.orderBy('document_id', 'asc') // Order by document ID
|
||||
.preload('hashvalues') // Preload hash values
|
||||
.forPage(page, this.chunkSize); // Get files for the current page
|
||||
|
||||
// const logLevel = Config.get('app.logger.level', 'info');
|
||||
// console.log(this.logger.)
|
||||
|
||||
for (var file of files) {
|
||||
let hashValue = await file.related('hashvalues').query().pluck('value', 'type');
|
||||
|
||||
const filePath = '/storage/app/public/' + file.pathName;
|
||||
let calculatedMd5FileHash;
|
||||
try {
|
||||
calculatedMd5FileHash = await this.checksumFile(filePath, 'md5');
|
||||
} catch (exception) {
|
||||
// this.logger.error(exception.message);
|
||||
logger.error(exception.message);
|
||||
continue;
|
||||
// Check if there are no more files to process
|
||||
if (files.length === 0) {
|
||||
hasMoreFiles = false; // No more files, exit the loop
|
||||
break;
|
||||
}
|
||||
|
||||
if (hashValue['md5'] === calculatedMd5FileHash) {
|
||||
logger.info(`File id ${file.id} OK: stored md5 checksum: ${calculatedMd5FileHash}, same control md5 checksum: ${hashValue['md5']}`);
|
||||
// Process the current chunk of files
|
||||
await this.processChunk(files);
|
||||
|
||||
// Move to the next page
|
||||
page += 1; // Increment page number
|
||||
}
|
||||
|
||||
// Write the current timestamp into the database
|
||||
const timestamp = dayjs().unix(); // Get Unix timestamp
|
||||
|
||||
// Update the timestamp in the appconfigs table
|
||||
// await db.from('appconfigs')
|
||||
// .where('appid', 'backgroundjob')
|
||||
// .where('configkey', 'lastjob')
|
||||
// .update({ configvalue: timestamp });
|
||||
|
||||
await AppConfig.updateOrCreate({ appid: 'backgroundjob', configkey: 'lastjob' }, { configvalue: timestamp });
|
||||
|
||||
// Log the updated timestamp
|
||||
logger.info(`Updated last job timestamp to: ${timestamp}`);
|
||||
logger.info(`Cron job executed at: ${dayjs.unix(timestamp).format('YYYY-MM-DD HH:mm:ss')}`);
|
||||
}
|
||||
|
||||
private async processChunk(filesArray: TethysFile[]) {
|
||||
// Process all files in parallel using Promise.all
|
||||
await Promise.all(
|
||||
filesArray.map((file) => this.fetchData(file)), // Fetch data for each file
|
||||
);
|
||||
}
|
||||
|
||||
private async fetchData(file: TethysFile): Promise<void> {
|
||||
// Create a hashValues object to store hash values
|
||||
const hashValues = file.hashvalues.reduce(
|
||||
(acc, h) => {
|
||||
acc[h.type] = h.value; // Map hash type to its value
|
||||
return acc;
|
||||
},
|
||||
{} as { [key: string]: string },
|
||||
);
|
||||
|
||||
// Construct the file path
|
||||
const filePath = '/storage/app/public/' + file.pathName;
|
||||
|
||||
try {
|
||||
// Calculate the MD5 checksum of the file
|
||||
const calculatedMd5FileHash = await this.checksumFile(filePath, 'md5');
|
||||
|
||||
// Compare the calculated hash with the stored hash
|
||||
if (hashValues['md5'] === calculatedMd5FileHash) {
|
||||
logger.info(
|
||||
`File id ${file.id} OK: stored md5 checksum: ${calculatedMd5FileHash}, same control md5 checksum: ${hashValues['md5']}`,
|
||||
);
|
||||
} else {
|
||||
// Log an error if checksums do not match
|
||||
logger.error(
|
||||
`File id ${file.id}: stored md5 checksum: ${calculatedMd5FileHash}, control md5 checksum: ${hashValue['md5']}`,
|
||||
`File id ${file.id}: stored md5 checksum: ${calculatedMd5FileHash}, control md5 checksum: ${hashValues['md5']}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
// Log any error encountered during processing
|
||||
logger.error(`File id ${file.id} error: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async checksumFile(path: string, hashName = 'md5'): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash(hashName);
|
||||
const stream = fs.createReadStream(path);
|
||||
stream.on('error', (err) => reject(err));
|
||||
stream.on('data', (chunk) => hash.update(chunk));
|
||||
stream.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
const hash = crypto.createHash(hashName); // Create a hash object
|
||||
const data = await fs.readFile(path); // Read file asynchronously
|
||||
hash.update(data); // Update hash with file data
|
||||
return hash.digest('hex'); // Return the hash in hexadecimal format
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
editor.link_modal.header
Reference in a new issue