- small adaptions for AsideMenuItem.vue, AsideMenuLayer.vue
All checks were successful
CI Pipeline / japa-tests (push) Successful in 50s
All checks were successful
CI Pipeline / japa-tests (push) Successful in 50s
- new routes editor.dataset.list and editor.dataset.update - first functionalities for the editor role, such as listing and receiving released datasets - npm updates
This commit is contained in:
parent
c1e056b9fc
commit
6fef581dd0
11 changed files with 550 additions and 84 deletions
256
app/Controllers/Http/Editor/DatasetController.ts
Normal file
256
app/Controllers/Http/Editor/DatasetController.ts
Normal file
|
@ -0,0 +1,256 @@
|
|||
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
|
||||
import { Client } from '@opensearch-project/opensearch';
|
||||
import User from 'App/Models/User';
|
||||
import Dataset from 'App/Models/Dataset';
|
||||
import XmlModel from 'App/Library/XmlModel';
|
||||
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
|
||||
import { create } from 'xmlbuilder2';
|
||||
import { readFileSync } from 'fs';
|
||||
import { transform } from 'saxon-js';
|
||||
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
|
||||
|
||||
// Create a new instance of the OpenSearch client.
// NOTE(review): the node endpoint is hard-coded — confirm whether it should be read
// from env/config (e.g. OPENSEARCH_NODE) before this ships.
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint
|
||||
|
||||
export default class DatasetsController {
|
||||
private proc;
|
||||
|
||||
constructor() {
|
||||
this.proc = readFileSync('public/assets2/solr.sef.json');
|
||||
// Load the XSLT file
|
||||
// this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
|
||||
}
|
||||
|
||||
// public async index({}: HttpContextContract) {}
|
||||
public async index({ auth, request, inertia }: HttpContextContract) {
|
||||
const user = (await User.find(auth.user?.id)) as User;
|
||||
const page = request.input('page', 1);
|
||||
let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();
|
||||
|
||||
// if (request.input('search')) {
|
||||
// // users = users.whereRaw('name like %?%', [request.input('search')])
|
||||
// const searchTerm = request.input('search');
|
||||
// datasets.where('name', 'ilike', `%${searchTerm}%`);
|
||||
// }
|
||||
|
||||
if (request.input('sort')) {
|
||||
type SortOrder = 'asc' | 'desc' | undefined;
|
||||
let attribute = request.input('sort');
|
||||
let sortOrder: SortOrder = 'asc';
|
||||
|
||||
if (attribute.substr(0, 1) === '-') {
|
||||
sortOrder = 'desc';
|
||||
// attribute = substr(attribute, 1);
|
||||
attribute = attribute.substr(1);
|
||||
}
|
||||
datasets.orderBy(attribute, sortOrder);
|
||||
} else {
|
||||
// users.orderBy('created_at', 'desc');
|
||||
datasets.orderBy('id', 'asc');
|
||||
}
|
||||
|
||||
// const users = await User.query().orderBy('login').paginate(page, limit);
|
||||
const myDatasets = await datasets
|
||||
.where('server_state', 'released')
|
||||
.orWhere((dQuery) => {
|
||||
dQuery
|
||||
.whereIn('server_state', ['editor_accepted', 'rejected_reviewer', 'reviewed', 'published'])
|
||||
.where('editor_id', user.id);
|
||||
})
|
||||
.preload('titles')
|
||||
.preload('user', (query) => query.select('id', 'login'))
|
||||
.paginate(page, 10);
|
||||
|
||||
return inertia.render('Editor/Dataset/Index', {
|
||||
datasets: myDatasets.serialize(),
|
||||
filters: request.all(),
|
||||
can: {
|
||||
// create: await auth.user?.can(['dataset-submit']),
|
||||
receive: await auth.user?.can(['dataset-receive']),
|
||||
edit: await auth.user?.can(['dataset-editor-edit']),
|
||||
delete: await auth.user?.can(['dataset-editor-delete']),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
public async receive({ request, inertia, response }: HttpContextContract) {
|
||||
const id = request.param('id');
|
||||
const dataset = await Dataset.query()
|
||||
.where('id', id)
|
||||
.preload('titles')
|
||||
.preload('descriptions')
|
||||
.preload('user', (builder) => {
|
||||
builder.select('id', 'login');
|
||||
})
|
||||
|
||||
.firstOrFail();
|
||||
|
||||
const validStates = ['released'];
|
||||
if (!validStates.includes(dataset.server_state)) {
|
||||
// session.flash('errors', 'Invalid server state!');
|
||||
return response
|
||||
.flash(
|
||||
'warning',
|
||||
`Invalid server state. Dataset with id ${id} cannot be received. Datset has server state ${dataset.server_state}.`,
|
||||
)
|
||||
.redirect()
|
||||
.back();
|
||||
}
|
||||
|
||||
return inertia.render('Editor/Dataset/Receive', {
|
||||
dataset,
|
||||
});
|
||||
}
|
||||
|
||||
public async create({}: HttpContextContract) {}
|
||||
|
||||
public async store({}: HttpContextContract) {}
|
||||
|
||||
public async show({}: HttpContextContract) {}
|
||||
|
||||
public async edit({}: HttpContextContract) {}
|
||||
|
||||
// public async update({}: HttpContextContract) {}
|
||||
public async update({ response }) {
|
||||
const id = 273; //request.param('id');
|
||||
const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();
|
||||
// add xml elements
|
||||
let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
|
||||
const datasetNode = xml.root().ele('Dataset');
|
||||
await this.createXmlRecord(dataset, datasetNode);
|
||||
// const domNode = await this.getDatasetXmlDomNode(dataset);
|
||||
// const xmlString = xml.end({ prettyPrint: true });
|
||||
|
||||
// const data = request.only(['field1', 'field2']); // get it from xslt
|
||||
|
||||
// Create an index with non-default settings.
|
||||
var index_name = 'tethys-features';
|
||||
|
||||
const xmlString = xml.end({ prettyPrint: false });
|
||||
let doc = '';
|
||||
try {
|
||||
const result = await transform({
|
||||
// stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
|
||||
stylesheetText: this.proc,
|
||||
destination: 'serialized',
|
||||
// sourceFileName: sourceFile,
|
||||
sourceText: xmlString,
|
||||
// stylesheetParams: xsltParameter,
|
||||
// logLevel: 10,
|
||||
});
|
||||
doc = result.principalResult;
|
||||
} catch (error) {
|
||||
return response.status(500).json({
|
||||
message: 'An error occurred while creating the user',
|
||||
error: error.message,
|
||||
});
|
||||
}
|
||||
|
||||
// var settings = {
|
||||
// settings: {
|
||||
// index: {
|
||||
// number_of_shards: 4,
|
||||
// number_of_replicas: 3,
|
||||
// },
|
||||
// },
|
||||
// };
|
||||
// var test = await client.indices.create({
|
||||
// index: index_name,
|
||||
// body: settings,
|
||||
// });
|
||||
|
||||
// var document = {
|
||||
// title: 'Sample Document',
|
||||
// authors: [
|
||||
// {
|
||||
// first_name: 'John',
|
||||
// last_name: 'Doe',
|
||||
// },
|
||||
// {
|
||||
// first_name: 'Jane',
|
||||
// last_name: 'Smith',
|
||||
// },
|
||||
// ],
|
||||
// year: '2018',
|
||||
// genre: 'Crime fiction',
|
||||
// };
|
||||
|
||||
// http://localhost:9200/datastets/_doc/1
|
||||
|
||||
// var id = '1';
|
||||
|
||||
try {
|
||||
// console.log(doc);
|
||||
let document = JSON.parse(`${doc}`);
|
||||
|
||||
// https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
|
||||
// Define the new document
|
||||
// const document = {
|
||||
// title: 'Your Document Name',
|
||||
// id: dataset.publish_id,
|
||||
// doctype: 'GIS',
|
||||
// // "location" : {
|
||||
// // "type" : "point",
|
||||
// // "coordinates" : [74.00, 40.71]
|
||||
// // },
|
||||
// geo_location: {
|
||||
// type: 'linestring',
|
||||
// coordinates: [
|
||||
// [-77.03653, 38.897676],
|
||||
// [-77.009051, 38.889939],
|
||||
// ],
|
||||
// },
|
||||
// // geo_location: 'BBOX (71.0589, 74.0060, 42.3601, 40.7128)'
|
||||
// // geo_location: {
|
||||
// // type: 'envelope',
|
||||
// // coordinates: [
|
||||
// // [13.0, 53.0],
|
||||
// // [14.0, 52.0],
|
||||
// // ], // Define your BBOX coordinates
|
||||
// // },
|
||||
// };
|
||||
|
||||
// Update the document
|
||||
var test = await client.index({
|
||||
id: dataset.publish_id?.toString(),
|
||||
index: index_name,
|
||||
body: document,
|
||||
refresh: true,
|
||||
});
|
||||
|
||||
// Return the result
|
||||
return response.json(test.body);
|
||||
} catch (error) {
|
||||
// Handle any errors
|
||||
console.error(error);
|
||||
return response.status(500).json({ error: 'An error occurred while updating the data.' });
|
||||
}
|
||||
}
|
||||
|
||||
public async destroy({}: HttpContextContract) {}
|
||||
|
||||
public async syncOpensearch({}: HttpContextContract) {}
|
||||
|
||||
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
|
||||
const domNode = await this.getDatasetXmlDomNode(dataset);
|
||||
if (domNode) {
|
||||
datasetNode.import(domNode);
|
||||
}
|
||||
}
|
||||
|
||||
private async getDatasetXmlDomNode(dataset: Dataset) {
|
||||
const xmlModel = new XmlModel(dataset);
|
||||
// xmlModel.setModel(dataset);
|
||||
xmlModel.excludeEmptyFields();
|
||||
xmlModel.caching = true;
|
||||
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
|
||||
// dataset.load('xmlCache');
|
||||
if (dataset.xmlCache) {
|
||||
xmlModel.xmlCache = dataset.xmlCache;
|
||||
}
|
||||
|
||||
// return cache.getDomDocument();
|
||||
const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
|
||||
return domDocument;
|
||||
}
|
||||
}
|
Loading…
Add table
editor.link_modal.header
Reference in a new issue