- remove VOLUME assignments from DOCKERFILE
All checks were successful
CI Pipeline / japa-tests (push) Successful in 54s
- add package @opensearch-project/opensearch for manipulating the opensearch index
- index tethys datasets via the new command IndexDatasets, callable via node ace index:datasets or node ace index:datasets -p 193
- add mapping file for the opensearch index in public/records.json
- add solr.xslt for transforming the Dataset model to json for indexing into opensearch
- add route /editor/dataset/:id/update (beginning of editor/DatasetsController.ts)
- npm updates
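The IndexDatasets command itself is not part of this excerpt. As a rough, hypothetical sketch only — the file location, flag name, and OpenSearch endpoint below are assumptions, not the committed code — an AdonisJS v5 ace command matching the invocations above could look like this:

import { BaseCommand, flags } from '@adonisjs/core/build/standalone';
import { Client } from '@opensearch-project/opensearch';

export default class IndexDatasets extends BaseCommand {
    public static commandName = 'index:datasets';
    public static description = 'Index tethys datasets into OpenSearch';

    // boot the application so Lucid models and IoC bindings are available
    public static settings = { loadApp: true };

    // hypothetical flag backing "node ace index:datasets -p 193"
    @flags.number({ alias: 'p', description: 'Only index the dataset with this publish_id' })
    public publishId: number;

    public async run() {
        const { default: Dataset } = await import('App/Models/Dataset');
        const client = new Client({ node: 'http://localhost:9200' }); // assumed endpoint

        const query = Dataset.query().preload('xmlCache');
        if (this.publishId) {
            query.where('publish_id', this.publishId);
        }

        for (const dataset of await query) {
            // transform each dataset to a JSON document (e.g. via solr.sef.json)
            // and index it, mirroring the logic in Editor/DatasetsController.update()
            this.logger.info(`Indexing dataset ${dataset.publish_id}`);
        }
    }
}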
This commit is contained in:
parent
7915f66dd6
commit
cf859ba402
21 changed files with 1357 additions and 280 deletions
175
app/Controllers/Http/Editor/DatasetsController.ts
Normal file
@@ -0,0 +1,175 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import { Client } from '@opensearch-project/opensearch';
import Dataset from 'App/Models/Dataset';
import XmlModel from 'App/Library/XmlModel';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import { create } from 'xmlbuilder2';
import { readFileSync } from 'fs';
import { transform } from 'saxon-js';

// Create a new instance of the client
const client = new Client({ node: 'http://localhost:9200' }); // replace with your OpenSearch endpoint

export default class DatasetsController {
    private proc;

    constructor() {
        // Load the compiled XSLT (SEF) file; read as utf-8 so saxon-js receives a string, not a Buffer
        this.proc = readFileSync('public/assets2/solr.sef.json', 'utf-8');
        // this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
    }

    public async index({}: HttpContextContract) {}

    public async create({}: HttpContextContract) {}

    public async store({}: HttpContextContract) {}

    public async show({}: HttpContextContract) {}

    public async edit({}: HttpContextContract) {}

    // public async update({}: HttpContextContract) {}
    // Serializes the dataset to XML, transforms it to JSON via XSLT and indexes the result into OpenSearch
    public async update({ request, response }) {
        const id = 273; // request.param('id');
        const dataset = await Dataset.query().preload('xmlCache').where('id', id).firstOrFail();

        // add xml elements
        let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
        const datasetNode = xml.root().ele('Dataset');
        await this.createXmlRecord(dataset, datasetNode);
        // const domNode = await this.getDatasetXmlDomNode(dataset);
        // const xmlString = xml.end({ prettyPrint: true });

        // const data = request.only(['field1', 'field2']); // get it from xslt

        // Create an index with non-default settings.
        var index_name = 'tethys-features';

        const xmlString = xml.end({ prettyPrint: false });
        let doc = '';
        try {
            const result = await transform({
                // stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
                stylesheetText: this.proc,
                destination: 'serialized',
                // sourceFileName: sourceFile,
                sourceText: xmlString,
                // stylesheetParams: xsltParameter,
                // logLevel: 10,
            });
            doc = result.principalResult;
        } catch (error) {
            return response.status(500).json({
                message: 'An error occurred while transforming the dataset to JSON',
                error: error.message,
            });
        }

        // var settings = {
        //     settings: {
        //         index: {
        //             number_of_shards: 4,
        //             number_of_replicas: 3,
        //         },
        //     },
        // };
        // var test = await client.indices.create({
        //     index: index_name,
        //     body: settings,
        // });

        // var document = {
        //     title: 'Sample Document',
        //     authors: [
        //         {
        //             first_name: 'John',
        //             last_name: 'Doe',
        //         },
        //         {
        //             first_name: 'Jane',
        //             last_name: 'Smith',
        //         },
        //     ],
        //     year: '2018',
        //     genre: 'Crime fiction',
        // };

        // http://localhost:9200/datastets/_doc/1

        // var id = '1';

        try {
            // console.log(doc);
            let document = JSON.parse(`${doc}`);

            // https://opensearch.org/docs/2.1/opensearch/supported-field-types/geo-shape/
            // Define the new document
            // const document = {
            //     title: 'Your Document Name',
            //     id: dataset.publish_id,
            //     doctype: 'GIS',
            //     // "location" : {
            //     //     "type" : "point",
            //     //     "coordinates" : [74.00, 40.71]
            //     // },
            //     geo_location: {
            //         type: 'linestring',
            //         coordinates: [
            //             [-77.03653, 38.897676],
            //             [-77.009051, 38.889939],
            //         ],
            //     },
            //     // geo_location: 'BBOX (71.0589, 74.0060, 42.3601, 40.7128)'
            //     // geo_location: {
            //     //     type: 'envelope',
            //     //     coordinates: [
            //     //         [13.0, 53.0],
            //     //         [14.0, 52.0],
            //     //     ], // Define your BBOX coordinates
            //     // },
            // };

            // Index (or update) the document
            var test = await client.index({
                id: dataset.publish_id,
                index: index_name,
                body: document,
                refresh: true,
            });

            // Return the result
            return response.json(test.body);
        } catch (error) {
            // Handle any errors
            console.error(error);
            return response.status(500).json({ error: 'An error occurred while updating the data.' });
        }
    }

    public async destroy({}: HttpContextContract) {}

    public async syncOpensearch({}: HttpContextContract) {}

    private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
        const domNode = await this.getDatasetXmlDomNode(dataset);
        if (domNode) {
            datasetNode.import(domNode);
        }
    }

    private async getDatasetXmlDomNode(dataset: Dataset) {
        const xmlModel = new XmlModel(dataset);
        // xmlModel.setModel(dataset);
        xmlModel.excludeEmptyFields();
        xmlModel.caching = true;
        // const cache = dataset.xmlCache ? dataset.xmlCache : null;
        // dataset.load('xmlCache');
        if (dataset.xmlCache) {
            xmlModel.xmlCache = dataset.xmlCache;
        }

        // return cache.getDomDocument();
        const domDocument: XMLBuilder | null = await xmlModel.getDomDocument();
        return domDocument;
    }
}
@@ -85,7 +85,7 @@ export default class XmlModel {
        this.cache = this.cache || new DocumentXmlCache();
        this.cache.document_id = dataset.id;
        this.cache.xml_version = 1; // (int)$this->strategy->getVersion();
-       this.cache.server_date_modified = dataset.server_date_modified.toFormat("yyyy-MM-dd HH:mm:ss");
+       this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
        this.cache.xml_data = domDocument.end();
        await this.cache.save();
    }
@@ -97,7 +97,7 @@ export default class XmlModel {
            false,
            true,
        )?.node;
-       if(node != undefined) {
+       if (node != undefined) {
            domDocument = builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
        }
    }
@@ -13,7 +13,6 @@ import { BaseModel as LucidBaseModel } from '@ioc:Adonis/Lucid/Orm';
// }
// }

-
/**
 * Helper to find if value is a valid Object or
 * not
@@ -22,7 +21,7 @@ export function isObject(value: any): boolean {
    return value !== null && typeof value === 'object' && !Array.isArray(value);
}

-export default class BaseModel extends LucidBaseModel {
+export default class BaseModel extends LucidBaseModel {
    /**
     * When `fill` method is called, then we may have a situation where it
     * removed the values which exists in `original` and hence the dirty
@@ -117,7 +116,6 @@ export default class BaseModel extends LucidBaseModel {

        return this;
    }
-
}

// export class DatasetRelatedBaseModel extends LucidBaseModel {
@@ -49,5 +49,4 @@ export default class Collection extends BaseModel {
        foreignKey: 'role_id',
    })
    public collectionRole: BelongsTo<typeof CollectionRole>;
-
}
@@ -30,7 +30,6 @@ export type DatasetRelatedModel =
    | typeof DatasetIdentifier
    | typeof File;

-
export default abstract class DatasetExtension extends LucidBaseModel {
    public abstract id;
    public externalFields: Record<string, any> = this.getExternalFields();
@@ -323,7 +322,7 @@ export default abstract class DatasetExtension extends LucidBaseModel {
    private convertColumnToFieldname(columnName: string): string {
        return columnName
            .split(/[-_]/)
-            .map((word) => (word.charAt(0).toUpperCase() + word.slice(1)))
+            .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
            .join('');
    }
}