- first commit

This commit is contained in:
Arno Kaimbacher 2022-11-07 13:55:02 +01:00
commit 407717d4b5
57 changed files with 5510 additions and 0 deletions

76
src/app.ts Normal file
View file

@@ -0,0 +1,76 @@
import { Server } from "@overnightjs/core";
import express from "express";
import bodyParser from "body-parser";
import HomeRoutes from "./routes/home.routes.js";
import { initDB } from "./config/db.config";
import { DatasetController } from "./controllers/dataset.controller";
import { OaiController } from "./controllers/oai.controller";
import * as path from 'path';
export class App extends Server {
    /**
     * Creates the express application (provided by the @overnightjs Server
     * base class), mounts static file serving and middleware, then
     * asynchronously connects the database and registers all controllers.
     */
    constructor() {
        super();
        // serve the contents of ../prefixes under the /prefixes path
        this.app.use("/prefixes", express.static(path.join(__dirname, "../prefixes")));
        this.applyMiddleWares();
        // init db and add routes (fire-and-forget; errors are logged inside)
        this.bootstrap();
    }

    /** Starts the HTTP server on $PORT (default 3000). */
    public start(): void {
        const port = process.env.PORT || 3000;
        this.app.set("port", port);
        this.app.listen(port, () => {
            console.log("Server listening on port: " + port);
        });
    }

    /** Registers permissive CORS headers and the JSON/urlencoded body parsers. */
    private applyMiddleWares(): void {
        this.app.all("*", function (req, res, next) {
            res.setHeader("Access-Control-Allow-Origin", "*");
            res.header("Access-Control-Allow-Methods", "POST, PUT, OPTIONS, DELETE, GET");
            res.header("Access-Control-Max-Age", "3600");
            res.header(
                "Access-Control-Allow-Headers",
                "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With, x-access-token",
            );
            next();
        });
        this.app.use(bodyParser.json({ limit: "100mb" }));
        this.app.use(bodyParser.urlencoded({ limit: "50mb", extended: true }));
        // additionally parse JSON:API payloads (Content-Type: application/vnd.api+json)
        this.app.use(bodyParser.json({ type: "application/vnd.api+json" }));
    }

    /**
     * Connects to the database, then registers controllers and routes.
     * (Renamed from the misspelled "boostrap"; the method is private and its
     * only call site is in the constructor above.)
     */
    private bootstrap(): void {
        initDB()
            .then(() => {
                console.log("Connection has been established successfully.");
                super.addControllers([new DatasetController(), new OaiController()]);
                this.app.use("/api/", HomeRoutes);
                this.app.get("/", (request, response) => {
                    // NOTE(review): hard-coded absolute path — consider making this configurable
                    response.sendFile("/home/administrator/api/new-book.html");
                });
            })
            .catch((err) => {
                console.error("Unable to connect to the database:", err);
            });
    }
}

60
src/config/db.config.ts Normal file
View file

@@ -0,0 +1,60 @@
import { Dialect } from "sequelize";
import { Sequelize } from "sequelize-typescript";
// import * as dotenv from "dotenv"; // see https://github.com/motdotla/dotenv#how-do-i-use-dotenv-with-import
// dotenv.config();
// import 'dotenv/config';
import dotenv from "dotenv";
dotenv.config();
// import { Dataset } from '../models/Dataset';
// import { Abstract } from "../models/Abstract";
// import DocumentXmlCache from '../models/DocumentXmlCache';
// const dbName = process.env.DB_NAME as string
// const dbUser = process.env.DB_USER as string
// const dbHost = process.env.DB_HOST
// const dbDriver = process.env.DB_DRIVER as Dialect
// const dbPassword = process.env.DB_PASSWORD
// Database connection settings, read from the environment (.env is loaded above).
const dbSchema = process.env.DB_SCHEMA;
const dbName = process.env.DB_NAME as string;
const dbUser = process.env.DB_USER as string;
const dbPassword = process.env.DB_PASSWORD;
const dbHost = process.env.DB_HOST;
const dbDriver = process.env.DB_DRIVER as Dialect;
// Port was hard-coded to 5432; allow overriding via DB_PORT (default unchanged).
const dbPort = parseInt(process.env.DB_PORT ?? "5432", 10);

/**
 * Shared Sequelize connection for the whole application.
 * Defaults to a local postgres instance when host/driver are not configured.
 */
const sequelizeConnection = new Sequelize(dbName, dbUser, dbPassword, {
    schema: dbSchema,
    host: dbHost || "localhost",
    port: dbPort,
    dialect: dbDriver || "postgres",
    dialectOptions: {
        ssl: process.env.DB_SSL == "true",
        useUTC: false, //for reading from database
        dateStrings: true,
        typeCast: true,
    },
    pool: {
        max: 10,
        min: 0,
        acquire: 30000,
        idle: 10000,
    },
    logging: false,
    timezone: "+02:00", //for writing to database
});

/** Verifies that the database is reachable; rejects when authentication fails. */
export const initDB = async () => {
    await sequelizeConnection.authenticate();
};

export default sequelizeConnection;
// export { Dataset, Abstract };

12
src/config/oai.config.ts Normal file
View file

@@ -0,0 +1,12 @@
/**
 * OAI-PMH related configuration: page-size limits for the list verbs and the
 * directory used as scratch workspace.
 */
const listLimits = {
    listidentifiers: 100,
    listrecords: 50,
};

const config = {
    max: listLimits,
    workspacePath: "workspace",
};

export default config;
// config.api = {};
// config.api.url = process.env.API_URL || http://my.api.com;
// config.axios = 'General http send information'
// module.exports = config;

View file

@@ -0,0 +1,160 @@
import { Controller, Get } from "@overnightjs/core";
import dbContext from "../models/init-models.js";
import { Dataset, User, Person } from "../models/init-models.js";
import Sequelize from "sequelize";
const Op = Sequelize.Op;
import { Request, Response } from "express";
// import Logger from 'jet-logger';
import { StatusCodes } from "http-status-codes";
@Controller("api/dataset")
export class DatasetController {
@Get("")
public async findAll(req: Request, res: Response) {
// const type = req.query.type;
// var condition = type ? { type: { [Op.iLike]: `%${type}%` } } : null;,
const server_state = "published";
const condition = { server_state: { [Op.eq]: `${server_state}` } };
Dataset.findAll({
where: condition,
include: ["abstracts"],
order: ["server_date_published"],
})
.then((data) => {
res.send(data);
})
.catch((err) => {
res.status(500).send({
message: err.message || "Some error occurred while retrieving datasets.",
});
});
}
@Get(":publish_id")
public async findOne(req: Request, res: Response) {
const publish_id = req.params.publish_id;
const dataset = await dbContext.Dataset.findOne({
where: { publish_id: publish_id },
include: [
"titles",
"abstracts",
{
model: User,
as: "user",
},
{
model: Person,
through: { where: { role: "author" } },
as: "authors",
},
{
model: Person,
through: { where: { role: "contributor" } },
as: "contributors",
},
"subjects",
"coverage",
"licenses",
"project",
"files",
"identifier",
],
// order: ['server_date_published'],
});
// .then((data) => {
// if (data) {
// res.send(data);
// } else {
// res.status(404).send({
// message: `Cannot find Dataset with publish_id=${publish_id}.`,
// });
// }
// })
// .catch((err) => {
// res.status(500).send({
// message: "Error retrieving Dataset with publish_id=" + publish_id,
// });
// });
if (dataset) {
res.status(StatusCodes.OK).send(dataset);
} else {
res.status(StatusCodes.NOT_FOUND).send({
message: `Cannot find Dataset with publish_id=${publish_id}.`,
});
}
}
}
// Retrieve all Tutorials from the database.
// export async function findAll(req: Request, res: Response) {
// // const type = req.query.type;
// // var condition = type ? { type: { [Op.iLike]: `%${type}%` } } : null;,
// const server_state = "published";
// var condition = { server_state: { [Op.eq]: `${server_state}` } };
// Dataset.findAll({
// where: condition,
// include: [
// "abstracts"
// ],
// order: ['server_date_published'],
// })
// .then((data) => {
// res.send(data);
// })
// .catch((err) => {
// res.status(500).send({
// message:
// err.message || "Some error occurred while retrieving datasets.",
// });
// });
// }
// export async function findOne(req: Request, res: Response) {
// const publish_id = req.params.publish_id;
// dbContext.Dataset.findOne({
// where: { publish_id: publish_id },
// include: [
// "titles",
// "abstracts",
// {
// model: User,
// as: "user",
// },
// {
// model: Person,
// through: { where: { role: "author" } },
// as: "authors",
// },
// {
// model: Person,
// through: { where: { role: "contributor" } },
// as: "contributors",
// },
// "subjects",
// "coverage",
// "licenses",
// "project",
// "files",
// "identifier"
// ],
// // order: ['server_date_published'],
// })
// .then((data) => {
// if (data) {
// res.send(data);
// } else {
// res.status(404).send({
// message: `Cannot find Dataset with publish_id=${publish_id}.`,
// });
// }
// })
// .catch((err) => {
// res.status(500).send({
// message: "Error retrieving Dataset with publish_id=" + publish_id,
// });
// });
// }

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,77 @@
import sequelizeConnection from "../config/db.config";
import dbContext from "../models/init-models.js";
import Sequelize from "sequelize";
const Op = Sequelize.Op;
import { Person } from "../models/init-models.js";
/**
 * GET handler: responds with the distinct publication years of all published
 * datasets (raw SQL against gba.documents).
 *
 * Fix: the original only responded when at least one year was found, so a
 * request against an empty table hung forever — we now always answer.
 */
export async function findYears(req, res) {
    const serverState = "published";
    // Raw SQL query with a named replacement for the server state.
    const datasets = await sequelizeConnection.query(
        "SELECT distinct EXTRACT(YEAR FROM server_date_published) as published_date FROM gba.documents WHERE server_state = (:serverState)",
        {
            replacements: { serverState: serverState },
            type: sequelizeConnection.QueryTypes.SELECT,
        },
    );
    // keep only the year column of each row
    const years = datasets.map((dataset) => dataset.published_date);
    return res.status(200).json(years);
}
/**
 * GET handler: lists all published datasets of a given year (req.params.year),
 * including titles and authors, ordered by publication date.
 */
export async function findDocumentsPerYear(req, res) {
    // radix 10 avoids legacy octal parsing; reject non-numeric years up front
    const from = parseInt(req.params.year, 10);
    if (Number.isNaN(from)) {
        return res.status(400).send({ message: "Invalid year parameter." });
    }
    const serverState = "published";
    const conditions = {
        [Op.and]: [
            {
                server_state: `${serverState}`,
            },
            {
                // compare the year part of server_date_published with the requested year
                [Op.eq]: sequelizeConnection.where(
                    sequelizeConnection.fn("date_part", "year", sequelizeConnection.col("server_date_published")),
                    from,
                ),
            },
        ],
    };
    try {
        const data = await dbContext.Dataset.findAll({
            attributes: [
                "publish_id",
                "server_date_published",
                [sequelizeConnection.fn("date_part", "year", sequelizeConnection.col("server_date_published")), "pub_year"],
            ],
            where: conditions,
            include: [
                "titles",
                {
                    model: Person,
                    through: { where: { role: "author" } },
                    as: "authors",
                },
            ],
            order: ["server_date_published"],
        });
        res.send(data);
    } catch (err) {
        const message = err instanceof Error ? err.message : undefined;
        res.status(500).send({
            message: message || "Some error occurred while retrieving datasets.",
        });
    }
}
// function sendToElasticAndLogToConsole(sql, queryObject) {
// // save the `sql` query in Elasticsearch
// console.log(sql);
// // use the queryObject if needed (e.g. for debugging)
// }

View file

@@ -0,0 +1,738 @@
import { Controller, Get } from "@overnightjs/core";
import Sequelize from "sequelize";
import { NextFunction, Request, Response } from "express";
import { StatusCodes } from "http-status-codes";
import { create } from "xmlbuilder2";
import { XMLBuilder } from "xmlbuilder2/lib/interfaces";
import { readFileSync } from "fs";
// @ts-ignore
import { transform } from "saxon-js";
import dayjs, { Dayjs, OpUnitType } from "dayjs";
import { Dataset, Project, License } from "../models/init-models";
import Logger from "jet-logger";
import { BadOaiModelException, OaiModelException } from "../exceptions/OaiModelException";
import PageNotFoundException from "../exceptions/PageNotFoundException";
import { OaiErrorCodes } from "../exceptions/OaiErrorCodes";
import XmlModel from "../library/XmlModel";
import Configuration from "../library/oai/OaiConfiguration";
import ResumptionToken from "../library/oai/ResumptionToken";
import TokenWorker from "../library/oai/TokenWorker";
// String-keyed parameter bag handed to the saxon-js XSLT transformation.
// NOTE(review): values are untyped (any) — consider narrowing to string | number.
interface XslTParameter {
    [key: string]: any;
}

// Parsed OAI query-string parameters (verb, metadataPrefix, set, from, until, ...).
interface OaiParameter {
    [key: string]: any;
}

// Simple string-to-string map used for OAI set listings.
interface IDictionary {
    [index: string]: string;
}
/** Returns true when `str` matches `regex` (PHP preg_match-style helper). */
function preg_match(regex: RegExp, str: string) {
    return regex.test(str);
}
@Controller("oai")
export class OaiController {
private deliveringDocumentStates = ["published", "deleted"];
private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
private xsltParameter: XslTParameter;
private configuration: Configuration;
private tokenWorker: TokenWorker;
/**
* Holds xml representation of document information to be processed.
*
* @var xmlbuilder.XMLDocument | null Defaults to null.
*/
private xml: XMLBuilder;
private proc;
constructor() {
this.proc = readFileSync(__dirname + "/datasetxml2oai.sef.json");
this.configuration = new Configuration();
}
@Get("")
public async index(request: Request, response: Response, next: NextFunction) {
this.xml = create(
{ version: "1.0", encoding: "UTF-8", standalone: true },
"<root></root>",
// {
// keepNullNodes: false,
// keepNullAttributes: false,
// headless: false,
// ignoreDecorators: false,
// separateArrayItems: false,
// noDoubleEncoding: false,
// noValidation: false,
// invalidCharReplacement: undefined,
// stringify: {},
// },
);
// this.proc = new XSLTProcessor();
// const stylesheet = readFileSync(__dirname + "/datasetxml2oai.sef.json");
const xsltParameter = (this.xsltParameter = {});
let earliestDateFromDb;
const firstPublishedDataset: Dataset | null = await Dataset.earliestPublicationDate();
firstPublishedDataset != null &&
(earliestDateFromDb = dayjs(firstPublishedDataset.server_date_published).format("YYYY-MM-DDThh:mm:ss[Z]"));
this.xsltParameter["earliestDatestamp"] = earliestDateFromDb;
const oaiRequest: OaiParameter = request.query;
try {
await this.handleRequest(oaiRequest, request);
} catch (error) {
// return next(error);
if (error instanceof OaiModelException) {
this.xsltParameter["oai_error_code"] = error.oaiCode;
this.xsltParameter["oai_error_message"] = error.message;
} else {
// return next(error); // passing to default express middleware error handler
this.xsltParameter["oai_error_code"] = "unknown";
this.xsltParameter["oai_error_message"] = "An internal error occured.";
}
}
// catch (error) { // manually catching
// return next(error); // passing to default express middleware error handler
// }
const xmlString = this.xml.end({ prettyPrint: true });
// let data = await transform({
// stylesheetText: stylesheet,
// // stylesheetBaseURI: "my-stylesheet.sef.json",
// sourceText: xmlString,
// destination: "serialized"
// });
// .then((data: any) => {
// response.writeHead(200, {'Content-Type': 'application/xml'});
// response.write(data.principalResult);
// response.end();
// });
let xmlOutput;
try {
const result = await transform({
// stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
stylesheetText: this.proc,
destination: "serialized",
// sourceFileName: sourceFile,
sourceText: xmlString,
stylesheetParams: xsltParameter,
// logLevel: 10,
});
xmlOutput = result.principalResult;
} catch (error) {
// return next(error);
// if (error instanceof OaiModelException) {
// this.xsltParameter["oai_error_code"] = error.oaiCode;
// this.xsltParameter["oai_error_message"] = error.message;
// } else {
// // return next(error); // passing to default express middleware error handler
// this.xsltParameter["oai_error_code"] = "unknown";
// this.xsltParameter["oai_error_message"] = "An internal error occured.";
// }
return next(error);
}
response
.header("Content-Type", "application/xml")
.header("Access-Control-Allow-Origin", "*")
.header("Access-Control-Allow-Methods", "GET,POST");
response.status(StatusCodes.OK).send(xmlOutput);
// response.end();
}
protected async handleRequest(oaiRequest: OaiParameter, request: Request) {
// Setup stylesheet
// $this->loadStyleSheet('datasetxml2oai-pmh.xslt');
// Set response time
const now: dayjs.Dayjs = dayjs();
this.xsltParameter["responseDate"] = now.format("YYYY-MM-DDThh:mm:ss[Z]");
this.xsltParameter["unixTimestamp"] = now.unix();
// set OAI base url
const baseDomain = process.env.BASE_DOMAIN || "localhost";
this.xsltParameter["baseURL"] = baseDomain + "/oai";
this.xsltParameter["repURL"] = request.protocol + "://" + request.get("host");
this.xsltParameter["downloadLink"] = request.protocol + "://" + request.get("host") + "/file/download/";
this.xsltParameter["doiLink"] = "https://doi.org/";
this.xsltParameter["doiPrefix"] = "info:eu-repo/semantics/altIdentifier/doi/";
if (oaiRequest["verb"]) {
const verb = oaiRequest["verb"];
this.xsltParameter["oai_verb"] = verb;
if (verb == "Identify") {
this.handleIdentify();
} else if (verb == "ListMetadataFormats") {
this.handleListMetadataFormats();
} else if (verb == "GetRecord") {
await this.handleGetRecord(oaiRequest);
} else if (verb == "ListRecords") {
await this.handleListRecords(oaiRequest);
} else if (verb == "ListIdentifiers") {
await this.handleListIdentifiers(oaiRequest);
} else if (verb == "ListSets") {
await this.handleListSets();
} else {
this.handleIllegalVerb();
}
} else {
// const err = new HttpException(404, 'Not Found')
// next(err);
// try {
// console.log("Async code example.")
// const err = new HttpException(404, 'Not Found');
const err = new PageNotFoundException("verb not found");
throw err;
// } catch (error) { // manually catching
// next(error); // passing to default middleware error handler
// }
}
}
protected handleIdentify() {
const email = "repository@geologie.ac.at";
const repositoryName = "Tethys RDR";
const repIdentifier = "tethys.at";
const sampleIdentifier = "oai:" + repIdentifier + ":1"; //$this->_configuration->getSampleIdentifier();
// Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null;
// earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published);
// set parameters for oai-pmh.xslt
this.xsltParameter["email"] = email;
this.xsltParameter["repositoryName"] = repositoryName;
this.xsltParameter["repIdentifier"] = repIdentifier;
this.xsltParameter["sampleIdentifier"] = sampleIdentifier;
// $this->proc->setParameter('', 'earliestDatestamp', $earliestDateFromDb);
this.xml.root().ele("Datasets");
}
/**
* Implements response for OAI-PMH verb 'ListMetadataFormats'.
*
* @param array &$oaiRequest Contains full request information
* @return void
*/
protected handleListMetadataFormats() {
this.xml.root().ele("Datasets");
}
protected async handleListSets() {
const repIdentifier = "tethys.at";
this.xsltParameter["repIdentifier"] = repIdentifier;
const datasetElement = this.xml.root().ele("Datasets");
const sets: { [key: string]: string } = {
open_access: "Set for open access licenses",
// 'bibliography:true' => 'Set for bibliographic entries',
// 'bibliography:false' => 'Set for non-bibliographic entries',
...(await this.getSetsForDatasetTypes()),
// ... await this.getSetsForProjects(),
} as IDictionary;
for (const [key, value] of Object.entries(sets)) {
const setElement = datasetElement.ele("Rdr_Sets");
setElement.att("Type", key);
setElement.att("TypeName", value);
}
}
protected async handleGetRecord(oaiRequest: OaiParameter) {
// GetRecord&metadataPrefix=oai_dc&identifier=oai:tethys.at:1
const repIdentifier = "tethys.at";
this.xsltParameter["repIdentifier"] = repIdentifier;
// Identifier references metadata Urn, not plain Id!
// Currently implemented as 'oai:foo.bar.de:{docId}' or 'urn:nbn...-123'
if (!("identifier" in oaiRequest)) {
// throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
throw new BadOaiModelException("The prefix of the identifier argument is unknown.");
}
const dataId = Number(this.getDocumentIdByIdentifier(oaiRequest.identifier));
// let dataset: Dataset | null;
const dataset = await Dataset.findOne({
where: { publish_id: dataId },
include: ["xmlCache"],
// order: ['server_date_published'],
});
if (!dataset || !dataset.publish_id) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"The value of the identifier argument is unknown or illegal in this repository.",
OaiErrorCodes.IDDOESNOTEXIST,
);
}
let metadataPrefix = null;
if ("metadataPrefix" in oaiRequest) {
metadataPrefix = oaiRequest["metadataPrefix"];
} else {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"The prefix of the metadata argument is unknown.",
OaiErrorCodes.BADARGUMENT,
);
}
this.xsltParameter["oai_metadataPrefix"] = metadataPrefix;
// do not deliver datasets which are restricted by document state
if (dataset.server_state == null || !this.deliveringDocumentStates.includes(dataset.server_state)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"Document is not available for OAI export!",
OaiErrorCodes.NORECORDSMATCH,
);
}
// add xml elements
const datasetNode = this.xml.root().ele("Datasets");
await this.createXmlRecord(dataset, datasetNode);
// let domNode = await this.getDatasetXmlDomNode(dataset);
// // add frontdoor url
// // dataset.publish_id = dataset.publish_id != null ? dataset.publish_id : 0;
// this.addLandingPageAttribute(domNode, dataset.publish_id.toString());
// this.addSpecInformation(domNode, "data-type:" + dataset.type);
// datasetNode.import(domNode);
}
/**
* Implements response for OAI-PMH verb 'ListRecords'.
*
* @param array &$oaiRequest Contains full request information
* @return void
*/
protected async handleListRecords(oaiRequest: OaiParameter) {
if (!this.tokenWorker) {
this.tokenWorker = new TokenWorker(86400);
}
!this.tokenWorker.Connected && (await this.tokenWorker.connect());
//$maxRecords = 30; //$this->_configuration->getMaxListRecords();
const maxRecords = this.configuration.maxListRecs;
await this.handlingOfLists(oaiRequest, maxRecords);
await this.tokenWorker.close();
}
/**
* Implements response for OAI-PMH verb 'ListIdentifiers'.
*
* @param array &$oaiRequest Contains full request information
* @return void
*/
protected async handleListIdentifiers(oaiRequest: OaiParameter) {
// if ("resumptionToken" in oaiRequest) {
if (!this.tokenWorker) {
this.tokenWorker = new TokenWorker(86400);
}
!this.tokenWorker.Connected && (await this.tokenWorker.connect());
//$maxIdentifier = 5; //$this->_configuration->getMaxListIdentifiers();
const maxIdentifier = this.configuration.maxListIds; //->getMaxListIdentifiers();
await this.handlingOfLists(oaiRequest, maxIdentifier);
await this.tokenWorker.close();
}
private async handlingOfLists(oaiRequest: OaiParameter, maxRecords: number) {
if (!maxRecords) {
maxRecords = 100;
}
const repIdentifier = "tethys.at";
// //$this->_configuration->getResumptionTokenPath();
// $tokenTempPath = storage_path('app' . DIRECTORY_SEPARATOR . 'resumption');
this.xsltParameter["repIdentifier"] = repIdentifier;
const datasetNode = this.xml.root().ele("Datasets");
// // do some initialisation
let cursor = 0;
let totalIds = 0;
let start = maxRecords + 1;
let reldocIds: (number | null)[] = [];
let metadataPrefix = null;
// const tokenWorker = new TokenWorker(86400);
// await tokenWorker.connect();
// $tokenWorker->setResumptionPath($tokenTempPath);
// const url = process.env.REDIS_URL || "redis://redis:6379";
// const redisClient = createClient({
// url
// });
// redisClient.on('error', (error) => {
// const err = new InternalServerErrorException("Error occured while connecting or accessing redis server'");
// throw err;
// });
// resumptionToken is defined
if ("resumptionToken" in oaiRequest) {
const resParam = oaiRequest["resumptionToken"]; //e.g. "158886496600000"
// let token = await tokenWorker.getResumptionToken(resParam);
const token = await this.tokenWorker.get(resParam);
if (!token) {
throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, "cache is outdated.", OaiErrorCodes.BADRESUMPTIONTOKEN);
}
cursor = token.StartPosition - 1; //startet dann bei Index 10
start = token.StartPosition + maxRecords;
totalIds = token.TotalIds;
reldocIds = token.DocumentIds;
metadataPrefix = token.MetadataPrefix;
this.xsltParameter["oai_metadataPrefix"] = metadataPrefix;
} else {
// no resumptionToken is given
if ("metadataPrefix" in oaiRequest) {
metadataPrefix = oaiRequest["metadataPrefix"];
} else {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"The prefix of the metadata argument is unknown.",
OaiErrorCodes.BADARGUMENT,
);
}
this.xsltParameter["oai_metadataPrefix"] = metadataPrefix;
// add server state restrictions
const includeArray: Array<any> = [];
const andArray: Array<any> = new Array({
server_state: {
[Sequelize.Op.in]: this.deliveringDocumentStates,
},
});
// andArray.push({
// server_state: {
// [Sequelize.Op.in]: this.deliveringDocumentStates,
// },
// });
if ("set" in oaiRequest) {
const set = oaiRequest["set"] as string;
const setArray = set.split(":");
if (setArray[0] == "data-type") {
if (setArray.length == 2 && setArray[1]) {
andArray.push({
type: {
[Sequelize.Op.eq]: setArray[1],
},
});
}
} else if (setArray[0] == "open_access") {
const openAccessLicences = ["CC-BY-4.0", "CC-BY-SA-4.0"];
let icncludeFilter = {
model: License,
as: "licenses",
required: true, //return only records which have an associated model INNER JOIN
where: {
name: {
[Sequelize.Op.in]: openAccessLicences,
},
},
};
includeArray.push(icncludeFilter);
}
}
// &from=2020-09-03&until2020-09-03
// &from=2020-09-11&until=2021-05-11
if ("from" in oaiRequest && "until" in oaiRequest) {
const from = oaiRequest["from"] as string;
let fromDate = dayjs(from);
const until = oaiRequest["until"] as string;
let untilDate = dayjs(until);
if (from.length != until.length) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"The request has different granularities for the from and until parameters.",
OaiErrorCodes.BADARGUMENT,
);
}
fromDate.hour() == 0 && (fromDate = fromDate.startOf("day"));
untilDate.hour() == 0 && (untilDate = untilDate.endOf("day"));
andArray.push({
server_date_published: {
// [Sequelize.Op.between]: [fromDate, untilDate]
[Sequelize.Op.and]: {
[Sequelize.Op.gte]: fromDate.format("YYYY-MM-DD HH:mm:ss"),
[Sequelize.Op.lte]: untilDate.format("YYYY-MM-DD HH:mm:ss"),
},
},
});
} else if ("from" in oaiRequest && !("until" in oaiRequest)) {
const from = oaiRequest["from"] as string;
let fromDate = dayjs(from);
fromDate.hour() == 0 && (fromDate = fromDate.startOf("day"));
const now = dayjs();
if (fromDate.isAfter(now)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"Given from date is greater than now. The given values results in an empty list.",
OaiErrorCodes.NORECORDSMATCH,
);
} else {
// $finder->where('server_date_published', '>=', $fromDate);
andArray.push({
server_date_published: {
[Sequelize.Op.gte]: fromDate.format("YYYY-MM-DD HH:mm:ss"),
},
});
}
} else if (!("from" in oaiRequest) && "until" in oaiRequest) {
const until = oaiRequest["until"] as string;
let untilDate = dayjs(until);
untilDate.hour() == 0 && (untilDate = untilDate.endOf("day"));
const firstPublishedDataset: Dataset = (await Dataset.earliestPublicationDate()) as Dataset;
const earliestPublicationDate = dayjs(firstPublishedDataset.server_date_published); //format("YYYY-MM-DDThh:mm:ss[Z]"));
if (earliestPublicationDate.isAfter(untilDate)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
`earliestDatestamp is greater than given until date.
The given values results in an empty list.`,
OaiErrorCodes.NORECORDSMATCH,
);
} else {
// $finder->where('server_date_published', '<=', $untilDate);
andArray.push({
server_date_published: {
[Sequelize.Op.lte]: untilDate.format("YYYY-MM-DD HH:mm:ss"),
},
});
}
}
reldocIds = (
await Dataset.findAll({
attributes: ["publish_id"],
where: andArray,
order: ["publish_id"],
include: includeArray,
raw: true,
// logging: (sql, queryObject) => {
// const test = sql;
// },
})
).map((dat) => dat.publish_id);
// reldocIds = await Dataset.findAll({
// // attributes: ["publish_id"],
// where: andArray,
// include: ["xmlCache"],
// order: ["server_date_published"],
// // logging: (sql, queryObject) => {
// // const test = sql;
// // },
// });
totalIds = reldocIds.length; //184
} //else resumptionToekn
// // handling of document ids
const restIds = reldocIds as number[];
const workIds = restIds.splice(0, maxRecords) as number[]; // array_splice(restIds, 0, maxRecords);
// no records returned
if (workIds.length == 0) {
// await tokenWorker.close();
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
"The combination of the given values results in an empty list.",
OaiErrorCodes.NORECORDSMATCH,
);
}
//foreach ($datasets as $dataset)
const datasets: Dataset[] = await Dataset.findAll({
// attributes: ["publish_id"],
where: {
publish_id: {
[Sequelize.Op.in]: workIds,
},
},
include: ["xmlCache"],
order: ["publish_id"],
});
for (const dataset of datasets) {
// let dataset = Dataset.findOne({
// where: {'publish_id': dataId}
// });
await this.createXmlRecord(dataset, datasetNode);
}
// store the further Ids in a resumption-file
const countRestIds = restIds.length; //84
if (countRestIds > 0) {
const token = new ResumptionToken();
token.StartPosition = start; //101
token.TotalIds = totalIds; //184
token.DocumentIds = restIds; //101 -184
token.MetadataPrefix = metadataPrefix;
// $tokenWorker->storeResumptionToken($token);
const res = await this.tokenWorker.set(token);
// set parameters for the resumptionToken-node
// const res = token.ResumptionId;
this.setParamResumption(res, cursor, totalIds);
}
}
/**
* Set parameters for resumptionToken-line.
*
* @param string $res value of the resumptionToken
* @param int $cursor value of the cursor
* @param int $totalIds value of the total Ids
*/
private setParamResumption(res: string, cursor: number, totalIds: number) {
const tomorrow = dayjs().add(1, "day").format("YYYY-MM-DDThh:mm:ss[Z]");
this.xsltParameter["dateDelete"] = tomorrow;
this.xsltParameter["res"] = res;
this.xsltParameter["cursor"] = cursor;
this.xsltParameter["totalIds"] = totalIds;
}
private addSpecInformation(domNode: XMLBuilder, information: string) {
domNode.ele("SetSpec").att("Value", information);
}
private addLandingPageAttribute(domNode: XMLBuilder, dataid: string) {
const baseDomain = process.env.BASE_DOMAIN || "localhost";
const url = "https://" + this.getDomain(baseDomain) + "/dataset/" + dataid;
// add attribute du dataset xml element
domNode.att("landingpage", url);
}
private getDomain(host: string): string {
// $myhost = strtolower(trim($host));
let myHost: string = host.trim().toLocaleLowerCase();
// $count = substr_count($myhost, '.');
const count: number = myHost.split(",").length - 1;
if (count == 2) {
const words = myHost.split(".");
if (words[1].length > 3) {
myHost = myHost.split(".", 2)[1];
}
} else if (count > 2) {
myHost = this.getDomain(myHost.split(".", 2)[1]);
}
myHost = myHost.replace(new RegExp(/^.*:\/\//i, "g"), "");
return myHost;
}
private getDocumentIdByIdentifier(oaiIdentifier: string): string {
const identifierParts: string[] = oaiIdentifier.split(":"); // explode(":", $oaiIdentifier);
const dataId: string = identifierParts[2];
// switch (identifierParts[0]) {
// case 'oai':
// if (isset($identifierParts[2])) {
// $dataId = $identifierParts[2];
// }
// break;
// default:
// throw new OaiModelException(
// 'The prefix of the identifier argument is unknown.',
// OaiModelError::BADARGUMENT
// );
// break;
// }
// if (empty($dataId) or !preg_match('/^\d+$/', $dataId)) {
// throw new OaiModelException(
// 'The value of the identifier argument is unknown or illegal in this repository.',
// OaiModelError::IDDOESNOTEXIST
// );
return dataId;
}
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
const domNode = await this.getDatasetXmlDomNode(dataset);
// add frontdoor url and data-type
// if (dataset.publish_id) {
dataset.publish_id && this.addLandingPageAttribute(domNode, dataset.publish_id.toString());
// }
this.addSpecInformation(domNode, "data-type:" + dataset.type);
datasetNode.import(domNode);
}
private async getDatasetXmlDomNode(dataset: Dataset) {
// dataset.fetchValues();
const xmlModel = new XmlModel(dataset);
// xmlModel.setModel(dataset);
xmlModel.excludeEmptyFields();
// const cache = dataset.xmlCache ? dataset.xmlCache : new DocumentXmlCache();
if (dataset.xmlCache) {
xmlModel.setXmlCache = dataset.xmlCache;
}
xmlModel.caching = true;
// return cache.getDomDocument();
const domDocument = await xmlModel.getDomDocument();
return domDocument;
}
/**
 * Build the OAI set dictionary for all projects ("project:<label>").
 * Projects whose label is not a valid setSpec are skipped and logged.
 */
private async getSetsForProjects(): Promise<IDictionary> {
    const sets: { [key: string]: string } = {} as IDictionary;
    const projects: Array<Project> = await Project.findAll({
        attributes: ["label"],
        raw: true,
    });
    for (const project of projects) {
        if (false == preg_match(this.sampleRegEx, project.label)) {
            const msg = `Invalid SetSpec (project='${project.label}').
Allowed characters are [${this.sampleRegEx}].`;
            Logger.err(`OAI: ${msg}`);
            continue;
        }
        sets["project:" + project.label] = `Set for project '${project.label}'`;
    }
    return sets;
}
/**
 * Build the OAI set dictionary for all published dataset types
 * ("data-type:<type>"). Invalid type names are skipped and logged.
 */
private async getSetsForDatasetTypes(): Promise<IDictionary> {
    const sets: { [key: string]: string } = {} as IDictionary;
    const datasets: Array<Dataset> = await Dataset.findAll({
        attributes: ["type"],
        where: { server_state: { [Sequelize.Op.eq]: "published" } },
    });
    for (const dataset of datasets) {
        if (dataset.type && false == preg_match(this.sampleRegEx, dataset.type)) {
            const msg = `Invalid SetSpec (data-type='${dataset.type}').
Allowed characters are [${this.sampleRegEx}].`;
            Logger.err(`OAI: ${msg}`);
            continue;
        }
        sets["data-type:" + dataset.type] = `Set for document type '${dataset.type}'`;
    }
    return sets;
}
/** Record a "badVerb" OAI error for the XSLT error template. */
private handleIllegalVerb() {
    Object.assign(this.xsltParameter, {
        oai_error_code: "badVerb",
        oai_error_message: "The verb provided in the request is illegal.",
    });
}
}

View file

@ -0,0 +1,11 @@
import { StatusCodes } from "http-status-codes";
import HTTPException from "./HttpException";
/** 400 Bad Request error with a suppressed stack trace. */
class BadRequestException extends HTTPException {
    constructor(message?: string) {
        const text = message || "bad Request";
        super(StatusCodes.BAD_REQUEST, text);
        // hide implementation details from serialized responses
        this.stack = "";
    }
}
export default BadRequestException;

View file

@ -0,0 +1,12 @@
/**
 * Base class for HTTP-aware errors carried to the Express error handler.
 */
class HTTPException extends Error {
    /** HTTP status code reported to the client. */
    public status: number;
    /** Human readable error description (mirrors Error.message). */
    public message: string;

    constructor(status: number, message: string) {
        super(message);
        // When the compile target is below ES2015, extending Error breaks
        // the prototype chain and `instanceof HTTPException` fails; restore
        // it so the error-handling middleware can discriminate error types.
        Object.setPrototypeOf(this, new.target.prototype);
        this.status = status;
        this.message = message;
    }
}
export default HTTPException;

View file

@ -0,0 +1,11 @@
import { StatusCodes } from "http-status-codes";
import HTTPException from "./HttpException";
/** 500 Internal Server Error with a suppressed stack trace. */
class InternalServerErrorException extends HTTPException {
    constructor(message?: string) {
        const text = message || "Server Error";
        super(StatusCodes.INTERNAL_SERVER_ERROR, text);
        // hide implementation details from serialized responses
        this.stack = "";
    }
}
export default InternalServerErrorException;

View file

@ -0,0 +1,11 @@
// Numeric codes for OAI-PMH protocol error conditions; the names mirror
// the OAI-PMH error identifiers (badVerb, badArgument, ...).
export enum OaiErrorCodes {
    BADVERB = 1010,
    BADARGUMENT = 1011,
    CANNOTDISSEMINATEFORMAT = 1012,
    BADRESUMPTIONTOKEN = 1013,
    NORECORDSMATCH = 1014,
    IDDOESNOTEXIST = 1015,
}
// 👇️ default export
// export { OaiErrorCodes };

View file

@ -0,0 +1,77 @@
import { StatusCodes } from "http-status-codes";
// import HTTPException from './HttpException';
import { OaiErrorCodes } from "./OaiErrorCodes";
// String constants identifying broad application error categories.
export class ErrorCode {
    public static readonly Unauthenticated = "Unauthenticated";
    public static readonly NotFound = "NotFound";
    public static readonly MaximumAllowedGrade = "MaximumAllowedGrade";
    public static readonly AsyncError = "AsyncError";
    public static readonly UnknownError = "UnknownError";
}
// Shape of an error payload sent to clients.
// NOTE(review): fields have no initializers; this only compiles with
// strictPropertyInitialization disabled - confirm tsconfig settings.
export class ErrorModel {
    /**
     * Unique error code which identifies the error.
     */
    public code: string;
    /**
     * Status code of the error.
     */
    public status: number;
    /**
     * Any additional data that is required for translation.
     */
    // public metaData?: any;
}
/**
 * Error signalled while answering an OAI-PMH request. Carries both the
 * HTTP status for the response and the OAI protocol error code
 * (see OaiErrorCodes) used by the XSLT error template.
 */
export class OaiModelException extends Error {
    /** HTTP status code for the response. */
    public status: number;
    /** Human readable error description (mirrors Error.message). */
    public message: string;
    /** OAI-PMH protocol error code (see OaiErrorCodes). */
    public oaiCode: number;

    constructor(status: number, message: string, oaiCode: number) {
        super(message);
        // Restore the prototype chain so `instanceof OaiModelException`
        // keeps working when the compile target is below ES2015.
        Object.setPrototypeOf(this, new.target.prototype);
        this.status = status;
        this.message = message;
        this.oaiCode = oaiCode;
    }
}
// OAI "badArgument" error with a suppressed stack trace.
export class BadOaiModelException extends OaiModelException {
    constructor(message?: string) {
        // NOTE(review): despite the name and the "bad Request" default text,
        // this reports HTTP 500 (INTERNAL_SERVER_ERROR) - confirm whether
        // StatusCodes.BAD_REQUEST was intended.
        super(StatusCodes.INTERNAL_SERVER_ERROR, message || "bad Request", OaiErrorCodes.BADARGUMENT);
        this.stack = "";
    }
}
// export default OaiModelexception;

View file

@ -0,0 +1,11 @@
import { StatusCodes } from "http-status-codes";
import HTTPException from "./HttpException";
/** 404 error thrown when a route does not match any resource. */
class PageNotFoundException extends HTTPException {
    constructor(message?: string) {
        const text = message || "Page not found";
        super(StatusCodes.NOT_FOUND, text);
        // hide implementation details from serialized responses
        this.stack = "";
    }
}

View file

@ -0,0 +1,23 @@
import { NextFunction, Request, Response } from "express";
// import { ErrorCode } from './error-code';
// import { ErrorException } from './error-exception';
// import { ErrorModel } from './error-model';
import HTTPException from "./HttpException";
import { StatusCodes } from "http-status-codes";
import { OaiModelException } from "./OaiModelException";
export const errorHandler = (err: HTTPException | OaiModelException, req: Request, res: Response) => {
console.log("Error handling middleware called.");
console.log("Path:", req.path);
console.error("Error occured:", err);
if (err instanceof HTTPException) {
console.log("Http Error is known.");
res.status(err.status).send(err);
} else if (err instanceof OaiModelException) {
console.log("Oai-Error is known.");
res.status(err.status).send(err);
} else {
// For unhandled errors.
res.status(500).send({ code: StatusCodes.INTERNAL_SERVER_ERROR, status: 500 });
}
};

184
src/library/XmlModel.ts Normal file
View file

@ -0,0 +1,184 @@
import DocumentXmlCache from "../models/DocumentXmlCache";
// import { XMLDocument } from "xmlbuilder";
import { XMLBuilder } from "xmlbuilder2/lib/interfaces";
import Dataset from "../models/Dataset";
import Logger from "jet-logger";
import { create } from "xmlbuilder2";
import dayjs from "dayjs";
/**
* This is the description of the interface
*
* @interface Conf
* @member {Model} model holds the current dataset model
* @member {XMLBuilder} dom holds the current DOM representation
*/
export interface Conf {
    /**
     * Holds the current model either directly set or deserialized from XML.
     */
    model: Dataset;
    /**
     * Holds the current DOM representation (only present once a document
     * has been built or loaded).
     */
    dom?: XMLBuilder;
    /**
     * List of fields to skip on serialization.
     */
    excludeFields: Array<string>;
    /**
     * True, if empty fields get excluded from serialization.
     */
    excludeEmpty: boolean;
    /**
     * Base URI for xlink:ref elements
     */
    baseUri: string;
}
/**
 * Serialises a Dataset model into an XML DOM document, optionally backed
 * by a per-document XML cache stored in the database (DocumentXmlCache).
 */
export default class XmlModel {
    /** Serialisation configuration, including the model to serialise. */
    private config: Conf;
    /** Database-backed XML cache entry, if any. */
    private cache: DocumentXmlCache;
    /** True if generated XML should be written to the cache. */
    private _caching = false;

    constructor(dataset: Dataset) {
        this.config = {
            excludeEmpty: false,
            baseUri: "",
            excludeFields: [],
            model: dataset,
        };
    }

    /**
     * Set the Model for XML generation.
     * @param model Model to serialize.
     */
    set setModel(model: Dataset) {
        this.config.model = model;
    }

    /** Define that empty fields (value === null) shall be excluded. */
    public excludeEmptyFields(): void {
        this.config.excludeEmpty = true;
    }

    /**
     * Return the cache entry backing this model, if set.
     * @returns {DocumentXmlCache}
     */
    get getXmlCache(): DocumentXmlCache {
        return this.cache;
    }

    /**
     * Set the cache entry to read from / write to.
     * @param { DocumentXmlCache } cache table
     */
    set setXmlCache(cache: DocumentXmlCache) {
        this.cache = cache;
    }

    get caching(): boolean {
        return this._caching;
    }

    set caching(caching: boolean) {
        this._caching = caching;
    }

    /**
     * Return the DOM document for the configured dataset.
     * Serves from the XML cache when a valid entry exists; otherwise builds
     * a fresh document and (if caching is enabled) persists it to the cache.
     */
    public async getDomDocument() {
        const dataset = this.config.model;
        let domDocument: XMLBuilder | null = await this.getDomDocumentFromXmlCache();
        if (domDocument) {
            return domDocument;
        } else {
            // create domDocument during runtime
            domDocument = create({ version: "1.0", encoding: "UTF-8", standalone: true }, "<root></root>");
        }
        // if caching isn't wanted return only the dom document
        if (this._caching != true) {
            return domDocument;
        } else {
            // caching is desired: save the xml cache to db and return the document
            if (!this.cache) {
                this.cache = new DocumentXmlCache();
                this.cache.document_id = dataset.id;
            }
            this.cache.xml_version = 1;
            this.cache.server_date_modified = dayjs(dataset.server_date_modified).format("YYYY-MM-DD HH:mm:ss");
            this.cache.xml_data = domDocument.end();
            // BUG FIX: save() is asynchronous - await it so a failed cache
            // write is not silently dropped and the entry is persisted
            // before this method resolves.
            await this.cache.save();
            return domDocument;
        }
    }

    /**
     * Try to serve the DOM document from the XML cache.
     * @returns the cached document, or null when no valid cache entry exists.
     */
    private async getDomDocumentFromXmlCache(): Promise<XMLBuilder | null> {
        const dataset: Dataset = this.config.model;
        if (null == this.cache) {
            Logger.warn(`__METHOD__ . skipping cache for ${dataset}`);
            return null;
        }
        const actuallyCached: boolean = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
        // no cache entry, or the entry is stale
        if (true != actuallyCached) {
            Logger.warn(" cache missing for " + "#" + dataset.id);
            return null;
        }
        // cache is current - return it for oai
        try {
            if (this.cache) {
                return this.cache.getDomDocument();
            } else {
                Logger.warn(" Access to XML cache failed on " + dataset + "#" + dataset.id + ". Trying to recover.");
                return null;
            }
        } catch (error) {
            Logger.warn(" Access to XML cache failed on " + dataset + "#" + dataset.id + ". Trying to recover.");
            return null;
        }
    }
}
// export default XmlModel;

View file

@ -0,0 +1,71 @@
import config from "../../config/oai.config";
/**
 * Runtime configuration for the OAI module: listing limits and the
 * (currently unused) resumption-token file path.
 */
export default class Configuration {
    /** Path where temporary resumption token files would be stored. */
    private _pathTokens = "";
    /** Maximum number of identifiers listed per request. */
    private _maxListIds = 15;
    /** Maximum number of records listed per request. */
    private _maxListRecs = 15;

    constructor() {
        // limits come from the oai config file
        this._maxListIds = config.max.listidentifiers as number;
        this._maxListRecs = config.max.listrecords as number;
    }

    /**
     * Maximum number of listable identifiers per request.
     */
    public get maxListIds(): number {
        return this._maxListIds;
    }

    public set maxListIds(value: number) {
        this._maxListIds = value;
    }

    /**
     * Maximum number of listable records per request.
     */
    public get maxListRecs() {
        return this._maxListRecs;
    }

    public set maxListRecs(value) {
        this._maxListRecs = value;
    }

    /**
     * Temporary path for resumption tokens.
     */
    get getResumptionTokenPath(): string {
        return this._pathTokens;
    }
}

View file

@ -0,0 +1,107 @@
/**
 * In-memory representation of an OAI-PMH resumption token: the document
 * ids of a paged result set plus the paging metadata needed to continue.
 */
export default class ResumptionToken {
    /** Document ids held by this token. */
    private _documentIds: number[] = [];

    /** Metadata prefix the result set was requested with. */
    private _metadataPrefix = "";

    /** Resumption id (only set once the token has been stored). */
    private _resumptionId = "";

    /** Start position of this page inside the full result set. */
    private _startPosition = 0;

    /** Total amount of document ids in the full result set. */
    private _totalIds = 0;

    //#region properties

    /** Cache key derived from prefix, start position and total count. */
    get Key(): string {
        return this.MetadataPrefix + this.StartPosition + this.TotalIds;
    }

    /**
     * Returns currently held document ids.
     */
    public get DocumentIds(): number[] {
        return this._documentIds;
    }

    /** Accepts a single id or an array of ids. */
    public set DocumentIds(idsToStore: number | number[]) {
        if (!Array.isArray(idsToStore)) {
            // BUG FIX: `new Array(n)` creates an empty array of length n,
            // not `[n]`; wrap the single id in an array literal instead.
            idsToStore = [idsToStore];
        }
        this._documentIds = idsToStore;
    }

    /**
     * Returns metadata prefix information.
     */
    public get MetadataPrefix(): string {
        return this._metadataPrefix;
    }

    public set MetadataPrefix(value) {
        this._metadataPrefix = value;
    }

    /**
     * Resumption id assigned after successfully storing the token.
     */
    public get ResumptionId() {
        return this._resumptionId;
    }

    public set ResumptionId(resumptionId) {
        this._resumptionId = resumptionId;
    }

    /**
     * Returns start position of this page.
     */
    public get StartPosition() {
        return this._startPosition;
    }

    public set StartPosition(startPosition) {
        this._startPosition = startPosition;
    }

    /**
     * Returns total number of document ids for this request.
     */
    public get TotalIds() {
        return this._totalIds;
    }

    public set TotalIds(totalIds) {
        this._totalIds = totalIds;
    }

    //#endregion properties
}

View file

@ -0,0 +1,194 @@
import ResumptionToken from "./ResumptionToken";
import { realpathSync } from "fs";
import { createClient, RedisClientType } from "redis";
import InternalServerErrorException from "../../exceptions/InternalServerError";
import { sprintf } from "sprintf-js";
import dayjs from "dayjs";
import * as crypto from "crypto";
/**
 * Stores and retrieves OAI resumption tokens in Redis with a fixed TTL.
 */
export default class TokenWorker {
    /** Legacy filesystem path for token files (see setResumptionPath). */
    private resumptionPath = "";
    protected filePrefix = "rs_";
    protected fileExtension = "txt";
    /** Redis client; created in connect(). */
    private cache: RedisClientType;
    /** Token lifetime in seconds. */
    private ttl: number;
    private url: string;
    private connected: boolean = false;

    /**
     * @param ttl Token lifetime in seconds.
     */
    constructor(ttl: number) {
        this.ttl = ttl;
        this.url = process.env.REDIS_URL || "redis://127.0.0.1:6379";
    }

    /** Open the Redis connection and track its state via client events. */
    public async connect() {
        const url = process.env.REDIS_URL || "redis://localhost:6379";
        this.cache = createClient({
            url,
        });
        this.cache.on("error", (err) => {
            this.connected = false;
            console.log("[Redis] Redis Client Error: ", err);
        });
        this.cache.on("connect", () => {
            this.connected = true;
        });
        await this.cache.connect();
    }

    public get Connected(): boolean {
        return this.connected;
    }

    /** True when a value is stored under the given key. */
    public async has(key: string): Promise<boolean> {
        const result = await this.cache.get(key);
        return result !== undefined && result !== null;
    }

    /**
     * Store a token under a new unique key and return that key.
     */
    public async set(token: ResumptionToken) {
        let fc = 0;
        const uniqueId = dayjs().unix().toString();
        let uniqueName: string;
        let cacheKeyExists = true;
        do {
            // "%s%05d": unix timestamp followed by a zero-padded counter,
            // retried until the key is free
            uniqueName = sprintf("%s%05d", uniqueId, fc++);
            cacheKeyExists = await this.has(uniqueName);
        } while (cacheKeyExists);
        const serialToken = JSON.stringify(token);
        await this.cache.setEx(uniqueName, this.ttl, serialToken);
        return uniqueName;
    }

    /**
     * Fetch and deserialise a token.
     * @throws InternalServerErrorException when the client was never created.
     */
    public async get(key: string): Promise<ResumptionToken | null> {
        if (!this.cache) {
            throw new InternalServerErrorException("Dataset is not available for OAI export!");
        }
        const result = await this.cache.get(key);
        if (result) {
            const rToken: ResumptionToken = new ResumptionToken();
            const parsed = JSON.parse(result);
            Object.assign(rToken, parsed);
            return rToken;
        } else {
            return null;
        }
    }

    /** Delete a key. Returns the pending Redis promise so callers may await it. */
    public del(key: string) {
        return this.cache.del(key);
    }

    /** Flush the whole Redis database. Returns the pending promise. */
    public flush() {
        return this.cache.flushAll();
    }

    public async close() {
        await this.cache.disconnect();
        this.connected = false;
    }

    /**
     * Digest of a string; defaults to md5/hex.
     * BUG FIX: the `encoding` parameter was previously accepted but ignored.
     */
    private checksum(str: string, algorithm?: string, encoding?: string): string {
        const outEncoding = (encoding ?? "hex") as crypto.BinaryToTextEncoding;
        return crypto
            .createHash(algorithm || "md5")
            .update(str, "utf8")
            .digest(outEncoding);
    }

    /**
     * Set resumption path where the resumption token files are stored.
     * Expands all symbolic links and resolves references; realpathSync
     * throws if the path does not exist.
     */
    public setResumptionPath(resPath: string): void {
        const realPath = realpathSync(resPath);
        this.resumptionPath = realPath;
    }

    /**
     * Store a resumption token under a fixed key.
     * NOTE(review): the key is hard-coded to "100" and the write is not
     * awaited - looks like leftover scaffolding; prefer set().
     */
    public storeResumptionToken(token: ResumptionToken): void {
        const uniqueName = "100";
        const serialToken = JSON.stringify(token);
        this.cache.setEx(uniqueName, 86400, serialToken);
    }
}

155
src/models/Dataset.ts Normal file
View file

@ -0,0 +1,155 @@
// import {
// Table,
// Column,
// DataType,
// IsEmail,
// HasMany,
// Model,
// CreatedAt, UpdatedAt,
// BelongsToMany,
// BelongsTo
// } from "sequelize-typescript";
// import { Abstract } from "./Abstract";
// @Table({
// tableName: "documents"
// })
// export class Dataset extends Model<Dataset> {
// @Column({
// // field: 'type',
// type: DataType.STRING(255),
// allowNull: false,
// })
// type!: string;
// @CreatedAt
// @Column({
// field: 'created_at',
// type: DataType.DATE,
// defaultValue: DataType.NOW
// })
// created_at!: Date;
// @UpdatedAt
// @Column({
// field: 'server_date_modified',
// type: DataType.DATE,
// defaultValue: DataType.NOW
// })
// server_date_modified!: Date;
// @Column({
// field: 'server_state',
// type: DataType.STRING(),
// })
// serverState!: string;
// @HasMany(() => Abstract, "document_id")
// public readonly abstracts?: Abstract[];
// }
import { Op, Model, DataTypes, InferAttributes, InferCreationAttributes, CreationOptional, NonAttribute, Association } from "sequelize";
import sequelizeConnection from "../config/db.config";
import DocumentXmlCache from "./DocumentXmlCache";
/**
 * Sequelize model for the `documents` table (a research dataset).
 */
class Dataset extends Model<InferAttributes<Dataset>, InferCreationAttributes<Dataset>> {
    // id can be undefined during creation when using `autoIncrement`
    declare id: CreationOptional<number>;
    declare contributing_corporation: string | null; // for nullable fields
    declare creating_corporation: string; // not nullable fields
    declare publisher_name: string | null;
    declare embargo_date: Date | null;
    declare publish_id: number | null;
    declare type: string | null;
    declare language: string | null; // for nullable fields
    declare server_state: string | null;
    declare server_date_published: Date | null;
    // createdAt can be undefined during creation
    declare created_at: CreationOptional<Date>;
    // updatedAt can be undefined during creation
    declare server_date_modified: CreationOptional<Date>;
    // Only populated when the relation is explicitly included in a query.
    declare xmlCache?: NonAttribute<DocumentXmlCache>;

    // NOTE(review): returns the dataset type, not a name - confirm intent.
    get fullName(): NonAttribute<string | null> {
        return this.type;
    }

    declare static associations: {
        xmlCache: Association<Dataset, DocumentXmlCache>;
    };

    /**
     * Return the published dataset with the earliest publication date,
     * or null when nothing is published.
     */
    public static async earliestPublicationDate(): Promise<Dataset | null> {
        const server_state = "published";
        const condition = {
            [Op.and]: [
                {
                    server_state: { [Op.eq]: server_state },
                },
            ],
        };
        // findOne already resolves to null when no row matches, so the
        // previous if/else around the result was redundant.
        return this.findOne({
            attributes: ["server_date_published"],
            where: condition,
            order: [["server_date_published", "asc"]],
        });
    }
}
// Attribute definitions for the `documents` table.
Dataset.init(
    {
        id: {
            type: DataTypes.INTEGER,
            primaryKey: true,
        },
        contributing_corporation: { type: DataTypes.STRING(255) },
        creating_corporation: { type: DataTypes.STRING(255), allowNull: false },
        publisher_name: DataTypes.STRING(255),
        embargo_date: DataTypes.DATE,
        publish_id: DataTypes.INTEGER,
        type: {
            type: DataTypes.STRING,
        },
        language: DataTypes.STRING,
        server_state: {
            type: DataTypes.STRING,
        },
        server_date_published: DataTypes.DATE,
        // project_id: DataTypes.INTEGER,
        // embargo_date: DataTypes.DATE,
        // belongs_to_bibliography: DataTypes.BOOLEAN,
        // editor_id: DataTypes.INTEGER,
        // preferred_reviewer: DataTypes.STRING,
        // preferred_reviewer_email: DataTypes.STRING,
        // reviewer_id: DataTypes.INTEGER,
        // reject_reviewer_note: DataTypes.STRING,
        // reject_editor_note: DataTypes.STRING,
        // reviewer_note_visible: DataTypes.BOOLEAN,
        created_at: DataTypes.DATE,
        server_date_modified: DataTypes.DATE,
    },
    {
        // map Sequelize's timestamp columns onto the legacy column names
        createdAt: "created_at",
        updatedAt: "server_date_modified",
        sequelize: sequelizeConnection,
        tableName: "documents",
    },
);
export default Dataset;

View file

@ -0,0 +1,167 @@
// import {
// Table,
// PrimaryKey,
// Column,
// DataType,
// Model,
// UpdatedAt,
// } from "sequelize-typescript";
// @Table({
// tableName: "document_xml_cache",
// createdAt: false,
// })
// export default class DocumentXmlCache extends Model<DocumentXmlCache> {
// @PrimaryKey
// @Column({
// type: DataType.INTEGER,
// allowNull: false,
// })
// declare document_id: number;
// @Column({
// type: DataType.INTEGER,
// allowNull: false,
// })
// declare xml_version: number;
// @UpdatedAt
// @Column({
// field: 'server_date_modified',
// type: DataType.DATE,
// defaultValue: DataType.NOW
// })
// declare server_date_modified: Date;
// @Column({
// type: DataType.TEXT(),
// })
// declare xml_data: string;
// }
import { Op, Model, DataTypes, InferAttributes, InferCreationAttributes } from "sequelize";
import sequelizeConnection from "../config/db.config";
import { builder, create } from "xmlbuilder2";
import { XMLBuilder } from "xmlbuilder2/lib/interfaces";
// import { select, SelectedValue } from "xpath";
import dayjs from "dayjs";
class DocumentXmlCache extends Model<InferAttributes<DocumentXmlCache>, InferCreationAttributes<DocumentXmlCache>> {
declare document_id: number;
declare xml_version: number;
// updatedAt can be undefined during creation
// declare server_date_modified: CreationOptional<Date>;
declare server_date_modified: string;
declare xml_data: string;
// // getters that are not attributes should be tagged using NonAttribute
// // to remove them from the model's Attribute Typings.
// get fullName(): NonAttribute<string | null> {
// return this.type;
// }
/**
* Check if a dataset in a specific xml version is already cached or not.
*
* @param mixed datasetId
* @param mixed serverDateModified
* @returns {bool} Returns true on cached hit else false.
*/
public static async hasValidEntry(datasetId: number, datasetServerDateModified: Date): Promise<boolean> {
const condition = {
[Op.and]: [
{
document_id: { [Op.eq]: datasetId },
server_date_modified: { [Op.eq]: dayjs(datasetServerDateModified).format("YYYY-MM-DD HH:mm:ss") },
},
],
};
// $select = DB::table('document_xml_cache');
// $select->where('document_id', '=', $datasetId)
// ->where('server_date_modified', '=', $serverDateModified);
const model = await this.findOne({
where: condition,
order: [["server_date_modified", "asc"]],
});
if (model) {
return true;
} else {
return false;
}
}
/**
* Get dom document of 'xml_data' string
*
* @returns {XMLBuilder}
*/
public getDomDocument(): XMLBuilder {
// const dom = xmlbuilder.create({ version: "1.0", encoding: "UTF-8", standalone: true });
let dom: XMLBuilder = create({ version: "1.0", encoding: "UTF-8", standalone: true }, this.xml_data);
// return dom.first();
const rdrDataset = dom.find(
(n) => {
const test = n.node.nodeName == "Rdr_Dataset";
return test;
},
false,
true,
)?.node;
if (rdrDataset == undefined) {
return dom.first();
} else {
dom = builder({ version: "1.0", encoding: "UTF-8", standalone: true }, rdrDataset);
return dom;
}
// const doc2 = create().doc();
// rdrDataset && doc2.import(rdrDataset);
// return doc2.first();
// rdrDataset && (dom = builder({ version: '1.0', encoding: 'UTF-8', standalone : true }, rdrDataset));
// let domNode: Node = dom.node;
// // run the xpath query
// const result: Array<SelectedValue> = select("//Rdr_Dataset", domNode);
// if (result && result.length && result.length > 0) {
// // convert the DOM node to a builder object
// const recordNode = builder(result.at(0));
// const doc2 = create().doc();
// // import into result document
// doc2.import(recordNode);
// // console.log(doc2.end({ prettyPrint: true }));
// return doc2;
// }
// return dom.first();
}
}
// Attribute definitions for `document_xml_cache`; Sequelize-managed
// timestamps are disabled (server_date_modified is written manually).
DocumentXmlCache.init(
    {
        document_id: {
            type: DataTypes.INTEGER,
            primaryKey: true,
        },
        xml_version: {
            type: DataTypes.INTEGER,
            allowNull: false,
        },
        server_date_modified: DataTypes.STRING(50),
        xml_data: {
            type: DataTypes.TEXT,
        },
    },
    {
        createdAt: false,
        updatedAt: false, //"server_date_modified",
        sequelize: sequelizeConnection,
        tableName: "document_xml_cache",
    },
);
export default DocumentXmlCache;

43
src/models/Identifier.ts Normal file
View file

@ -0,0 +1,43 @@
import { Model, DataTypes, InferAttributes, InferCreationAttributes, CreationOptional } from "sequelize";
import sequelizeConnection from "../config/db.config";
// Sequelize model for the `dataset_identifiers` table (e.g. DOIs, URNs).
class Identifier extends Model<InferAttributes<Identifier>, InferCreationAttributes<Identifier>> {
    // id can be undefined during creation when using `autoIncrement`
    declare id: CreationOptional<number>;
    declare value: string; // not nullable fields
    declare type: string;
    declare status: string | null;
    // createdAt can be undefined during creation
    declare created_at: CreationOptional<Date>;
    // updatedAt can be undefined during creation
    declare updated_at: CreationOptional<Date>;
}
// Attribute definitions for the `dataset_identifiers` table.
Identifier.init(
    {
        id: {
            type: DataTypes.INTEGER,
            primaryKey: true,
        },
        value: {
            type: DataTypes.STRING(255),
            allowNull: false,
        },
        type: {
            type: DataTypes.STRING(255),
            allowNull: false,
        },
        status: DataTypes.STRING(255),
        created_at: DataTypes.DATE,
        updated_at: DataTypes.DATE,
    },
    {
        // map Sequelize timestamps onto the snake_case column names
        createdAt: "created_at",
        updatedAt: "updated_at",
        tableName: "dataset_identifiers",
        sequelize: sequelizeConnection,
    },
);
export default Identifier;

64
src/models/Person.ts Normal file
View file

@ -0,0 +1,64 @@
import { Model, DataTypes, InferAttributes, InferCreationAttributes, CreationOptional } from "sequelize";
import sequelizeConnection from "../config/db.config";
/**
 * Sequelize model for the `persons` table (authors, contributors, ...).
 */
class Person extends Model<InferAttributes<Person>, InferCreationAttributes<Person>> {
    // id can be undefined during creation when using `autoIncrement`
    declare id: CreationOptional<number>;
    declare academic_title: string | null; // for nullable fields
    declare email: string; // not nullable fields
    declare first_name: string | null; // for nullable fields
    declare last_name: string;
    declare place_of_birth: string | null;
    declare identifier_orcid: string | null;
    declare identifier_gnd: string | null;
    declare identifier_misc: string | null;
    declare status: boolean | null;
    declare name_type: string | null; // for nullable fields

    /**
     * "first last", or just the last name when first_name is unset.
     * BUG FIX: previously rendered the literal string "null <last_name>"
     * for persons without a first name.
     */
    get full_name(): string {
        return [this.first_name, this.last_name].filter(Boolean).join(" ");
    }
}
// Attribute definitions for the `persons` table (no timestamp columns).
Person.init(
    {
        id: {
            type: DataTypes.INTEGER,
            primaryKey: true,
        },
        academic_title: DataTypes.STRING(255),
        email: {
            type: DataTypes.STRING(100),
            allowNull: false,
        },
        first_name: DataTypes.STRING(255),
        last_name: {
            type: DataTypes.STRING(255),
            allowNull: false,
        },
        place_of_birth: DataTypes.STRING(255),
        identifier_orcid: DataTypes.STRING(50),
        identifier_gnd: DataTypes.STRING(50),
        identifier_misc: DataTypes.STRING(50),
        status: {
            type: DataTypes.BOOLEAN,
            defaultValue: true,
        },
        name_type: DataTypes.STRING(255),
        full_name: {
            // virtual column derived from first_name + last_name
            type: new DataTypes.VIRTUAL(DataTypes.STRING(255), ["first_name", "last_name"]),
            get(): string {
                // BUG FIX: avoid rendering "null <last_name>" when
                // first_name is not set (mirrors the class getter).
                return [this.first_name, this.last_name].filter(Boolean).join(" ");
            },
        },
    },
    {
        timestamps: false,
        tableName: "persons",
        sequelize: sequelizeConnection,
    },
);
export default Person;

41
src/models/Project.ts Normal file
View file

@ -0,0 +1,41 @@
// import Sequelize from "sequelize";
import { Model, DataTypes, InferAttributes, InferCreationAttributes, CreationOptional } from "sequelize";
import sequelizeConnection from "../config/db.config";
// Sequelize model for the `projects` table.
class Project extends Model<InferAttributes<Project>, InferCreationAttributes<Project>> {
    // id can be undefined during creation when using `autoIncrement`
    declare id: CreationOptional<number>;
    declare label: string; // not nullable fields
    declare name: string;
    // createdAt can be undefined during creation
    declare created_at: CreationOptional<Date>;
    // updatedAt can be undefined during creation
    declare updated_at: CreationOptional<Date>;
}
// Attribute definitions for the `projects` table.
Project.init(
    {
        id: {
            type: DataTypes.INTEGER,
            primaryKey: true,
        },
        label: {
            type: DataTypes.STRING(50),
            allowNull: false,
        },
        name: {
            type: DataTypes.STRING(255),
            allowNull: false,
        },
        created_at: DataTypes.DATE,
        updated_at: DataTypes.DATE,
    },
    {
        // map Sequelize timestamps onto the snake_case column names
        createdAt: "created_at",
        updatedAt: "updated_at",
        sequelize: sequelizeConnection,
        tableName: "projects",
    },
);
export default Project;

View file

@ -0,0 +1,32 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// Sequelize model for the `dataset_abstracts` table.
const Abstract = sequelizeConnection.define(
    "dataset_abstracts",
    {
        type: {
            // BUG FIX: `length` is not a valid attribute option in Sequelize
            // and was silently ignored; the length belongs in the type.
            type: Sequelize.STRING(255),
            allowNull: false,
        },
        value: {
            type: Sequelize.TEXT,
            allowNull: false,
        },
        language: {
            // ISO language code column (3 chars in the schema)
            type: Sequelize.STRING(3),
            allowNull: false,
        },
    },
    {
        timestamps: false,
        tableName: "dataset_abstracts",
    },
);
// Abstract.belongsTo(Dataset, {
//     foreignKey: "document_id",
//     as: "dataset",
// });
export default Abstract;

View file

@ -0,0 +1,34 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// module.exports = (sequelize, DataTypes) => {
// Extent of a dataset, persisted in the "coverage" table: min/max/absolut
// triples for elevation, depth and time, plus an x/y bounding box.
// The association to Dataset is wired up centrally in init-models.
const Coverage = sequelizeConnection.define(
    "coverage",
    {
        elevation_min: { type: Sequelize.INTEGER },
        elevation_max: { type: Sequelize.INTEGER },
        elevation_absolut: { type: Sequelize.INTEGER },
        depth_min: { type: Sequelize.INTEGER },
        depth_max: { type: Sequelize.INTEGER },
        depth_absolut: { type: Sequelize.INTEGER },
        time_min: { type: Sequelize.INTEGER },
        time_max: { type: Sequelize.INTEGER },
        time_absolut: { type: Sequelize.INTEGER },
        x_min: { type: Sequelize.DOUBLE },
        x_max: { type: Sequelize.DOUBLE },
        y_min: { type: Sequelize.DOUBLE },
        y_max: { type: Sequelize.DOUBLE },
        created_at: { type: Sequelize.DATE },
        updated_at: { type: Sequelize.DATE },
    },
    {
        // map Sequelize's managed timestamps onto the snake_case columns
        createdAt: "created_at",
        updatedAt: "updated_at",
        tableName: "coverage",
    },
);

export default Coverage;

View file

@ -0,0 +1,49 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// Core dataset/document model, persisted in the "documents" table.
// All associations (titles, abstracts, authors, files, ...) are wired up
// centrally in init-models.
const Dataset = sequelizeConnection.define(
    "documents",
    {
        type: { type: Sequelize.STRING },
        language: { type: Sequelize.STRING },
        server_state: { type: Sequelize.STRING },
        server_date_published: { type: Sequelize.DATE },
        publisher_name: { type: Sequelize.STRING },
        publish_id: { type: Sequelize.INTEGER },
        creating_corporation: { type: Sequelize.STRING },
        project_id: { type: Sequelize.INTEGER },
        embargo_date: { type: Sequelize.DATE },
        belongs_to_bibliography: { type: Sequelize.BOOLEAN },
        editor_id: { type: Sequelize.INTEGER },
        preferred_reviewer: { type: Sequelize.STRING },
        preferred_reviewer_email: { type: Sequelize.STRING },
        reviewer_id: { type: Sequelize.INTEGER },
        reject_reviewer_note: { type: Sequelize.STRING },
        reject_editor_note: { type: Sequelize.STRING },
        reviewer_note_visible: { type: Sequelize.BOOLEAN },
        created_at: { type: Sequelize.DATE },
        server_date_modified: { type: Sequelize.DATE },
    },
    {
        // the "updated" timestamp is stored in the server_date_modified column
        createdAt: "created_at",
        updatedAt: "server_date_modified",
        tableName: "documents",
    },
);

export default Dataset;

44
src/models/file.model.js Normal file
View file

@ -0,0 +1,44 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// File attached to a dataset ("document_files" table).
const File = sequelizeConnection.define(
    "document_files",
    {
        path_name: {
            // `length` is not a valid Sequelize attribute option and was
            // silently ignored; the limit belongs in the type itself
            type: Sequelize.STRING(100),
            allowNull: false,
        },
        label: Sequelize.STRING(100),
        comment: Sequelize.STRING(255),
        mime_type: Sequelize.STRING(255),
        // limited to 3 characters
        language: Sequelize.STRING(3),
        file_size: {
            type: Sequelize.BIGINT,
            allowNull: false,
        },
        visible_in_frontdoor: {
            type: Sequelize.BOOLEAN,
            allowNull: false,
            defaultValue: true,
        },
        visible_in_oai: {
            type: Sequelize.BOOLEAN,
            allowNull: false,
            defaultValue: true,
        },
        sort_order: {
            type: Sequelize.INTEGER,
            allowNull: false,
        },
        created_at: Sequelize.DATE,
        updated_at: Sequelize.DATE,
    },
    {
        // map Sequelize's managed timestamps onto the snake_case columns
        createdAt: "created_at",
        updatedAt: "updated_at",
        tableName: "document_files",
    },
);
export default File;

210
src/models/init-models.js Normal file
View file

@ -0,0 +1,210 @@
// import Sequelize from 'sequelize'
import sequelizeConnection from "../config/db.config";
import Dataset from "./Dataset";
import Title from "./title.model.js";
import Abstract from "./abstract.model.js";
import Coverage from "./coverage.model.js";
import License from "./license.model.js";
import User from "./user.model.js";
// import Person from "./person.model.js";
import Person from "./Person";
import { Sequelize } from "sequelize";
import Subject from "./subject.model.js";
import Project from "./Project";
import File from "./file.model.js";
import Identifier from "./Identifier";
import DocumentXmlCache from "./DocumentXmlCache";
const dbContext = initModels();
export { Dataset, Title, Abstract, User, Person, Subject, Coverage, License, Project, Identifier, DocumentXmlCache };
export default dbContext;
/**
 * Wires up every model association and returns the model registry.
 *
 * Runs once at module load (see `dbContext` above); other modules use either
 * the named exports or the default registry object from this file.
 */
export function initModels() {
    // dataset <-> identifier (1:1)
    Dataset.hasOne(Identifier, {
        as: "identifier",
        foreignKey: "dataset_id",
    });
    Identifier.belongsTo(Dataset, {
        foreignKey: "dataset_id",
        as: "dataset",
    });
    // dataset <-> titles (1:n)
    Dataset.hasMany(Title, {
        as: "titles",
        foreignKey: "document_id",
    });
    Title.belongsTo(Dataset, {
        foreignKey: "document_id",
        as: "dataset",
    });
    // dataset <-> abstracts (1:n)
    Dataset.hasMany(Abstract, {
        as: "abstracts",
        foreignKey: "document_id",
    });
    Abstract.belongsTo(Dataset, {
        foreignKey: "document_id",
        as: "dataset",
    });
    // user (account) <-> datasets (1:n)
    User.hasMany(Dataset, {
        foreignKey: "account_id",
        as: "datasets",
    });
    Dataset.belongsTo(User, {
        foreignKey: "account_id",
        as: "user",
    });
    // join table for authors and contributors; `role` discriminates the link type
    const DocumentPersons = sequelizeConnection.define(
        "link_documents_persons",
        {
            person_id: { type: Sequelize.INTEGER, primaryKey: true },
            document_id: { type: Sequelize.INTEGER, primaryKey: true },
            role: {
                type: Sequelize.ENUM("author", "contributor", "other"),
            },
            sort_order: Sequelize.INTEGER,
            allow_email_contact: Sequelize.BOOLEAN,
            contributor_type: Sequelize.STRING(255),
        },
        {
            tableName: "link_documents_persons",
            timestamps: false,
        },
    );
    // authors (n:m through link_documents_persons)
    Dataset.belongsToMany(Person, {
        through: {
            model: DocumentPersons,
        },
        as: "authors",
        foreignKey: "document_id",
    });
    Person.belongsToMany(Dataset, {
        through: {
            model: DocumentPersons,
        },
        foreignKey: "person_id",
        as: "a_datasets",
    });
    // contributors (n:m through the same join table)
    Dataset.belongsToMany(Person, {
        through: {
            model: DocumentPersons,
        },
        as: "contributors",
        foreignKey: "document_id",
    });
    Person.belongsToMany(Dataset, {
        through: {
            model: DocumentPersons,
        },
        foreignKey: "person_id",
        as: "c_datasets",
    });
    // subjects (n:m)
    const DatasetSubject = sequelizeConnection.define(
        "link_dataset_subjects",
        {},
        {
            tableName: "link_dataset_subjects",
            timestamps: false,
        },
    );
    Dataset.belongsToMany(Subject, {
        through: DatasetSubject,
        as: "subjects",
        foreignKey: "document_id",
    });
    Subject.belongsToMany(Dataset, {
        through: DatasetSubject,
        foreignKey: "subject_id",
        as: "datasets",
    });
    // dataset <-> coverage (1:1)
    Dataset.hasOne(Coverage, {
        as: "coverage",
        foreignKey: "dataset_id",
    });
    Coverage.belongsTo(Dataset, {
        foreignKey: "dataset_id",
        as: "dataset",
    });
    // dataset <-> xml cache (1:1); the inverse side was missing before,
    // while every other association here declares both directions
    Dataset.hasOne(DocumentXmlCache, {
        as: "xmlCache",
        foreignKey: "document_id",
    });
    DocumentXmlCache.belongsTo(Dataset, {
        foreignKey: "document_id",
        as: "dataset",
    });
    // licences (n:m)
    const DatasetLicense = sequelizeConnection.define(
        "link_documents_licences",
        {},
        {
            tableName: "link_documents_licences",
            timestamps: false,
        },
    );
    Dataset.belongsToMany(License, {
        through: DatasetLicense,
        as: "licenses",
        foreignKey: "document_id",
    });
    License.belongsToMany(Dataset, {
        through: DatasetLicense,
        foreignKey: "licence_id",
        as: "datasets",
    });
    // project <-> datasets (1:n)
    Project.hasMany(Dataset, {
        foreignKey: "project_id",
        as: "datasets",
    });
    Dataset.belongsTo(Project, {
        foreignKey: "project_id",
        as: "project",
    });
    // dataset <-> files (1:n)
    Dataset.hasMany(File, {
        as: "files",
        foreignKey: "document_id",
    });
    File.belongsTo(Dataset, {
        foreignKey: "document_id",
        as: "dataset",
    });
    // return every model: the registry previously exposed only a subset,
    // which was inconsistent with this module's named exports
    return {
        Dataset: Dataset,
        Title: Title,
        Abstract: Abstract,
        Coverage: Coverage,
        Subject: Subject,
        License: License,
        User: User,
        Person: Person,
        Project: Project,
        File: File,
        Identifier: Identifier,
        DocumentXmlCache: DocumentXmlCache,
    };
}

View file

@ -0,0 +1,51 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// module.exports = (sequelize, DataTypes) => {
// Licence catalogue, persisted in the "document_licences" table.
const License = sequelizeConnection.define(
    // was registered as "coverage" (copy/paste from coverage.model.js),
    // which collided with the Coverage model in sequelize.models and let
    // whichever define ran last overwrite the other in the registry
    "document_licences",
    {
        active: {
            type: Sequelize.BOOLEAN,
            allowNull: false,
            defaultValue: true,
        },
        comment_internal: Sequelize.STRING,
        desc_markup: Sequelize.STRING,
        desc_text: Sequelize.STRING,
        language: Sequelize.STRING,
        link_licence: {
            type: Sequelize.STRING,
            allowNull: false,
        },
        link_logo: Sequelize.STRING,
        link_sign: Sequelize.STRING,
        mime_type: Sequelize.STRING,
        name_long: {
            type: Sequelize.STRING,
            allowNull: false,
        },
        name: {
            type: Sequelize.STRING,
            allowNull: false,
        },
        pod_allowed: {
            type: Sequelize.BOOLEAN,
            allowNull: false,
            defaultValue: false,
        },
        // NOTE(review): BOOLEAN looks wrong for a sort order (INTEGER seems
        // intended) — left unchanged to match the existing DB schema; confirm
        sort_order: {
            type: Sequelize.BOOLEAN,
            allowNull: false,
            defaultValue: false,
        },
    },
    {
        timestamps: false,
        tableName: "document_licences",
    },
);
export default License;
// };

View file

@ -0,0 +1,20 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// Minimal person model ("persons" table).
// NOTE(review): a typed Person model also exists (Person.ts) and is the one
// imported by init-models — confirm whether this file is still needed.
const Person = sequelizeConnection.define(
    "persons",
    {
        email: {
            // `length` is not a valid Sequelize attribute option and was
            // silently ignored; the limit belongs in the type itself
            type: Sequelize.STRING(255),
            allowNull: false,
        },
        first_name: Sequelize.STRING(255),
        last_name: Sequelize.STRING(255),
    },
    {
        timestamps: false,
        tableName: "persons",
    },
);
export default Person;

View file

@ -0,0 +1,33 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// Subject/keyword attached to a dataset ("dataset_subjects" table).
const Subject = sequelizeConnection.define(
    "dataset_subjects",
    {
        type: {
            // `length` is not a valid Sequelize attribute option and was
            // silently ignored; the limit belongs in the type itself
            type: Sequelize.STRING(255),
            allowNull: false,
        },
        value: {
            type: Sequelize.STRING(255),
            allowNull: false,
        },
        external_key: {
            type: Sequelize.STRING(255),
            allowNull: true,
        },
        // limited to 3 characters
        language: Sequelize.STRING(3),
        created_at: Sequelize.DATE,
        updated_at: Sequelize.DATE,
    },
    {
        // map Sequelize's managed timestamps onto the snake_case columns
        createdAt: "created_at",
        updatedAt: "updated_at",
        tableName: "dataset_subjects",
    },
);
export default Subject;

26
src/models/title.model.js Normal file
View file

@ -0,0 +1,26 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// module.exports = (sequelize, Sequelize) => {
// Title of a dataset ("dataset_titles" table); the association to Dataset
// is wired up centrally in init-models.
const Title = sequelizeConnection.define(
    "dataset_titles",
    {
        type: { type: Sequelize.STRING },
        value: { type: Sequelize.STRING },
        language: { type: Sequelize.STRING },
    },
    {
        timestamps: false,
        tableName: "dataset_titles",
    },
);

export default Title;

39
src/models/user.model.js Normal file
View file

@ -0,0 +1,39 @@
import Sequelize from "sequelize";
import sequelizeConnection from "../config/db.config";
// Account model ("accounts" table). The default scope excludes the
// credential columns so they do not leak through ordinary queries.
const User = sequelizeConnection.define(
    "accounts",
    {
        login: {
            // `length` is not a valid Sequelize attribute option and was
            // silently ignored; the limits belong in the types themselves
            type: Sequelize.STRING(20),
            allowNull: false,
        },
        password: {
            type: Sequelize.STRING(60),
            allowNull: false,
        },
        email: {
            type: Sequelize.STRING(255),
            allowNull: false,
        },
        first_name: Sequelize.STRING(255),
        last_name: Sequelize.STRING(255),
        remember_token: Sequelize.STRING(255),
        created_at: Sequelize.DATE,
        updated_at: Sequelize.DATE,
    },
    {
        // hide secrets by default; use scopes/attributes explicitly to fetch them
        defaultScope: {
            attributes: {
                exclude: ["password", "remember_token"],
            },
        },
        // map Sequelize's managed timestamps onto the snake_case columns
        createdAt: "created_at",
        updatedAt: "updated_at",
        tableName: "accounts",
    },
);
export default User;

View file

@ -0,0 +1,38 @@
// module.exports = app => {
// const datasetController = require("../controllers/dataset.controller.js");
// var router = require("express").Router();
// // // Create a new Dataset
// // router.post("/", datasetController.create);
// // Retrieve all Tutorials
// router.get("/", datasetController.findAll);
// // // Retrieve all published Dataset
// // router.get("/published", tutorials.findAllPublished);
// // Retrieve a single Dataset with publish_id
// router.get("/:publish_id", datasetController.findOne);
// app.use('/api/dataset', router);
// };
import { Router } from "express";
import { findAll, findOne } from "../controllers/dataset.controller";

// Routes for the dataset API.
// express.Router is a factory function, not a class — call it directly
// (matches the style used in home.routes).
const router = Router();

// GET / — list datasets
router.get("/", findAll);
// GET /:publish_id — fetch a single dataset by its publish_id
router.get("/:publish_id", findOne);

export default router;

12
src/routes/home.routes.js Normal file
View file

@ -0,0 +1,12 @@
import { Router } from "express";
// import "@babel/polyfill"
import { findYears, findDocumentsPerYear } from "../controllers/home.controller.js";
// Router for the home endpoints (mounted under /api/ by the app).
const router = Router();
// GET /years — handled by findYears (presumably the publication years; confirm in controller)
// router.get("/", findAll);
router.get("/years", findYears);
// GET /sitelinks/:year — handled by findDocumentsPerYear for the given year
router.get("/sitelinks/:year", findDocumentsPerYear);
export default router;

158
src/server.ts Normal file
View file

@ -0,0 +1,158 @@
// const express = require('express');
// // const bodyParser = require('body-parser');
// // const cors = require('cors');
// // https://www.bezkoder.com/node-express-sequelize-postgresql/
// // Next, we'll want to instantiate the Express app:
// const app = express();
// app.use(express.static(__dirname + '/client'));
// https://github.com/ivan-shaban/nodemon-babel-preset-typescript/blob/master/README.md
// https://github.com/microsoft/TypeScript-Babel-Starter
import "core-js/stable";
import "regenerator-runtime/runtime";
import { App } from "./app";
// Boot the HTTP server: the App constructor wires middleware, routes and the
// DB connection; start() binds the configured port.
new App().start();
// import express, { Express, Request, Response } from 'express';
// import bodyParser from 'body-parser';
// //Importing Routes
// import DatasetRoutes from './routes/dataset.routes.js';
// import HomeRoutes from './routes/home.routes.js';
// const app: Express = express();
//middlewares
// app.all('*', function(req, res, next) {
// res.setHeader("Access-Control-Allow-Origin", "*");
// res.header("Access-Control-Allow-Methods", "POST, PUT, OPTIONS, DELETE, GET");
// res.header("Access-Control-Max-Age", "3600");
// res.header("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With, x-access-token");
// next();
// });
// app.use(bodyParser.json({limit: '100mb'}));
// app.use(bodyParser.urlencoded({limit: '50mb', extended: true}));
// app.use(bodyParser.json({type: 'application/vnd.api+json'}));
// //routes
// app.use('/api/dataset', DatasetRoutes);
// app.use('/api/', HomeRoutes);
// Where we will keep books
// const books = [
// {
// isbn: "9781593275846",
// title: "Eloquent JavaScript, Second Edition",
// author: "Marijn Haverbeke",
// publish_date: "2014-12-14",
// publisher: "No Starch Press",
// numOfPages: 472,
// },
// {
// isbn: "9781449331818",
// title: "Learning JavaScript Design Patterns",
// author: "Addy Osmani",
// publish_date: "2012-07-01",
// publisher: "O'Reilly Media",
// numOfPages: 254,
// },
// {
// isbn: "9781449365035",
// title: "Speaking JavaScript",
// author: "Axel Rauschmayer",
// publish_date: "2014-02-01",
// publisher: "O'Reilly Media",
// numOfPages: 460,
// },
// ];
// load middlewar:
// app.use(cors());
// Configuring body parser middleware
// app.use(bodyParser.urlencoded({ extended: false }));
// app.use(bodyParser.json());
// app.use(express.json());
// const db = require("./models");
// db.sequelize.sync()
// .then(() => {
// console.log("Synced db.");
// })
// .catch((err) => {
// console.log("Failed to sync db: " + err.message);
// });
// app.post('/book', (req, res) => {
// const book = req.body;
// // Output the book to the console for debugging
// console.log(book);
// books.push(book);
// res.send('Book is added to the database');
// });
// app.get('/books', async (req, res) => {
// res.json(books);
// // const allDogs = await Dataset.findAll();
// // return res.status(200).json(allDogs);
// });
// app.get('/book/:isbn', (req, res) => {
// // reading isbn from the URL
// const isbn = req.params.isbn;
// // searching books for the isbn
// for (let book of books) {
// if (book.isbn === isbn) {
// res.json(book);
// return;
// }
// }
// // sending 404 when not found something is a good practice
// res.status(404).send('Book not found');
// });
// app.post('/book/:isbn', (req, res) => {
// // reading isbn from the URL
// const isbn = req.params.isbn;
// const newBook = req.body;
// // remove item from the books array
// for (let i = 0; i < books.length; i++) {
// let book = books[i]
// if (book.isbn === isbn) {
// books[i] = newBook;
// }
// }
// // sending 404 when not found something is a good practice
// res.send('Book is edited');
// });
// // ow, we can create a simple GET endpoint right beneath the boilerplate.
// // We'd like to set it to be on the home page, so the URL for the endpoint is /:
// app.get('/', (request, response) => {
// // response.send('Hello World, from express');
// response.sendFile('/home/administrator/api/new-book.html');
// });
// app.get('/booklist', (request, response) => {
// // response.send('Hello World, from express');
// response.sendFile('/home/administrator/api/client/book-list.html');
// });
// // require("./routes/dataset.routes")(app);
// // set port, listen for requests
// const port = process.env.PORT || 3000;
// app.set('port', port);
// // At this point, let's start our clients:
// app.listen(app.get('port'), function () {
// console.log('Express server listening on port ' + port);
// });
// export default app;