- feat: Enhance README with setup instructions, usage, and command documentation
- fix: Update API routes to include DOI URL handling and improve route organization
- chore: Add ORCID preload rule file and ensure proper registration
- docs: Add MIT License to the project for open-source compliance
- feat: Implement command to detect and fix missing dataset cross-references
- feat: Create command for updating DataCite DOI records with detailed logging and error handling
- docs: Add comprehensive documentation for dataset indexing command
- docs: Create detailed documentation for DataCite update command with usage examples and error handling
This commit is contained in:
parent 8f67839f93
commit c049b22723
11 changed files with 2187 additions and 555 deletions
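The route updates named in the commit message (DOI URL handling, route organization) live outside the controller shown below. As a rough sketch only — the routes file location, the '#controllers/...' import alias, and the exact URL patterns are assumptions, not content of this commit — the controller's endpoints could be wired up roughly like this:

import router from '@adonisjs/core/services/router';

// Hypothetical registration of the DatasetController endpoints documented below.
const DatasetController = () => import('#controllers/dataset_controller');

router.get('/api/datasets', [DatasetController, 'index']);
router.get('/api/dataset', [DatasetController, 'findAll']);
router.get('/api/dataset/:publish_id', [DatasetController, 'findOne']);
// DOI-style URL, e.g. /10.24341/tethys.99.2
router.get('/:prefix/:value', [DatasetController, 'findByIdentifier']);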
@@ -1,23 +1,35 @@
 import type { HttpContext } from '@adonisjs/core/http';
 // import Person from 'App/Models/Person';
 import Dataset from '#models/dataset';
+import { StatusCodes } from 'http-status-codes';

 // node ace make:controller Author
 export default class DatasetController {
-    public async index({}: HttpContext) {
-        // Select datasets with server_state 'published' or 'deleted' and sort by the last published date
-        const datasets = await Dataset.query()
-            .where(function (query) {
-                query.where('server_state', 'published').orWhere('server_state', 'deleted');
-            })
-            .preload('titles')
-            .preload('identifier')
-            .orderBy('server_date_published', 'desc');
+    /**
+     * GET /api/datasets
+     * Find all published datasets
+     */
+    public async index({ response }: HttpContext) {
+        try {
+            const datasets = await Dataset.query()
+                .where(function (query) {
+                    query.where('server_state', 'published').orWhere('server_state', 'deleted');
+                })
+                .preload('titles')
+                .preload('identifier')
+                .orderBy('server_date_published', 'desc');

-        return datasets;
+            return response.status(StatusCodes.OK).json(datasets);
+        } catch (error) {
+            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
+                message: error.message || 'Some error occurred while retrieving datasets.',
+            });
+        }
     }

     /**
      * GET /api/dataset
      * Find all published datasets
      */
     public async findAll({ response }: HttpContext) {
         try {
             const datasets = await Dataset.query()
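For orientation, a hypothetical client call against the reworked index endpoint above; the base URL is a placeholder, and the JSON envelopes mirror the handler's success and error branches:

// Hypothetical consumer of GET /api/datasets (base URL is a placeholder).
async function listDatasets(baseUrl: string) {
    const res = await fetch(`${baseUrl}/api/datasets`);
    if (!res.ok) {
        const { message } = await res.json(); // e.g. 'Some error occurred while retrieving datasets.'
        throw new Error(message);
    }
    return res.json(); // published/deleted datasets, newest first
}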
@@ -33,48 +45,142 @@ export default class DatasetController {
         }
     }

-    public async findOne({ params }: HttpContext) {
-        const datasets = await Dataset.query()
-            .where('publish_id', params.publish_id)
-            .preload('titles')
-            .preload('descriptions')
-            .preload('user', (builder) => {
-                builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
-            })
-            .preload('authors', (builder) => {
-                builder
-                    .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-                    .withCount('datasets', (query) => {
-                        query.as('datasets_count');
-                    })
-                    .pivotColumns(['role', 'sort_order'])
-                    .orderBy('pivot_sort_order', 'asc');
-            })
-            .preload('contributors', (builder) => {
-                builder
-                    .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
-                    .withCount('datasets', (query) => {
-                        query.as('datasets_count');
-                    })
-                    .pivotColumns(['role', 'sort_order', 'contributor_type'])
-                    .orderBy('pivot_sort_order', 'asc');
-            })
-            .preload('subjects')
-            .preload('coverage')
-            .preload('licenses')
-            .preload('references')
-            .preload('project')
-            .preload('referenced_by', (builder) => {
-                builder.preload('dataset', (builder) => {
-                    builder.preload('identifier');
-                });
-            })
-            .preload('files', (builder) => {
-                builder.preload('hashvalues');
-            })
-            .preload('identifier')
-            .firstOrFail();
+    /**
+     * GET /api/dataset/:publish_id
+     * Find one dataset by publish_id
+     */
+    public async findOne({ response, params }: HttpContext) {
+        try {
+            const dataset = await Dataset.query()
+                .where('publish_id', params.publish_id)
+                .preload('titles')
+                .preload('descriptions') // Using 'descriptions' instead of 'abstracts'
+                .preload('user', (builder) => {
+                    builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
+                })
+                .preload('authors', (builder) => {
+                    builder
+                        .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
+                        .withCount('datasets', (query) => {
+                            query.as('datasets_count');
+                        })
+                        .pivotColumns(['role', 'sort_order'])
+                        .orderBy('pivot_sort_order', 'asc');
+                })
+                .preload('contributors', (builder) => {
+                    builder
+                        .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
+                        .withCount('datasets', (query) => {
+                            query.as('datasets_count');
+                        })
+                        .pivotColumns(['role', 'sort_order', 'contributor_type'])
+                        .orderBy('pivot_sort_order', 'asc');
+                })
+                .preload('subjects')
+                .preload('coverage')
+                .preload('licenses')
+                .preload('references')
+                .preload('project')
+                .preload('referenced_by', (builder) => {
+                    builder.preload('dataset', (builder) => {
+                        builder.preload('identifier');
+                    });
+                })
+                .preload('files', (builder) => {
+                    builder.preload('hashvalues');
+                })
+                .preload('identifier')
+                .first(); // Use first() instead of firstOrFail() to handle not found gracefully

-        return datasets;
+            if (!dataset) {
+                return response.status(StatusCodes.NOT_FOUND).json({
+                    message: `Cannot find Dataset with publish_id=${params.publish_id}.`,
+                });
+            }
+
+            return response.status(StatusCodes.OK).json(dataset);
+        } catch (error) {
+            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
+                message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
+            });
+        }
     }
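The new findOne deliberately uses .first() with an explicit null check instead of .firstOrFail(). For comparison only, a sketch of what the firstOrFail() variant might look like; the method name is hypothetical and the E_ROW_NOT_FOUND check is an assumption about Lucid's exception shape, not part of this commit:

// Hypothetical alternative to the handler above, not part of this commit.
public async findOneOrFail({ response, params }: HttpContext) {
    try {
        const dataset = await Dataset.query().where('publish_id', params.publish_id).firstOrFail();
        return response.status(StatusCodes.OK).json(dataset);
    } catch (error) {
        // Assumes Lucid tags its row-not-found exception with code 'E_ROW_NOT_FOUND'.
        if (error.code === 'E_ROW_NOT_FOUND') {
            return response.status(StatusCodes.NOT_FOUND).json({
                message: `Cannot find Dataset with publish_id=${params.publish_id}.`,
            });
        }
        return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
            message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
        });
    }
}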
+
+    /**
+     * GET /:prefix/:value
+     * Find dataset by identifier (e.g., https://doi.tethys.at/10.24341/tethys.99.2)
+     */
+    public async findByIdentifier({ response, params }: HttpContext) {
+        const identifierValue = `${params.prefix}/${params.value}`;
+
+        // Optional: Validate DOI format
+        if (!identifierValue.match(/^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/)) {
+            return response.status(StatusCodes.BAD_REQUEST).json({
+                message: `Invalid DOI format: ${identifierValue}`,
+            });
+        }
+
+        try {
+            // Method 1: Using subquery with whereIn (most similar to your original)
+            const dataset = await Dataset.query()
+                // .whereIn('id', (subQuery) => {
+                //     subQuery.select('dataset_id').from('dataset_identifiers').where('value', identifierValue);
+                // })
+                .whereHas('identifier', (builder) => {
+                    builder.where('value', identifierValue);
+                })
+                .preload('titles')
+                .preload('descriptions') // Using 'descriptions' instead of 'abstracts'
+                .preload('user', (builder) => {
+                    builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
+                })
+                .preload('authors', (builder) => {
+                    builder
+                        .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
+                        .withCount('datasets', (query) => {
+                            query.as('datasets_count');
+                        })
+                        .pivotColumns(['role', 'sort_order'])
+                        .wherePivot('role', 'author')
+                        .orderBy('pivot_sort_order', 'asc');
+                })
+                .preload('contributors', (builder) => {
+                    builder
+                        .select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
+                        .withCount('datasets', (query) => {
+                            query.as('datasets_count');
+                        })
+                        .pivotColumns(['role', 'sort_order', 'contributor_type'])
+                        .wherePivot('role', 'contributor')
+                        .orderBy('pivot_sort_order', 'asc');
+                })
+                .preload('subjects')
+                .preload('coverage')
+                .preload('licenses')
+                .preload('references')
+                .preload('project')
+                .preload('referenced_by', (builder) => {
+                    builder.preload('dataset', (builder) => {
+                        builder.preload('identifier');
+                    });
+                })
+                .preload('files', (builder) => {
+                    builder.preload('hashvalues');
+                })
+                .preload('identifier')
+                .first();
+
+            if (!dataset) {
+                return response.status(StatusCodes.NOT_FOUND).json({
+                    message: `Cannot find Dataset with identifier=${identifierValue}.`,
+                });
+            }
+
+            return response.status(StatusCodes.OK).json(dataset);
+        } catch (error) {
+            return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
+                message: error.message || `Error retrieving Dataset with identifier=${identifierValue}.`,
+            });
+        }
+    }
 }
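The DOI check in findByIdentifier is a plain regular expression; a quick sketch of what it accepts and rejects (all values other than the tethys.99.2 example from the route documentation are made up for illustration):

// Same pattern as used in findByIdentifier above.
const doiPattern = /^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/;

doiPattern.test('10.24341/tethys.99.2');   // true  — the documented example
doiPattern.test('10.24341/tethys.99');     // true  — a single trailing numeric segment is enough
doiPattern.test('10.24341/tethys');        // false — a trailing '.<digits>' segment is required
doiPattern.test('doi:10.24341/tethys.99'); // false — scheme prefixes are rejected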