Compare commits

...

96 commits

Author SHA1 Message Date
6b04ad9910 Release: merge develop for v2.1.0 2025-11-13 11:42:59 +01:00
6457d233e7 Merge branch 'fix/table-persons' into develop
fix: Enhance dataset editing by ensuring user authentication and ownership checks across multiple controllers
2025-11-13 11:30:58 +01:00
d44d08abcd fix: Enhance dataset controllers with user authentication checks and improve mail configuration 2025-11-13 11:02:00 +01:00
38c05f6714 fix: Update TablePersons components for improved event handling and layout consistency 2025-11-05 15:38:28 +01:00
e8a34379f3 Merge branch 'fix/datset-listing-ux' into develop 2025-11-05 13:22:07 +01:00
4229001572 Enhance Map Zoom Control and Improve Map Page Layout
- Refactored zoom control component for better accessibility and styling.
- Added hover effects and improved button states for zoom in/out buttons.
- Updated map page layout with enhanced dataset card design and responsive styles.
- Introduced empty state for no datasets found and improved results header.
- Added icons for dataset cards and improved author display.
2025-11-05 13:15:23 +01:00
88e37bfee8 feat: Enhance Dataset Index with Dynamic Legend and Improved State Management for submitter, editor and reviewer
- Added a collapsible legend to display dataset states and available actions.
- Implemented localStorage persistence for legend visibility.
- Refactored dataset state handling with dynamic classes and labels.
- Improved table layout and styling for better user experience.
- Updated Tailwind CSS configuration to define new background colors for dataset states.
2025-10-30 14:42:36 +01:00
a4e6f88e07 fix: Update TablePersons component with improved layout and name type handling also for organizations 2025-10-29 14:36:05 +01:00
39f1bcee46 Merge branch 'feat/create-projects' into develop 2025-10-29 11:25:12 +01:00
3d8f2354cb feat: Enhance Dataset Edit Page with Unsaved Changes Indicator and Improved Structure
- Added a progress indicator for unsaved changes at the top of the dataset edit page.
- Enhanced the title section with a dataset status badge and improved layout.
- Introduced collapsible sections for better organization of form fields.
- Improved notifications for success/error messages.
- Refactored form fields into distinct sections: Basic Information, Licenses, Titles, Descriptions, Creators & Contributors, Additional Metadata, Geographic Coverage, and Files.
- Enhanced loading spinner with a more visually appealing overlay.
- Added new project validation logic in the backend with create and update validators.
2025-10-29 11:20:27 +01:00
f39fe75340 feat: Implement project management functionality with CRUD operations and UI integration
- added projects_controller.ts for CRUD operations
- added views Edit.vue, Index.vue and Create.vue
- small adaptions in menu.ts
- additional routes in start/routes.ts for projects
2025-10-16 15:37:55 +02:00
04269ce9cf - fix: Update TablePersons component for improved UI and functionality;
- refactor file scan options
2025-10-16 12:04:46 +02:00
5e424803ed Merge branch 'feat/admin-diagnose-overview' into develop 2025-10-14 12:30:15 +02:00
b5bbe26ec2 feat: Enhance background job settings UI and functionality
- Updated BackgroundJob.vue to improve the display of background job statuses, including missing cross-references and current job mode.
- Added auto-refresh functionality for background job status.
- Introduced success toast notifications for successful status refreshes.
- Modified the XML serialization process in DatasetXmlSerializer for better caching and performance.
- Implemented a new RuleProvider for managing custom validation rules.
- Improved error handling in routes for loading background job settings.
- Enhanced ClamScan configuration with socket support for virus scanning.
- Refactored dayjs utility to streamline locale management.
2025-10-14 12:19:09 +02:00
6757bdb77c feat: Enhance ClamAV Docker entrypoint and configuration
- Updated docker-entrypoint.sh to improve ClamAV service initialization and logging.
- Added checks for ClamAV and freshclam daemon status.
- Optimized freshclam configuration for container usage, including logging to stdout and setting database directory.
- Introduced caching mechanism for enabled file extensions in vinejs_provider.ts to reduce database queries.
- Implemented a new command to list datasets needing DataCite DOI updates, with options for verbose output, count only, and IDs only.
- Updated package dependencies to include p-limit and pino-pretty.
- finalized ace command 'detect:missing-cross-references'
2025-09-26 12:19:35 +02:00
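The enabled-extensions caching mentioned in the commit above can be pictured as a small in-memory TTL cache sitting in front of the database lookup. A hedged sketch, assuming a five-minute TTL and illustrative names (getEnabledExtensions, loadFromDb) rather than the actual vinejs_provider.ts code:

```typescript
// Illustrative TTL cache for enabled file extensions; names and TTL are assumptions.
let cachedExtensions: string[] | null = null;
let cachedAt = 0;
const CACHE_TTL_MS = 5 * 60 * 1000; // refresh at most every five minutes

async function getEnabledExtensions(loadFromDb: () => Promise<string[]>): Promise<string[]> {
    const now = Date.now();
    if (cachedExtensions && now - cachedAt < CACHE_TTL_MS) {
        return cachedExtensions; // serve from memory and skip the database round trip
    }
    cachedExtensions = await loadFromDb();
    cachedAt = now;
    return cachedExtensions;
}
```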
4c8cce27da feat: Update form field labels from "Main Title Language*" to "Main Description Language*" for clarity 2025-09-19 17:21:21 +02:00
2f079e6fdd feat: Add optional ORCID identifier to dataset validation, also for the submitter
npm updates
2025-09-19 16:26:01 +02:00
c049b22723 - feat: Enhance README with setup instructions, usage, and command documentation
- fix: Update API routes to include DOI URL handling and improve route organization

- chore: Add ORCID preload rule file and ensure proper registration

- docs: Add MIT License to the project for open-source compliance

- feat: Implement command to detect and fix missing dataset cross-references

- feat: Create command for updating DataCite DOI records with detailed logging and error handling

- docs: Add comprehensive documentation for dataset indexing command

- docs: Create detailed documentation for DataCite update command with usage examples and error handling
2025-09-19 14:35:23 +02:00
8f67839f93 hot-fix: Add ORCID validation and improve dataset editing UX
### Major Features
- Add comprehensive ORCID validation with checksum verification
- Implement unsaved changes detection and auto-save functionality
- Enhanced form component reactivity and state management

### ORCID Implementation
- Create custom VineJS ORCID validation rule with MOD-11-2 algorithm
- Add ORCID fields to Person model and TablePersons component
- Update dataset validators to include ORCID validation
- Add descriptive placeholder text for ORCID input fields

### UI/UX Improvements
- Add UnsavedChangesWarning component with detailed change tracking
- Improve FormCheckRadio and FormCheckRadioGroup reactivity
- Enhanced BaseButton with proper disabled state handling
- Better error handling and user feedback in file validation

### Data Management
- Implement sophisticated change detection for all dataset fields
- Add proper handling of array ordering for authors/contributors
- Improve license selection with better state management
- Enhanced subject/keyword processing with duplicate detection

### Technical Improvements
- Optimize search indexing with conditional updates based on modification dates
- Update person model column mapping for ORCID
- Improve validation error messages and user guidance
- Better handling of file uploads and deletion tracking

### Dependencies
- Update various npm packages (AWS SDK, Babel, Vite, etc.)
- Add baseline-browser-mapping for better browser compatibility

### Bug Fixes
- Fix form reactivity issues with checkbox/radio groups
- Improve error handling in file validation rules
- Better handling of edge cases in change detection
2025-09-15 14:07:59 +02:00
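The ORCID checksum referred to above is the ISO 7064 MOD-11-2 check digit. A minimal standalone sketch of that verification (not the project's actual VineJS rule):

```typescript
// MOD-11-2 (ISO 7064) check over the first 15 digits; the 16th character is the check digit.
function isValidOrcidChecksum(orcid: string): boolean {
    const digits = orcid.replace(/-/g, '');
    if (!/^\d{15}[\dX]$/.test(digits)) return false;

    let total = 0;
    for (let i = 0; i < 15; i++) {
        total = (total + Number(digits[i])) * 2;
    }
    const result = (12 - (total % 11)) % 11;
    const expected = result === 10 ? 'X' : String(result);
    return digits[15] === expected;
}

// isValidOrcidChecksum('0000-0002-1825-0097') === true
```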
06ed2f3625 feat: Enhance Person data structure and improve TablePersons component
- Updated Person interface to include first_name and last_name fields for better clarity and organization handling.
- Modified TablePersons.vue to support new fields, including improved pagination and drag-and-drop functionality.
- Added loading states and error handling for form controls within the table.
- Enhanced the visual layout of the table with responsive design adjustments.
- Updated solr.xslt to correctly reference ServerDateModified and EmbargoDate attributes.
- Updated AvatarController
- Improved download method for editor and reviewer
- Improved security for the official file download API: filtered by server_state
2025-09-08 12:28:26 +02:00
e1ccf0ddc8 hotfix(dataset): enhance file download with embargo validation and improve API data handling
- Add embargo date validation to file download process with date-only comparison
- Require first_name for authors/contributors only when name_type is 'Personal'
- Remove sensitive personal data from dataset API responses
- Improve dataset validation logic for better data integrity
2025-09-03 12:48:44 +02:00
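The date-only embargo comparison from the first bullet can be sketched as below; the helper name and the "still embargoed while today is before the embargo date" semantics are assumptions for illustration, not the controller's actual logic.

```typescript
// Compare embargo date and current date at the calendar-day level, ignoring time of day.
function isUnderEmbargo(embargoDate: Date | null, today: Date = new Date()): boolean {
    if (!embargoDate) return false; // no embargo set, file may be downloaded
    const dayOnly = (d: Date) => new Date(d.getFullYear(), d.getMonth(), d.getDate()).getTime();
    return dayOnly(today) < dayOnly(embargoDate);
}
```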
fbc34a7456 Merge branch 'develop'
Merge latest hotfix updates from the develop branch into the master branch
2025-07-03 10:44:39 +02:00
89d91d5e12 hotfix: improve mimetype creation and dashboard data loading
- Added a NotificationBar component to display flash messages on the Mimetype creation page.
- Modified FormCheckRadioGroup to handle both numeric and string keys for input values.
- Removed unused code and API calls from Dashboard.vue and moved API calls to the component level where they are used.
- Added authentication middleware to the 'clients' and 'authors' API routes in `start/routes/api.ts`.
- Updated the file download route in `start/routes/api.ts` to include "file" in the path.
- Corrected the validation message key for file extension minLength in MimetypeController.ts.
- Updated favicon path in `resources/views/app.edge`.
- Added argon2 dependency in `package.json`.
2025-07-03 10:17:19 +02:00
0bf442be96 hotfix(dataset): improve dataset classification UI and data handling
- Updated the Dataset and Submitter Category Vue components to enhance the UI for library classification. The collection names and numbers are now displayed with distinct styling using `span` elements with specific classes for better readability.
- Modified the DatasetController and Editor/DatasetController to filter collection roles by 'ddc' and 'ccs' names when preloading collections, improving data retrieval efficiency.
- Added `left_id` and `right_id` columns to the `collections` table in the `dataset_7_collections.ts` migration file.
- Added a migration to reorder the collection_roles table.
2025-05-06 17:43:37 +02:00
be6b38d0a3 hotfix (dataset): implement reject to reviewer functionality for editors
- Added "rejected_to_reviewer" state to the `ServerStates` enum.
- Implemented routes and controller actions (`rejectToReviewer`, `rejectToReviewerUpdate`) for editors to reject datasets back to reviewers with a rejection note.
- Added UI elements (button) in the editor dataset index and publish views to trigger the "reject to reviewer" action.
- Updated the reviewer dataset index view to display datasets in the "rejected_to_reviewer" state and show the editor's rejection note.
- Modified the reviewer dataset review page to allow reviewers to view and accept datasets that have been rejected back to them by editors.
- Updated the database migration to include the "rejected_to_reviewer" state in the `documents_server_state_check` constraint.
- Updated dependencies (pinia, redis).
2025-05-02 14:35:58 +02:00
c245c8e97d hotfix (dataset): improve dataset classification and review workflow
- Renamed "Collections" to "Classify" in dataset category views for submitters and editors to better reflect the page's purpose.
- Modified the `SectionTitleLineWithButton` component to conditionally render the cog button based on the `showCogButton` prop.
- Updated the Dataset Edit and Create views to use `textarea` instead of `text` for title and description input fields, allowing for multi-line text.
- Added authorization checks for dataset review and reject actions in the `Reviewer/DatasetController`, and passed the `can` object to the `Review` view.
- Added a "Reject" button to the dataset review page, visible only to users with the `dataset-review-reject` permission and when the dataset is in the 'approved' state.
- Improved the display of dataset information in index views by adding dark mode styling to table headers.
- Removed unused code and comments from the Dashboard.vue file.
- Removed the `show-header-icon` property from the CardBox component in the Create.vue file.
- Updated dependencies
2025-04-24 18:25:07 +02:00
c3ae4327b7 hotfix (dataset): enhance dataset editing and validation
- Modified the TableKeywords component to remove the external_key reset when the type is updated, only resetting the value.
- Updated the DatasetController to pass authorization checks (`can.edit`, `can.delete`) to the edit view.
- Updated the arrayContainsTypes validation rule to improve the error messages for titles and descriptions, clarifying the requirements for main and translated entries.
- Updated the Dataset Edit view to:
  - Remove unused code and comments.
  - Add authorization checks to the save button.
  - Add a release button.
  - Add icons to the save and release buttons.
  - Add a computed property `hasUnsavedChanges` to determine if there are unsaved changes in the form.
2025-04-18 11:39:19 +02:00
2cb33a779c hotfix(dataset): improve dataset management and UI enhancements
- Added tooltips to display reject notes from editors and reviewers on dataset index pages for submitters and editors.
- Implemented custom ordering for datasets in submitter and editor index views, prioritizing datasets rejected by editors or reviewers.
- Changed "Review" button label to "View" on the reviewer dataset index page.
- Changed "Review" button label to "Accept" on the reviewer dataset review page.
- Added project_id to the dataset model.
- Updated dependencies (vite, @pkgr/core, caniuse-lite, electron-to-chromium, http-proxy-middleware).
- Replaced the static doctypes array with the DatasetTypes enum.
- Updated favicon.
2025-04-16 17:07:45 +02:00
dbd2bf2e9d hotfix(dataset): correct embargo date validation message
- Corrected the embargo date validation message in both the DatasetController and dataset validator to ensure consistency.
- Updated the `embargo_date.date.afterOrEqual` message to dynamically display the correct date (10 days from now) in the desired format.
2025-04-09 15:59:22 +02:00
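A dynamic "10 days from now" date for the validation message, as described above, could be produced with dayjs (already used elsewhere in the project); the message wording and how it is wired into the validator are illustrative only.

```typescript
import dayjs from 'dayjs';

// Earliest acceptable embargo date and a message that always shows the current value.
const earliestEmbargo = dayjs().add(10, 'day').format('YYYY-MM-DD');
const embargoMessage = `The embargo date must be on or after ${earliestEmbargo}`;
```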
106f8d5f27 hotfix(dataset): enhance dataset creation and editing forms
- Added functionality to add new authors and contributors directly within the dataset creation and editing forms.
- Implemented `addNewAuthor` and `addNewContributor` methods to dynamically add new person objects to the authors and contributors arrays in the form data.
- Added header icons with click events to the `CardBox` component for authors and contributors sections to trigger the addition of new entries.
- Updated the dataset index views for reviewers and editors to improve the display of dataset titles, including adding a CSS class to truncate long titles.
- Ensured authors and contributors are ordered by `pivot_sort_order` when preloading in the Dataset and Editor controllers.
- Fixed an issue where pressing enter in the `SearchAutocomplete` component would submit the form.
- Updated validation messages to be available in the `updateEditorDatasetValidator`.
2025-04-09 13:00:37 +02:00
f04c1f6327 hotfix: enhance editor dataset management and UI improvements
- Implemented dataset editing functionality for editor roles, including fetching, updating, and categorizing datasets.
- Added routes and controller actions for editing, updating, and categorizing datasets within the editor interface.
- Integrated UI components for managing dataset metadata, subjects, references, and files.
- Enhanced keyword management with features for adding, editing, and deleting keywords, including handling keywords used by multiple datasets.
- Improved reference management with features for adding, editing, and deleting dataset references.
- Added validation for dataset updates using the `updateEditorDatasetValidator`.
- Updated the dataset edit form to include components for managing titles, descriptions, authors, contributors, licenses, coverage, subjects, references, and files.
- Implemented transaction management for dataset updates to ensure data consistency.
- Added a download route for files associated with datasets.
- Improved the UI for displaying and interacting with datasets in the editor index view, including adding edit and categorize buttons.
- Fixed an issue where the file size was not correctly calculated.
- Added a tooltip to the keyword value column in the TableKeywords component to explain the editability of keywords.
- Added a section to display keywords that are marked for deletion.
- Added a section to display references that are marked for deletion.
- Added a restore button to the references to delete section to restore references.
- Updated the SearchCategoryAutocomplete component to support read-only mode.
- Updated the FormControl component to support read-only mode.
- Added icons and styling improvements to various components.
- Added a default value for subjectsToDelete and referencesToDelete in the dataset model.
- Updated the FooterBar component to use the JustboilLogo component.
- Updated the app.ts file to fetch chart data without a year parameter.
- Updated the Login.vue file to invert the logo in dark mode.
- Updated the AccountInfo.vue file to add a Head component.
2025-04-08 14:16:35 +02:00
10d159a57a hotfix(dataset): enhance file upload and update functionality
- Added file upload functionality to the dataset update form.
- Implemented file size validation and aggregated upload limit.
- Added temporary file storage and cleanup to handle large file uploads.
- Added a clear button to the file upload component.
- Added the ability to sort files in the file upload component.
- Fixed an issue where the file upload component was not correctly updating the model value.
- Updated the dataset edit form to use the new file upload component.
- Added a global declaration for the `sort_order` property on the `File` interface.
- Added helper functions for byte size parsing, configuration retrieval, and temporary file path generation.
2025-04-01 13:39:02 +02:00
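The byte-size parsing and aggregated-limit helpers mentioned in the last bullets could look roughly like this; the unit table, the 512 MB default and the function names are assumptions, not the repository's actual helpers.

```typescript
// Parse human-readable sizes such as "512mb" into bytes and check a combined upload limit.
const UNITS: Record<string, number> = { b: 1, kb: 1024, mb: 1024 ** 2, gb: 1024 ** 3 };

function parseByteSize(input: string): number {
    const match = /^(\d+(?:\.\d+)?)\s*(b|kb|mb|gb)$/i.exec(input.trim());
    if (!match) throw new Error(`Unrecognised size: ${input}`);
    return Math.round(Number(match[1]) * UNITS[match[2].toLowerCase()]);
}

function exceedsAggregatedLimit(fileSizes: number[], limit = parseByteSize('512mb')): boolean {
    const total = fileSizes.reduce((sum, size) => sum + size, 0);
    return total > limit; // reject the request when the combined payload is too large
}
```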
8fbda9fc64 hotfix: enhance FormControl styling for read-only state
- Improved the styling of the `FormControl` component when in a read-only state.
- Added specific styles for read-only fields, including a grayed-out background, a disabled cursor, and removal of the focus ring.
- Updated the border color to match the read-only state.
- Ensured the text color is grayed out in read-only mode.
2025-03-31 17:42:59 +02:00
7bb4bd06cf hotfix: disable username and email fields in profile form
- Disabled the username and email fields in the profile update form.
- Set the `is-read-only` property to `true` for the username and email `FormControl` components.
2025-03-31 15:49:25 +02:00
f89b119b18 hotfix (dashboard): display allow email contact in card box client
- Added the `allowEmailContact` property to the `CardBoxClient` component to display the email contact status.
- Added the `allowEmailContact` computed property to the `Person` model to determine if email contact is allowed based on the related datasets.
- Preloaded the datasets relation in the `AuthorsController` to access the pivot attributes.
- Updated the `Dashboard.vue` to pass the `allowEmailContact` prop to the `CardBoxClient` component.
- Updated the `array_contains_types` validation rule to correct the error message for descriptions.
- Updated the `FormCheckRadio.vue` to correctly handle the radio button and checkbox components.
2025-03-31 15:14:34 +02:00
09f65359f9 hotfix(dataset): enhance radio button and checkbox components and add arrayContainsTypes validation
- Added checkbox support to the `FormCheckRadio` component.
- Updated the styling of the radio button and checkbox components.
- Added the `arrayContainsTypes` validation rule to ensure that arrays contain specific types.
- Updated the `dataset` validators and controllers to use the new validation rule.
- Updated the `FormCheckRadioGroup` component to correctly handle the `input-value` as a number.
- Removed the default value from the `id` column in the `collections` migration.
- Added the `array_contains_types` rule to the `adonisrc.ts` file.
2025-03-28 17:34:46 +01:00
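Stripped of the VineJS wiring, the arrayContainsTypes idea is a set-membership check over the type field of each entry. A framework-agnostic sketch with illustrative names:

```typescript
// Return true only if every required type appears at least once in the array.
interface TypedEntry {
    type: string;
}

function arrayContainsTypes(entries: TypedEntry[], requiredTypes: string[]): boolean {
    const present = new Set(entries.map((entry) => entry.type));
    return requiredTypes.every((type) => present.has(type));
}

// e.g. arrayContainsTypes(titles, ['Main']): a dataset needs at least one main title.
```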
9823364670 hotfix: enhance radio button and file upload components
- Improved the styling and functionality of the radio button component, including a new radio button style.
- Added a loading spinner to the file upload component to indicate when large files are being processed.
- Added the ability to sort files in the file upload component.
- Fixed an issue where the radio button component was not correctly updating the model value.
- Updated the dataset creation and edit forms to use the new radio button component.
- Added a global declaration for the `sort_order` property on the `File` interface.
- Updated the API to filter authors by first and last name.
- Removed the import of `_checkbox-radio-switch.css` as the radio button styling is now handled within the component.
2025-03-27 16:04:23 +01:00
b93e46207f hotfix-feat(dataset): implement file upload with validation and error handling
- Implemented file upload functionality for datasets using multipart requests.
- Added file size and type validation using VineJS.
- Added file name length validation.
- Added file scan to remove infected files.
- Implemented aggregated upload limit to prevent exceeding the server's capacity.
- Added error handling for file upload failures, including temporary file cleanup.
- Updated the `DatasetController` to handle file uploads, validation, and database transactions.
- Updated the `bodyparser.ts` config to process the file upload manually.
- Updated the `api.ts` routes to fetch the statistic data.
- Updated the `main.ts` store to fetch the statistic data.
- Updated the `Dashboard.vue` to display the submitters only for administrator role.
- Updated the `CardBoxWidget.vue` to display the submitters.
- Updated the `ServerError.vue` to use the LayoutGuest.vue.
- Updated the `AuthController.ts` and `start/routes.ts` to handle the database connection errors.
- Updated the `app/exceptions/handler.ts` to handle the database connection errors.
- Updated the `package.json` to use the correct version of the `@adonisjs/bodyparser`.
2025-03-26 14:19:06 +01:00
a25f8bf6f7 hotfix: update dependencies and UI elements
- Updated various npm packages in `package-lock.json` including `@adonisjs/drive`, `@adonisjs/env`, `axios`, `electron-to-chromium`, `nanoid`, `pg`, and `quansync`.
- Removed the GitHub link from the navbar and dashboard.
- Added an OAI interface link to the navbar and menu.
- Removed the "Star on GitHub" button from the dashboard.
- Updated the chart data fetching logic in `HomeController.ts` to calculate the last 4 years dynamically.
- Removed unused imports and commented-out code.
2025-03-20 10:29:34 +01:00
70f016422c hotfix(admin/user): implement password reset and update user password
- Implemented password reset functionality for admin users.
- Updated the user edit and create forms to use a password meter component for password strength validation.
- Modified the `AdminuserController` to handle the new password field and update user passwords.
- Updated the `createUserValidator` and `updateUserValidator` to validate the new password field.
- Updated the password field to `new_password` in the `Edit.vue` and `Create.vue` components.
- Added `showRequiredMessage` prop to `SimplePasswordMeter` component.
- Added conditional rendering for password strength bar in `SimplePasswordMeter` component.
- Added `fieldLabel` prop to `SimplePasswordMeter` component.
- Updated form submission to handle errors and reset password field.
2025-03-19 15:52:37 +01:00
9f5d35f7ba hotfix: enhance dataset creation and modal styling
- Added `@adonisjs/bodyparser` as a dependency.
- Improved the layout and styling of the consent modal in `Create.vue`.
- Enhanced the placeholder text for reference values in `Create.vue`.
- Added a default empty string for the subject in `Create.vue`.
- Updated the styling of the `CardBoxModal` component.
2025-03-19 13:28:02 +01:00
a934626721 hotfix: ensure selected collection is draggable
- Update draggable attribute and class logic so that the selected collection remains draggable
- Preserve proper styling while allowing user interaction with the selected collection
2025-03-18 12:51:19 +01:00
0d259b6464 feat(checkReferenceType): add check reference type feature
Update npm packages and related dependencies
Adapt tailwind.config.js with new utilities and configuration adjustments
Implement categorizeUpdate() method in Submitter/DatasetController.ts for synchronizing dataset collections
Apply style updates in Category.vue for improved drag-and-drop experience and visual cues
Add new route in start/routes.ts for dataset categorization flow
2025-03-17 17:26:29 +01:00
c350e9c373 hotfix: update edit mode of dataset (had forgotten to adapt edit mode)
Update DatasetController.ts: use correct moveToDisk method in update method
2025-03-17 12:54:26 +01:00
51a5673a3d hotfix: update @types/leaflet and adjust map styling
Update package.json to bump @types/leaflet
Define leaflet map z-index directly in the main app.css for consistent component use
Scope all SearchMap.vue styles locally
2025-03-17 12:17:47 +01:00
b540547e4c feat: update API controllers, validations, and Vue components
- Modified Api/Authors.Controller.ts to use only personal types and sort by dataset_count.
- Completely rewritten AvatarController.ts.
- Added new Api/CollectionsController.ts for querying collections and collection_roles.
- Modified Api/DatasetController.ts to preload titles, identifier and order by server_date_published.
- Modified FileController.ts to serve files from /storage/app/data/ instead of /storage/app/public.
- Added new Api/UserController for requesting submitters (getSubmitters).
- Improved OaiController.ts with performant DB queries for better ResumptionToken handling.
- Modified Submitter/DatasetController.ts by adding a categorize method for library classification.
- Rewritten ResumptionToken.ts.
- Improved TokenWorkerService.ts to utilize browser fingerprint.
- Edited dataset.ts by adding the doiIdentifier property.
- Enhanced person.ts to improve the fullName property.
- Completely rewritten AsideMenuItem.vue component.
- Updated CardBoxClient.vue to use TypeScript.
- Added new CardBoxDataset.vue for displaying recent datasets on the dashboard.
- Completely rewritten TableSampleClients.vue for the dashboard.
- Completely rewritten UserAvatar.vue.
- Made small layout changes in Dashboard.vue.
- Added new Category.vue for browsing scientific collections.
- Adapted the pinia store in main.ts.
- Added additional routes in start/routes.ts and start/api/routes.ts.
- Improved referenceValidation.ts for better ISBN existence checking.
- NPM dependency updates.
2025-03-14 17:39:58 +01:00
36cd7a757b feat: Integrate official drive_provider, update user profile features & UI improvements
- adonisrc.ts: Load official drive_provider and unload custom driver_provider.
- packages.json: Add @headlessui/vue dependency for tab components.
- AvatarController.ts: Rewrite avatar generation logic to always return the same avatar per user.
- auth/UserController.ts: Add profile and profileUpdate methods to support user profile editing.
- Submitter/datasetController.ts & app/models/file.ts: Adapt code to use the official drive_provider.
- app/models/user.ts: Introduce “isAdmin” getter.
- config/drive.ts: Create new configuration for the official drive_provider.
- providers/vinejs_provider.ts: Adapt allowedExtensions control to use provided options or database enabled extensions.
- resource/js/app.ts: Load default Head and Link components.
- resources/js/menu.ts: Add settings-profile.edit menu point.
- resources/js/Components/action-message.vue: Add new component for improved user feedback after form submissions.
- New avatar-input.vue component: Enable profile picture selection.
- Components/CardBox.vue: Alter layout to optionally show HeaderIcon in title bar.
- FormControl.vue: Define a readonly prop for textareas.
- Improve overall UI with updates to NavBar.vue, UserAvatar.vue, UserAvatarCurrentUser.vue, and add v-model support to password-meter.vue.
- Remove profile editing logic from AccountInfo.vue and introduce new profile components (show.vue, update-password-form.vue, update-profile-information.vue).
- app.edge: Modify page (add @inertiaHead tag) for better meta management.
- routes.ts: Add new routes for editing user profiles.
- General npm updates.
2025-02-27 16:24:25 +01:00
a41b091214 feat: Adjust z-index values for map components, enhance ISBN validation message, and add dynamic placeholders for reference inputs, add additional mimetypes 2025-02-17 16:08:36 +01:00
a3031169ca feat: Add alternate mimetype support, enhance validation for alternate mimetypes, and improve script loading performance
- mime_type.ts: Added a new column `public alternate_mimetype: string;`
- MimetypeController.ts: Extended validation and storage logic to accommodate the new `alternate_mimetype` attribute
- adonisrc.ts: Integrated new validation rule to validate user-provided mimetypes
- vite.ts: Set `defer: true` for script attributes to improve loading performance
- update_1_to_mime_types.ts: Added migration for the new `alternate_mimetype` column in the database
- UI improvements: Updated components such as AsideMenuLayer.vue, FormCheckRadioGroup.vue, MimeTypeInput.vue, NavBar.vue (lime-green background), NavBarMenu.vue, SectionBannerStarOnGitea.vue, Admin/mimetype/Create.vue, Admin/mimetype/Delete.vue, Admin/mimetype/Index.vue
- allowed_extensions_mimetype.ts: Enhanced rule to also check for alternate mimetypes
- referenceValidation.ts: Improved validation to allow only ISBNs with a '-' delimiter
- package-lock.json: Updated npm dependencies
2025-02-13 15:49:09 +01:00
4c5a8f5a42 feat: update to vite.js, Refactor configuration files, remove unused assets, and clean up commented code:
- ace.js: use ts-node-maintained
- adonisrc.ts: load vite_provider, set assetBundler to false, add hooks property
- Dockerfile: change to node version 22
- package.json: remove babel dependencies; add @swc/wasm, @vitejs/plugin-vue, hot-hook and vite; update eslint-config-prettier, tailwindcss, ts-node-maintained
- new vite.config.js and config/vite.ts
- inertia.js
- improved own vinejs_provider.ts
- adapted app.css needed for vitejs
- adapted app.ts: new resolve method needed for vitejs
- relocated resources/js/logo.svg
- removed Buffer import from FileUpload.vue
- Create.vue: improved submit needed for @inertiajs/vue3 form helper
- Edit.vue: improved submit needed for @inertiajs/vue3 form helper
- kernel.ts: load vite_middleware
- formatted routes.ts file
- rewritten allowed_extensions_mimetypes.ts file (removed TypeScript errors)
2025-02-07 10:14:57 +01:00
8d47a58d29 feat: Update .gitignore and refine TypeScript configuration; clean up commented code and enhance dataset validation; npm updates
- Updated .gitignore to include new patterns
- Refined TypeScript configuration for better performance and readability
- Cleaned up commented code in several files
- Enhanced dataset validation logic
- Updated npm dependencies to the latest versions
2025-01-29 11:26:21 +01:00
a5e0a36327 feat: Update CI workflow for reference validation tests and add environment variable configurations 2025-01-27 12:20:49 +01:00
c0496be51b - workflow adaptions for new tests 2025-01-24 17:45:58 +01:00
2c4f51be68 feat: Enhance reference validation and add support for Handle URLs
- Updated reference validation to handle various identifier types including DOI, ISBN, ISSN, URN, and Handle.
- Improved regex patterns for DOI and Handle validation to correctly extract and validate identifiers from URLs.
- Added asynchronous checks to verify the existence of DOI and Handle URLs.
- Added asynchronous checks to verify the existence of ISBNs
- Included detailed comments explaining the regex patterns and validation logic.
- Adjusted the validation logic to handle any URL prefix for Handle identifiers.
- Ensured that the Handle format `handle/20.500.12854/36478` is correctly validated.
- Updated the CI workflow to trigger on push and pull request events.
2025-01-24 17:11:10 +01:00
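The DOI and Handle extraction described above comes down to two regular expressions applied after stripping arbitrary URL prefixes. The patterns below are rough approximations rather than the project's exact regexes; because every DOI is technically a Handle, a real implementation would test for a DOI first.

```typescript
// Approximate patterns: a DOI starts with the "10." prefix, a Handle is prefix/suffix.
const DOI_PATTERN = /\b(10\.\d{4,9}\/[^\s"<>]+)/i;                    // e.g. 10.1000/xyz123
const HANDLE_PATTERN = /\b(?:handle\/)?(\d+(?:\.\d+)*\/[^\s"<>]+)/i;  // e.g. 20.500.12854/36478

function extractDoi(value: string): string | null {
    const match = DOI_PATTERN.exec(value);
    return match ? match[1] : null;
}

function extractHandle(value: string): string | null {
    const match = HANDLE_PATTERN.exec(value); // check extractDoi first in practice
    return match ? match[1] : null;
}
```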
537c6fd81a feat: Add and refactor MIME type management
- Added BaseModel with fillable attributes and mergeFillableAttributes method
- Refactored MimeType model to extend BaseModel
- Implemented destroy method in MimetypeController for deleting MIME types
- Updated Create.vue component with refactoring and improved type safety
- Fixed issues with ref usage in Create.vue
- Updated routes to include new and refactored endpoints
2025-01-12 15:47:25 +01:00
d1480b1240 feat: enhanced dataset management and UI improvements
- Submitter/DatasetController.ts: improved validations for time_absolute, time_min, and time_max.
- validators/dataset.ts: enhanced validations for time_absolute, time_min, and time_max.
- Added new favicon.ico for better branding.
- Improved password-meter.vue component with clearer hint messages.
- Updated checkStrength.ts: enhanced checkStrength() method for password strength validation.
- submitter/Dataset/Create.vue: added form controls for time_min, time_max, and/or time_absolute fields.
- submitter/Dataset/Edit.vue: introduced a loading spinner during file upload for better UX.
2025-01-08 11:45:03 +01:00
f67b736a88 feat: Enhance dataset management and improve frontend components
- Added preloads 'allowed_extensions_mimetypes' and 'dependent_array_min_length' in adonisrc.ts
- Updated @symfony/webpack-encore from ^4.6.1 to ^5.0.1
- AdminuserController: Implemented pagination for 10 records in index method
- Enabled reviewers to reject datasets to editors with email notifications (DatasetController.ts)
- Submitter DatasetController: Files now loaded in ascending order (sort_order) in edit mode
- file.ts: Removed serialization of fileData due to browser issues
- Modified FileUpload.vue to mark already uploaded files as deleted
- Improved keyword search in SearchCategoryAutocomplete.vue
- Started development on Category.vue for submitters to categorize DDC
- Added new route /dataset/categorize in routes.ts
- Introduced 2 new rules in start/rules: allowed_extensions_mimetypes.ts and dependent_array_min_length.ts
- Performed npm updates
2024-11-29 15:46:26 +01:00
49bd96ee77 feat: enhance user management, mimetype creation, and validation
- **AdminuserController.ts**: enable editing `first_name` and `last_name` for user creation and updates
- **MimetypeController.ts**: add creation support for mimetypes with selectable extensions
- **Models**: add `Mimetype` model (mime_type.ts); add `SnakeCaseNamingStrategy` for User model
- **Validators**:
  - **updateDatasetValidator**: increase title length to 255 and description length to 2500
  - **User Validators**: refine `createUserValidator` and `updateUserValidator` to include `first_name` and `last_name`
- **vanilla_error_reporter**: improve error reporting for wildcard fields
- **SKOS Query**: refine keyword request in `SearchCategoryAutocomplete.vue`
- **UI Enhancements**:
  - improve icon design in wizard (Wizard.vue)
  - add components for mimetype creation (Create.vue and button in Index.vue)
- **Routes**: update `routes.ts` to include new AdonisJS routes
2024-10-31 11:02:36 +01:00
2235f3905a - improved views and controllers for rejecting datasets with email for reviewer and editor roles
- flash also errors via config/inertia.ts
- npm updates
2024-09-26 13:51:35 +02:00
b06ccae603 - added @adonisjs/mail
- mail_settings_controller for setting smtp settings
- added view for rejecting a dataset for the editor
- added new model AppConfig for storing app-wide config values
- better validate_checksum.ts command with process chunking
- added vue3 apps 'BasicSettings' like email, profile settings
- started with 2 multilingual capabilities
- npm updates
2024-09-16 17:59:46 +02:00
010bead723 - add password strength meter for creating or editing user passwords
- add public opensearch api host
2024-08-07 14:22:36 +02:00
f4854d70b9 - npm updates
- added migration files for creating db table for 'backupcodes'
2024-07-29 10:43:36 +02:00
49ea0fc967 - small adaption for OpenAire inside OaiController.ts
- validate abstract for max 2500 characters
- small changes inside page for releasing a dataset
- npm updates
- adapted validating messages
2024-07-26 14:51:57 +02:00
005df2e454 - added backup codes for 2 factor authentication
- npm updates
- coverage validation: elevation must be positive, depth must be negative (see the sketch after this commit)
- vinejs-provider.js: get enabled extensions from database, not via validOptions.extnames
- vue components for backup codes: e.g.: PersonalSettings.vue
- validate spatial coverage in leaflet map: draw.component.vue, map.component.vue
- add backup code authentication into Login.vue
- preset to use no preferred reviewer: Release.vue
- 2 new vinejs validation rules: file_scan.ts and file-length.ts
2024-07-08 13:52:20 +02:00
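The coverage rule from the commit above (elevation positive, depth negative) can be expressed as a tiny predicate; whether a value of zero is allowed on either side is an assumption here, not taken from the validator.

```typescript
// Elevations above sea level must be positive, depths below sea level must be negative.
function isValidCoverage(elevation?: number, depth?: number): boolean {
    if (elevation !== undefined && elevation <= 0) return false;
    if (depth !== undefined && depth >= 0) return false;
    return true;
}
```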
ac473b1e72 - added LicenseController.ts and MimetypeController for enabling mime_types and licences
- add new authors and contributors only by unique email addresses
- allow multiple file upload
- added validation rule for validating length of uploaded files
- modified Dockerfile for starting "bin/server.js" instead of "server.js"
- npm updates
2024-06-14 12:38:04 +02:00
770e791613 - typesafe vanilla_error_reporter.ts
- updated Dockerfile for using node 20
- remove validator_old.ts
- npm updates
2024-05-21 14:41:10 +02:00
ec17d79cf2 - replaced validation library @adonisjs/validator with @vinejs/vine (performance)
- npm updates
2024-05-16 13:47:06 +02:00
08c2edca3b - npm updates
- renamed 'models' and 'validators' folders
- removed unnecessary files in contracts folder
2024-04-30 11:50:50 +02:00
a29865b781 - renamings to the new naming convention for adonisjs version 6
- npm updates
2024-04-29 11:25:50 +02:00
bee76f8d5b - npm added @japa/api-client, @japa/assert, @types/supertest
- webpack added options['__VUE_PROD_HYDRATION_MISMATCH_DETAILS__'] = false;
- bodyparser config replaced whitelistedMethods with allowedMethods
- extended stardust_provider
- adapted tests for adonisjs v6
2024-04-25 15:17:22 +02:00
296c8fd46e - added own provider for drive methods
- renamed middleware Role and Can to role_middleware and can_middleware
- added some typing for inertia vue3 components
- npm updates
2024-04-23 19:36:45 +02:00
cb51a4136f - update to AdonisJS 6 2024-03-14 20:25:27 +01:00
f828ca4491 - added 2fa authentication during login. see resources/js/Pages/Auth/login.vue
- added validate() method inside app/Services/TwoFactorProvider.ts
- added twoFactorChallenge() method inside app/Controllers/Http/Auth/AuthController.ts for logging in via 2fa-code
2024-02-16 15:32:47 +01:00
b2dce0259a - npm updates
- Admins can no longer delete any users or roles
- Additionally, the name of a role in edit mode is now read-only
- extra new SetupConfirmation.vue component for verifying qrcode 2FA
- adapted ci.yaml
2024-02-14 13:47:10 +01:00
4efa53673f - npm updates
- removed all controller methods from 'app/Controllers/Http/Admin/UsersControllers.ts'
- merged all authentication methods inside 'app/Controllers/Http/Auth/UserController.ts'
2024-02-06 16:39:33 +01:00
68928b5e07 - HomeController.ts: added API method for showing number of publications per month for a given year
- adapted command ValidateChecksum.ts: only published files are checked; better information logging
- better LineChart.vue component: showing real statistics
- start/routes/api.ts: added Route.get('/statistic/:year', 'HomeController.findPublicationsPerMonth');
2024-02-02 14:00:54 +01:00
8cef7390d7 - removed extra test datacite accounts from .env
- updated DoiClient.ts
- removed test error via /welcome page
- npm updates
2024-01-31 13:19:46 +01:00
c9ba7d6adc - added doi registration
- npm updates for webpack-encore and postcss-loader
- DatasetExtension.ts: use relation contributors for PersonContributor
- added DoiClient.ts and DoiClientContract.ts
- routes.ts: added routes for creating and storing DOI identifiers
- added xslt doi_datacite.xslt needed for registering DOI identifiers
2024-01-26 09:39:03 +01:00
ebc62d9117 - added api UserController.ts for 2FA
- added PersonalTotpSettings.vue for enabling/disabling 2FA
- changed User.ts: added attributes: state, twoFactorSecret and twoFactorRecoveryCodes
- added resources/js/utils/toast.ts for notifications
- modified start/routes/api.ts
- npm updates
2024-01-19 15:33:46 +01:00
18635f77b3 - npm updates
- added views and controller code for reviewer role
- added program logic for publishing a dataset by editor
- added reviewer menu
- adapted routes.ts for additional routes
2024-01-04 16:40:05 +01:00
c70fa4a0d8 - added npm packages @types/qrcode, qrcode and node-2fa
- corrected UsersController.ts and RoleController.ts with correct routes for settings
- added migration script and ui and Controller for 2 Factor Authentication
- npm updates
2023-12-29 15:54:49 +01:00
87e9314b00 - added NcModal.vue, NcActions.vue, NcButton.vue, FirstrunWizard.vue, Card.vue, Page0.vue, Page1.vue, Page2.vue, Page3.vue and some icons
- added lime color inside tailwind.config.js
- added some utilities scripts needed for components
- npm updates
- changed postcss.config.js for nesting css styles
- added about function to NavBar.vue
2023-12-21 09:30:21 +01:00
cefd9081ae - add AvatarController.ts
- adapted menu.ts, NavBar.vue, NavBarItem.vue for highlighting active nav item
- NavBarItemLabel.vue for app menu highlighting
- adapted routes.ts
- adapted app.edge for new favicon
- adapted LayoutAuthenticated.vue (:showAsideMenu="false") for showing AsideMenu optional
- new material icons: BriefcaseCheck.vue, SwapHorizontal.vue, AccountGroup.vue, Lock.vue
- started with FirstRunWizard
2023-12-15 17:17:33 +01:00
ae0c471e93 - now authenticated users can change their password with a check of the old password and password confirmation
- changed route app.dashboard to apps.dashboard
- add editor and reviewer relation to Dataset.ts
- added personal menu in asideMenu
- added Approve.vue for editor
- show warning in Index.vue (editor), if no dataset is loaded
- use Receive.vue without inertia form helper
- npm updates
- added routes in routes.ts
2023-12-12 15:22:25 +01:00
0d51002903 - default routing to "/app/dashboard"
- default route after login "/app/dashboard" in AuthController.ts
- npm updates
- corrected route in menu.ts
- better styling for listing datasets for editor and submitter in Index.vue
- personal setting to route "/settings/user"
2023-12-01 10:44:19 +01:00
6fef581dd0 - small adaptions for AsideMenuItem.vue, AsideMenuLayer.vue
- new routes editor.dataset.list and editor.dataset.update
- first functionalities for editor role, such as listing and receiving released datasets
- npm updates
2023-11-30 13:40:32 +01:00
c1e056b9fc Uploaded organigram to support reply to reviewer 2 in R05 2023-11-30 11:43:25 +00:00
bf9d25ae3e - advanced AsideMenuList.vue, AsideMenuItem.vue
- npm updates
- load menu in AsideMenu.vue via main.ts store for saving the status of menu items
- extended japa tests: also test permissions on dataset controller code
2023-11-29 16:52:41 +01:00
b6fdfbff41 - added @adonisjs/redis for saving session into redis with redis.ts contract and config
- npm updates
- added createHashValues and delete inside File.ts
- added dataset_count property inside Subject.ts
- corrected routes.ts with correct permissions
2023-11-27 17:17:22 +01:00
d8bdce1369 - added npm package dotenv-webpack for using env variables on clientside
- added API File Controller for downloading files e.g. /api/download/1022
- also create hash codes when submitting a new dataset
- added edit dataset functionalities for role submitter
- added the following route for role submitter: /dataset/:id/update', 'DatasetController.update'
- created extra UpdateDatasetValidator.ts for validating updated dataset
- npm updates
2023-11-22 17:06:55 +01:00
a7142f694f - prettier formatting
- npm updates
- new SearchMap.vue component
2023-10-31 15:38:43 +01:00
7bc9f90cca - implemented spatial filtering
- Component 'draw.component.vue' has been extended with the 'preserve' property to control whether the drawn rectangle disappears again
- npm updates
2023-10-23 15:27:39 +02:00
2360a81d1e - added route for showing map with all bounding boxes
- npm updates
- new Map.vue
2023-10-20 15:26:25 +02:00
cf859ba402 - remove VOLUME assignments from Dockerfile
- add package @opensearch-project/opensearch for manipulating opensearch index
- index tethys datasets via new command IndexDatasets, callable via node ace index:datasets or node ace index:datasets -p 193
- add mapping file for opensearch index in public/records.json
- added solr.xslt for transforming the Dataset model to JSON for adding to the opensearch index
- added route /editor/dataset/:id/update (beginning of editor/DatasetController.ts)
- npm updates
2023-10-17 15:45:41 +02:00
7915f66dd6 - added earliestPublicationDate for App/Models/Dataset.ts
- new classes TokenWorkerService.ts, TokenWorker.ts and ResumptionToken.ts for using REDIS with paging OAI results
- deleted public/assets2/langCodeMap.xml: integrated it directly in datasetxml2oai-pmh.xslt
- added redis npm package
- added TokenWorkerProvider.ts for using singleton of TokenWorkerService inside OaiController.ts
- added config/oai.ts for oai related configs from .env-file
- adapted XmlModel.ts for getting domDocument from database
2023-10-03 21:11:02 +02:00
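Keeping OAI-PMH resumption tokens in Redis so that paged ListRecords requests can resume, as the commit above describes, can be sketched with ioredis roughly as follows; the key prefix, payload shape and 24-hour TTL are assumptions, not the actual TokenWorkerService implementation.

```typescript
import Redis from 'ioredis';

const redis = new Redis({ host: '127.0.0.1', port: 6379 });

interface ResumptionToken {
    documentIds: number[];
    startPosition: number;
    totalIds: number;
}

// Store the token under a namespaced key with an expiry, and read it back on the next request.
async function saveToken(key: string, token: ResumptionToken, ttlSeconds = 86400): Promise<void> {
    await redis.set(`oai:token:${key}`, JSON.stringify(token), 'EX', ttlSeconds);
}

async function loadToken(key: string): Promise<ResumptionToken | null> {
    const raw = await redis.get(`oai:token:${key}`);
    return raw ? (JSON.parse(raw) as ResumptionToken) : null;
}
```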
2a7480d2ed - added new class CollectionRole.ts
- added relation 'collectionRole' to Collection.ts class
- added 'ListSets' and 'GetRecord' requests for OaiController.ts
- npm updates
- added utility-functions.ts
2023-09-28 22:43:46 +02:00
472 changed files with 62609 additions and 26996 deletions


@ -1,72 +0,0 @@
{
"typescript": true,
"commands": [
"./commands",
"@adonisjs/core/build/commands/index.js",
"@adonisjs/repl/build/commands",
"@eidellev/inertia-adonisjs/build/commands",
"@adonisjs/lucid/build/commands"
],
"exceptionHandlerNamespace": "App/Exceptions/Handler",
"aliases": {
"App": "app",
"Config": "config",
"Database": "database",
"Contracts": "contracts"
},
"preloads": [
"./start/routes",
"./start/kernel",
{
"file": "./start/inertia",
"environment": [
"web"
]
},
{
"file": "./start/validator",
"environment": [
"web"
]
}
],
"providers": [
"./providers/AppProvider",
"@adonisjs/core",
"@adonisjs/session",
"@adonisjs/view",
"@adonisjs/shield",
"@eidellev/inertia-adonisjs",
"@adonisjs/lucid",
"@adonisjs/auth",
"@eidellev/adonis-stardust",
"./providers/QueryBuilderProvider"
],
"metaFiles": [
{
"pattern": "public/**",
"reloadServer": false
},
{
"pattern": "resources/views/**/*.edge",
"reloadServer": false
}
],
"aceProviders": [
"@adonisjs/repl"
],
"tests": {
"suites": [
{
"name": "functional",
"files": [
"tests/functional/**/*.spec(.ts|.js)"
],
"timeout": 60000
}
]
},
"testProviders": [
"@japa/preset-adonis/TestsProvider"
]
}


@ -1,37 +1,37 @@
{
"presets": [
[
"@babel/preset-env",
{
"targets": {
"edge": "17",
"firefox": "60",
"chrome": "67",
"safari": "11.1"
}
// "useBuiltIns": "usage",
// "corejs": "3.16"
// "targets":{"node":"16"}
// "useBuiltIns": "entry",
// "targets": "> 0.25%, not dead"
}
],
// "@babel/preset-env",
"@babel/preset-typescript"
],
"plugins": [
// [
// "@babel/plugin-transform-typescript", {
// "allowDeclareFields": true
// }],
[
"@babel/plugin-proposal-decorators",
{
"legacy": true
}
],
{
"presets": [
[
"@babel/preset-env",
{
"targets": {
"edge": "17",
"firefox": "60",
"chrome": "67",
"safari": "11.1"
}
// "useBuiltIns": "usage",
// "corejs": "3.16"
// "targets":{"node":"16"}
// "useBuiltIns": "entry",
// "targets": "> 0.25%, not dead"
}
],
// "@babel/preset-env",
"@babel/preset-typescript"
],
"plugins": [
// [
// "@babel/plugin-transform-typescript", {
// "allowDeclareFields": true
// }],
[
"@babel/plugin-proposal-decorators",
{
"legacy": true
}
],
"@babel/proposal-class-properties"
// "@babel/proposal-object-rest-spread"
]
"@babel/proposal-class-properties"
// "@babel/proposal-object-rest-spread"
]
}


@ -2,7 +2,7 @@ root = true
[*]
indent_style = space
indent_size = 2
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true


@ -11,3 +11,12 @@ PG_PORT=5432
PG_USER=lucid
PG_PASSWORD=
PG_DB_NAME=lucid
REDIS_CONNECTION=local
REDIS_HOST=127.0.0.1
REDIS_PORT=6379
REDIS_PASSWORD=
SMTP_HOST=
SMTP_PORT=
RESEND_API_KEY=
OPENSEARCH_HOST=http://localhost
OPENSEARCH_CORE=tethys-records


@ -1,24 +1,14 @@
{
"extends": [
"plugin:adonis/typescriptApp",
"prettier"
],
"plugins": [
"prettier"
],
"extends": ["plugin:adonis/typescriptApp", "prettier"],
"plugins": ["prettier"],
"rules": {
"prettier/prettier": [
"error",
{ "singleQuote": true }
],
"@typescript-eslint/indent": ["error", 4, { "ignoredNodes": ["PropertyDefinition", "TSUnionType"] }],
"prettier/prettier": ["error", { "singleQuote": true }],
"@typescript-eslint/indent": ["error", 4, { "ignoredNodes": ["PropertyDefinition", "TSUnionType"] }],
"@typescript-eslint/naming-convention": [
"warn",
{
"selector": "interface",
"format": [
"PascalCase"
],
"format": ["PascalCase"],
"custom": {
"regex": "^I[A-Z]",
"match": false
@ -26,4 +16,4 @@
}
]
}
}
}


@ -13,7 +13,7 @@ jobs:
uses: actions/checkout@v3
- run: echo "The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "The workflow is now ready to test your code on the runner."
- name: List files in the repository:
- name: List files in the repository
run: |
ls ${{ github.workspace }}
- run: echo "This job's status is ${{ job.status }}."


@ -0,0 +1,78 @@
# This is a Gitea Actions workflow configuration file for running CI tests on the `feat/checkReferenceType` branch.
# The workflow is named "CI" and runs on the latest Ubuntu environment using a Node.js 20 Docker container.
# It sets up a PostgreSQL service with specified environment variables and health checks.
# The workflow includes the following steps:
# 1. Checkout the repository using the actions/checkout@v3 action.
# 2. Install Node.js dependencies using `npm ci`.
# 3. Create a `.env.test` file by copying from `.env.example`.
# 4. Set up environment variables in the `.env.test` file, including database connection details and other app-specific settings.
# 5. Run functional tests using the `node ace test functional --groups "ReferenceValidation"` command.
name: CI
run-name: Running tests for checkReferenceType branch
on:
push:
branches:
- feat/checkReferenceType
jobs:
container-job:
runs-on: ubuntu-latest
# Docker Hub image that `container-job` executes in
container: node:20-bullseye
services:
# Label used to access the service container
postgres:
image: postgres:latest
env:
POSTGRES_USER: alice
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
POSTGRES_DB: tethys_dev
# ports:
# - 5432:5432
options: |
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout repository
uses: actions/checkout@v3
# - name: Set up Node.js
# uses: actions/setup-node@v2
# with:
# node-version: '20'
- name: Install dependencies
run: npm ci
- name: Create .env.test file
run: cp .env.example .env.test
- name: Set up environment variables
run: |
echo "DB_CONNECTION=pg" >> .env.test
echo "PG_HOST=postgres" >> .env.test
echo "PG_PORT=5432" >> .env.test
echo "PG_USER=alice" >> .env.test
echo "PG_PASSWORD=${{ secrets.POSTGRES_PASSWORD }}" >> .env.test
echo "PG_DB_NAME=tethys_dev" >> .env.test
echo "NODE_ENV=test" >> .env.test
echo "ASSETS_DRIVER=fake" >> .env.test
echo "SESSION_DRIVER=memory" >> .env.test
echo "HASH_DRIVER=bcrypt" >> .env.test
echo "HOST=127.0.0.1" >> .env.test
echo "PORT=3333" >> .env.test
echo "APP_NAME=TethysCloud" >> .env.test
echo "APP_URL=http://${HOST}:${PORT}" >> .env.test
echo "CACHE_VIEWS=false" >> .env.test
echo "APP_KEY=pfi5N2ACN4tMJ5d8d8BPHfh3FEuvleej" >> .env.test
echo "DRIVE_DISK=local" >> .env.test
echo "OAI_LIST_SIZE=200" >> .env.test
echo "OPENSEARCH_HOST=${{ secrets.OPENSEARCH_HOST }}" >> .env.test
echo "OPENSEARCH_CORE=tethys-records" >> .env.test
- name: Run tests
run: node ace test functional --groups "ReferenceValidation"


@ -4,7 +4,13 @@
name: CI Pipeline
run-name: ${{ github.actor }} is running CI pipeline
# trigger build when pushing, or when creating a pull request
on: [push, pull_request]
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
# Label of the container job
@ -12,12 +18,12 @@ jobs:
# run build on latest ubuntu
runs-on: ubuntu-latest
container: node:16-bullseye
container: node:20-bullseye
services:
mydb:
image: postgres:latest
container_name: mydb
# container_name: mydb
env:
POSTGRES_USER: alice
POSTGRES_PASSWORD: iEx4Vj7zBb6
@ -27,8 +33,7 @@ jobs:
- 5432:5432
# Set health checks to wait until postgres has started
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
steps:
# this will check out the current branch (https://github.com/actions/checkout#Push-a-commit-using-the-built-in-token)
- name: 1 Check out repository code
@ -66,11 +71,12 @@ jobs:
&& echo "HASH_DRIVER=bcrypt" >> .env.test
&& echo "HOST=127.0.0.1" >> .env.test
&& echo "PORT=3333" >> .env.test
&& echo "APP_NAME=AdonisJs" >> .env.test
&& echo "APP_NAME=TethysCloud" >> .env.test
&& echo "APP_URL=http://${HOST}:${PORT}" >> .env.test
&& echo "CACHE_VIEWS=false" >> .env.test
&& echo "APP_KEY=pfi5N2ACN4tMJ5d8d8BPHfh3FEuvleej" >> .env.test
&& echo "DRIVE_DISK=local" >> .env.test
&& echo "OAI_LIST_SIZE=200" >> .env.test
# finally run the tests
# - run: npm test
@ -96,3 +102,4 @@ jobs:
# uses: coverallsapp/github-action@master
# with:
# github-token: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@ -7,3 +7,4 @@ coverage
tmp
docker-compose.yml
.env.test
public/assets


@ -1,57 +1,63 @@
################## First Stage - Creating base #########################
# Created a variable to hold our node base image
ARG NODE_IMAGE=node:18-bookworm-slim
ARG NODE_IMAGE=node:22-trixie-slim
FROM $NODE_IMAGE AS base
# Install dumb-init and ClamAV, and perform ClamAV database update
RUN apt update \
&& apt-get install -y dumb-init clamav clamav-daemon nano \
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
dumb-init \
clamav \
clamav-daemon \
clamdscan \
ca-certificates \
&& rm -rf /var/lib/apt/lists/* \
# Creating folders and changing ownerships
&& mkdir -p /home/node/app && chown node:node /home/node/app \
&& mkdir -p /var/lib/clamav \
&& mkdir -p /home/node/app \
&& mkdir -p /var/lib/clamav \
&& mkdir /usr/local/share/clamav \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav \
# permissions
&& mkdir /var/run/clamav \
&& chown node:clamav /var/run/clamav \
&& chmod 750 /var/run/clamav
# -----------------------------------------------
# --- ClamAV & FeshClam -------------------------
# -----------------------------------------------
# RUN \
# chmod 644 /etc/clamav/freshclam.conf && \
# freshclam && \
# mkdir /var/run/clamav && \
# chown -R clamav:root /var/run/clamav
&& mkdir -p /var/log/clamav \
&& mkdir -p /tmp/clamav-logs \
# Set ownership and permissions
&& chown node:node /home/node/app \
# && chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav \
&& chown -R node:clamav /var/lib/clamav /usr/local/share/clamav /etc/clamav /var/run/clamav /var/log/clamav \
&& chown -R node:clamav /etc/clamav \
&& chmod 755 /tmp/clamav-logs \
&& chmod 750 /var/run/clamav \
&& chmod 755 /var/lib/clamav \
&& chmod 755 /var/log/clamav \
# Add node user to clamav group and allow sudo for clamav commands
&& usermod -a -G clamav node
# && chmod 666 /var/run/clamav/clamd.socket
# Make directories group-writable so node (as member of clamav group) can access them
# && chmod 750 /var/run/clamav /var/lib/clamav /var/log/clamav /tmp/clamav-logs
# # initial update of av databases
# RUN freshclam
# Configure Clam AV...
# Configure ClamAV - copy config files before switching user
# COPY --chown=node:clamav ./*.conf /etc/clamav/
COPY --chown=node:clamav ./*.conf /etc/clamav/
# # permissions
# RUN mkdir /var/run/clamav && \
# chown node:clamav /var/run/clamav && \
# chmod 750 /var/run/clamav
# Setting the working directory
WORKDIR /home/node/app
# Changing the current active user to "node"
# Download initial ClamAV database as root before switching users
USER node
RUN freshclam --quiet || echo "Initial database download failed - will retry at runtime"
# initial update of av databases
RUN freshclam
VOLUME /var/lib/clamav
# Copy entrypoint script
COPY --chown=node:clamav docker-entrypoint.sh /home/node/app/docker-entrypoint.sh
RUN chmod +x /home/node/app/docker-entrypoint.sh
ENV TZ="Europe/Vienna"
################## Second Stage - Installing dependencies ##########
# In this stage, we will start installing dependencies
FROM base AS dependencies
@ -66,8 +72,10 @@ COPY --chown=node:node . .
################## Third Stage - Building Stage #####################
# In this stage, we will start building dependencies
FROM dependencies AS build
ENV NODE_ENV=production
# We run "node ace build" to build the app (dist folder) for production
RUN node ace build --production
RUN node ace build --ignore-ts-errors
# RUN node ace build --production
################## Final Stage - Production #########################
@ -85,7 +93,8 @@ RUN npm ci --omit=dev
# Copy files to the working directory from the build folder the user
COPY --chown=node:node --from=build /home/node/app/build .
# Expose port
EXPOSE $PORT
# EXPOSE 3310
EXPOSE 3333
ENTRYPOINT ["/home/node/app/docker-entrypoint.sh"]
# Run the command to start the server using "dumb-init"
CMD [ "dumb-init", "node", "server.js" ]
CMD [ "dumb-init", "node", "bin/server.js" ]

LICENSE Normal file

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2025 Tethys Research Repository
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

ace

@ -1,16 +0,0 @@
/*
|--------------------------------------------------------------------------
| Ace Commands
|--------------------------------------------------------------------------
|
| This file is the entry point for running ace commands.
|
*/
require('reflect-metadata')
require('source-map-support').install({ handleUncaughtExceptions: false })
const { Ignitor } = require('@adonisjs/core/build/standalone')
new Ignitor(__dirname)
.ace()
.handle(process.argv.slice(2))

ace-manifest.json

@ -1,609 +0,0 @@
{
"commands": {
"validate:checksum": {
"settings": {
"loadApp": true,
"stayAlive": false
},
"commandPath": "./commands/ValidateChecksum",
"commandName": "validate:checksum",
"description": "",
"args": [],
"aliases": [],
"flags": []
},
"dump:rcfile": {
"settings": {},
"commandPath": "@adonisjs/core/build/commands/DumpRc",
"commandName": "dump:rcfile",
"description": "Dump contents of .adonisrc.json file along with defaults",
"args": [],
"aliases": [],
"flags": []
},
"list:routes": {
"settings": {
"loadApp": true,
"stayAlive": true
},
"commandPath": "@adonisjs/core/build/commands/ListRoutes/index",
"commandName": "list:routes",
"description": "List application routes",
"args": [],
"aliases": [],
"flags": [
{
"name": "verbose",
"propertyName": "verbose",
"type": "boolean",
"description": "Display more information"
},
{
"name": "reverse",
"propertyName": "reverse",
"type": "boolean",
"alias": "r",
"description": "Reverse routes display"
},
{
"name": "methods",
"propertyName": "methodsFilter",
"type": "array",
"alias": "m",
"description": "Filter routes by method"
},
{
"name": "patterns",
"propertyName": "patternsFilter",
"type": "array",
"alias": "p",
"description": "Filter routes by the route pattern"
},
{
"name": "names",
"propertyName": "namesFilter",
"type": "array",
"alias": "n",
"description": "Filter routes by route name"
},
{
"name": "json",
"propertyName": "json",
"type": "boolean",
"description": "Output as JSON"
},
{
"name": "table",
"propertyName": "table",
"type": "boolean",
"description": "Output as Table"
},
{
"name": "max-width",
"propertyName": "maxWidth",
"type": "number",
"description": "Specify maximum rendering width. Ignored for JSON Output"
}
]
},
"generate:key": {
"settings": {},
"commandPath": "@adonisjs/core/build/commands/GenerateKey",
"commandName": "generate:key",
"description": "Generate a new APP_KEY secret",
"args": [],
"aliases": [],
"flags": []
},
"repl": {
"settings": {
"loadApp": true,
"environment": "repl",
"stayAlive": true
},
"commandPath": "@adonisjs/repl/build/commands/AdonisRepl",
"commandName": "repl",
"description": "Start a new REPL session",
"args": [],
"aliases": [],
"flags": []
},
"ssr:build": {
"settings": {
"stayAlive": true
},
"commandPath": "@eidellev/inertia-adonisjs/build/commands/Build",
"commandName": "ssr:build",
"description": "Build and watch files for changes",
"args": [],
"aliases": [],
"flags": []
},
"ssr:watch": {
"settings": {
"stayAlive": true
},
"commandPath": "@eidellev/inertia-adonisjs/build/commands/Watch",
"commandName": "ssr:watch",
"description": "Build and watch files for changes",
"args": [],
"aliases": [],
"flags": []
},
"db:seed": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/DbSeed",
"commandName": "db:seed",
"description": "Execute database seeders",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection for the seeders",
"alias": "c"
},
{
"name": "interactive",
"propertyName": "interactive",
"type": "boolean",
"description": "Run seeders in interactive mode",
"alias": "i"
},
{
"name": "files",
"propertyName": "files",
"type": "array",
"description": "Define a custom set of seeders files names to run",
"alias": "f"
},
{
"name": "compact-output",
"propertyName": "compactOutput",
"type": "boolean",
"description": "A compact single-line output"
}
]
},
"db:wipe": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/DbWipe",
"commandName": "db:wipe",
"description": "Drop all tables, views and types in database",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "drop-views",
"propertyName": "dropViews",
"type": "boolean",
"description": "Drop all views"
},
{
"name": "drop-types",
"propertyName": "dropTypes",
"type": "boolean",
"description": "Drop all custom types (Postgres only)"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force command to run in production"
}
]
},
"db:truncate": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/DbTruncate",
"commandName": "db:truncate",
"description": "Truncate all tables in database",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force command to run in production"
}
]
},
"make:model": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/MakeModel",
"commandName": "make:model",
"description": "Make a new Lucid model",
"args": [
{
"type": "string",
"propertyName": "name",
"name": "name",
"required": true,
"description": "Name of the model class"
}
],
"aliases": [],
"flags": [
{
"name": "migration",
"propertyName": "migration",
"type": "boolean",
"alias": "m",
"description": "Generate the migration for the model"
},
{
"name": "controller",
"propertyName": "controller",
"type": "boolean",
"alias": "c",
"description": "Generate the controller for the model"
},
{
"name": "factory",
"propertyName": "factory",
"type": "boolean",
"alias": "f",
"description": "Generate a factory for the model"
}
]
},
"make:migration": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/MakeMigration",
"commandName": "make:migration",
"description": "Make a new migration file",
"args": [
{
"type": "string",
"propertyName": "name",
"name": "name",
"required": true,
"description": "Name of the migration file"
}
],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "The connection flag is used to lookup the directory for the migration file"
},
{
"name": "folder",
"propertyName": "folder",
"type": "string",
"description": "Pre-select a migration directory"
},
{
"name": "create",
"propertyName": "create",
"type": "string",
"description": "Define the table name for creating a new table"
},
{
"name": "table",
"propertyName": "table",
"type": "string",
"description": "Define the table name for altering an existing table"
}
]
},
"make:seeder": {
"settings": {},
"commandPath": "@adonisjs/lucid/build/commands/MakeSeeder",
"commandName": "make:seeder",
"description": "Make a new Seeder file",
"args": [
{
"type": "string",
"propertyName": "name",
"name": "name",
"required": true,
"description": "Name of the seeder class"
}
],
"aliases": [],
"flags": []
},
"make:factory": {
"settings": {},
"commandPath": "@adonisjs/lucid/build/commands/MakeFactory",
"commandName": "make:factory",
"description": "Make a new factory",
"args": [
{
"type": "string",
"propertyName": "model",
"name": "model",
"required": true,
"description": "The name of the model"
}
],
"aliases": [],
"flags": [
{
"name": "model-path",
"propertyName": "modelPath",
"type": "string",
"description": "The path to the model"
},
{
"name": "exact",
"propertyName": "exact",
"type": "boolean",
"description": "Create the factory with the exact name as provided",
"alias": "e"
}
]
},
"migration:run": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Run",
"commandName": "migration:run",
"description": "Migrate database by running pending migrations",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force to run migrations in production"
},
{
"name": "dry-run",
"propertyName": "dryRun",
"type": "boolean",
"description": "Do not run actual queries. Instead view the SQL output"
},
{
"name": "compact-output",
"propertyName": "compactOutput",
"type": "boolean",
"description": "A compact single-line output"
},
{
"name": "disable-locks",
"propertyName": "disableLocks",
"type": "boolean",
"description": "Disable locks acquired to run migrations safely"
}
]
},
"migration:rollback": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Rollback",
"commandName": "migration:rollback",
"description": "Rollback migrations to a specific batch number",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explictly force to run migrations in production"
},
{
"name": "dry-run",
"propertyName": "dryRun",
"type": "boolean",
"description": "Do not run actual queries. Instead view the SQL output"
},
{
"name": "batch",
"propertyName": "batch",
"type": "number",
"description": "Define custom batch number for rollback. Use 0 to rollback to initial state"
},
{
"name": "compact-output",
"propertyName": "compactOutput",
"type": "boolean",
"description": "A compact single-line output"
},
{
"name": "disable-locks",
"propertyName": "disableLocks",
"type": "boolean",
"description": "Disable locks acquired to run migrations safely"
}
]
},
"migration:status": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Status",
"commandName": "migration:status",
"description": "View migrations status",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
}
]
},
"migration:reset": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Reset",
"commandName": "migration:reset",
"description": "Rollback all migrations",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force command to run in production"
},
{
"name": "dry-run",
"propertyName": "dryRun",
"type": "boolean",
"description": "Do not run actual queries. Instead view the SQL output"
},
{
"name": "disable-locks",
"propertyName": "disableLocks",
"type": "boolean",
"description": "Disable locks acquired to run migrations safely"
}
]
},
"migration:refresh": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Refresh",
"commandName": "migration:refresh",
"description": "Rollback and migrate database",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force command to run in production"
},
{
"name": "dry-run",
"propertyName": "dryRun",
"type": "boolean",
"description": "Do not run actual queries. Instead view the SQL output"
},
{
"name": "seed",
"propertyName": "seed",
"type": "boolean",
"description": "Run seeders"
},
{
"name": "disable-locks",
"propertyName": "disableLocks",
"type": "boolean",
"description": "Disable locks acquired to run migrations safely"
}
]
},
"migration:fresh": {
"settings": {
"loadApp": true
},
"commandPath": "@adonisjs/lucid/build/commands/Migration/Fresh",
"commandName": "migration:fresh",
"description": "Drop all tables and re-migrate the database",
"args": [],
"aliases": [],
"flags": [
{
"name": "connection",
"propertyName": "connection",
"type": "string",
"description": "Define a custom database connection",
"alias": "c"
},
{
"name": "force",
"propertyName": "force",
"type": "boolean",
"description": "Explicitly force command to run in production"
},
{
"name": "seed",
"propertyName": "seed",
"type": "boolean",
"description": "Run seeders"
},
{
"name": "drop-views",
"propertyName": "dropViews",
"type": "boolean",
"description": "Drop all views"
},
{
"name": "drop-types",
"propertyName": "dropTypes",
"type": "boolean",
"description": "Drop all custom types (Postgres only)"
},
{
"name": "disable-locks",
"propertyName": "disableLocks",
"type": "boolean",
"description": "Disable locks acquired to run migrations safely"
}
]
}
},
"aliases": {}
}

ace.js Normal file

@ -0,0 +1,25 @@
/*
|--------------------------------------------------------------------------
| JavaScript entrypoint for running ace commands
|--------------------------------------------------------------------------
|
| Since, we cannot run TypeScript source code using "node" binary, we need
| a JavaScript entrypoint to run ace commands.
|
| This file registers the "ts-node/esm" hook with the Node.js module system
| and then imports the "bin/console.ts" file.
|
*/
/**
* Register hook to process TypeScript files using ts-node
*/
// import { register } from 'node:module';
// register('ts-node/esm', import.meta.url);
import 'ts-node-maintained/register/esm';
/**
* Import ace console entrypoint
*/
await import('./bin/console.js');

adonisrc.ts Normal file

@ -0,0 +1,122 @@
import { defineConfig } from '@adonisjs/core/app';
export default defineConfig({
/*
|--------------------------------------------------------------------------
| Commands
|--------------------------------------------------------------------------
|
| List of ace commands to register from packages. The application commands
| will be scanned automatically from the "./commands" directory.
*/
commands: [
() => import('@adonisjs/core/commands'),
() => import('@adonisjs/lucid/commands'),
() => import('@adonisjs/mail/commands')
],
/*
|--------------------------------------------------------------------------
| Preloads
|--------------------------------------------------------------------------
|
| List of modules to import before starting the application.
|
*/
preloads: [
() => import('./start/routes.js'),
() => import('./start/kernel.js'),
() => import('#start/validator'),
// () => import('#start/rules/unique'),
// () => import('#start/rules/translated_language'),
// () => import('#start/rules/unique_person'),
// // () => import('#start/rules/file_length'),
// // () => import('#start/rules/file_scan'),
// // () => import('#start/rules/allowed_extensions_mimetypes'),
// () => import('#start/rules/dependent_array_min_length'),
// () => import('#start/rules/referenceValidation'),
// () => import('#start/rules/valid_mimetype'),
// () => import('#start/rules/array_contains_types'),
// () => import('#start/rules/orcid'),
],
/*
|--------------------------------------------------------------------------
| Service providers
|--------------------------------------------------------------------------
|
| List of service providers to import and register when booting the
| application
|
*/
providers: [
// () => import('./providers/AppProvider.js'),
() => import('@adonisjs/core/providers/app_provider'),
() => import('@adonisjs/core/providers/hash_provider'),
{
file: () => import('@adonisjs/core/providers/repl_provider'),
environment: ['repl', 'test'],
},
() => import('@adonisjs/session/session_provider'),
() => import('@adonisjs/core/providers/edge_provider'),
() => import('@adonisjs/shield/shield_provider'),
// () => import('@eidellev/inertia-adonisjs'),
// () => import('@adonisjs/inertia/inertia_provider'),
() => import('#providers/app_provider'),
() => import('#providers/inertia_provider'),
() => import('@adonisjs/lucid/database_provider'),
() => import('@adonisjs/auth/auth_provider'),
// () => import('@eidellev/adonis-stardust'),
() => import('@adonisjs/redis/redis_provider'),
// () => import('@adonisjs/encore/encore_provider'),
() => import('@adonisjs/static/static_provider'),
() => import('#providers/stardust_provider'),
() => import('#providers/query_builder_provider'),
() => import('#providers/token_worker_provider'),
() => import('#providers/rule_provider'),
// () => import('#providers/drive/provider/drive_provider'),
() => import('@adonisjs/drive/drive_provider'),
// () => import('@adonisjs/core/providers/vinejs_provider'),
() => import('#providers/vinejs_provider'),
() => import('@adonisjs/mail/mail_provider'),
() => import('@adonisjs/vite/vite_provider'),
],
metaFiles: [
{
pattern: 'public/**',
reloadServer: false,
},
{
pattern: 'resources/views/**/*.edge',
reloadServer: false,
},
],
/*
|--------------------------------------------------------------------------
| Tests
|--------------------------------------------------------------------------
|
| List of test suites to organize tests by their type. Feel free to remove
| and add additional suites.
|
*/
tests: {
suites: [
{
files: ['tests/unit/**/*.spec(.ts|.js)'],
name: 'unit',
timeout: 2000,
},
{
files: ['tests/functional/**/*.spec(.ts|.js)'],
name: 'functional',
timeout: 30000,
},
],
forceExit: false,
},
assetsBundler: false,
hooks: {
onBuildStarting: [() => import('@adonisjs/vite/build_hook')],
},
// assetsBundler: false
});
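The tests block above registers two Japa suites through file globs with per-suite timeouts. A minimal spec that the unit suite would pick up could look like this (file name and assertion are purely illustrative):

// tests/unit/example.spec.ts - hypothetical spec matched by 'tests/unit/**/*.spec(.ts|.js)'
import { test } from '@japa/runner'

test.group('example', () => {
  test('adds numbers', ({ assert }) => {
    assert.equal(1 + 1, 2)
  })
})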

View file

@ -1,14 +1,14 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import User from 'App/Models/User';
import Role from 'App/Models/Role';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import CreateUserValidator from 'App/Validators/CreateUserValidator';
import UpdateUserValidator from 'App/Validators/UpdateUserValidator';
import { RenderResponse } from '@ioc:EidelLev/Inertia';
import type { HttpContext } from '@adonisjs/core/http';
import User from '#models/user';
import Role from '#models/role';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import { createUserValidator, updateUserValidator } from '#validators/user';
// import { schema, rules } from '@ioc:Adonis/Core/Validator';
// import Hash from '@ioc:Adonis/Core/Hash';
// import { schema, rules } from '@ioc:Adonis/Core/Validator';
export default class UsersController {
public async index({ auth, request, inertia }: HttpContextContract) {
export default class AdminuserController {
public async index({ auth, request, inertia }: HttpContext) {
const page = request.input('page', 1);
// const limit = 10
@ -45,7 +45,7 @@ export default class UsersController {
// .filter(qs)
// .preload('focusInterests')
// .preload('role')
.paginate(page, 5);
.paginate(page, 10);
// var test = request.all();
@ -61,7 +61,7 @@ export default class UsersController {
});
}
public async create({ inertia }: HttpContextContract) {
public async create({ inertia }: HttpContext) {
// let rolesPluck = {};
// (await Role.query().select('id', 'name')).forEach((user) => {
// rolesPluck[user.id] = user.name;
@ -73,18 +73,21 @@ export default class UsersController {
});
}
public async store({ request, response, session }: HttpContextContract) {
public async store({ request, response, session }: HttpContext) {
// node ace make:validator CreateUser
try {
// Step 2 - Validate request body against the schema
await request.validate(CreateUserValidator);
// await request.validate(CreateUserValidator);
await request.validateUsing(createUserValidator);
// console.log({ payload });
} catch (error) {
// Step 3 - Handle errors
// return response.badRequest(error.messages);
throw error;
}
const input = request.only(['login', 'email', 'password']);
const input: Record<string, any> = request.only(['login', 'email','first_name', 'last_name']);
input.password = request.input('new_password');
const user = await User.create(input);
if (request.input('roles')) {
const roles: Array<number> = request.input('roles');
@ -92,10 +95,9 @@ export default class UsersController {
}
session.flash('message', 'User has been created successfully');
return response.redirect().toRoute('user.index');
return response.redirect().toRoute('settings.user.index');
}
public async show({ request, inertia }: HttpContextContract) {
public async show({ request, inertia }: HttpContext) {
const id = request.param('id');
const user = await User.query().where('id', id).firstOrFail();
@ -110,7 +112,7 @@ export default class UsersController {
});
}
public async edit({ request, inertia }: HttpContextContract) {
public async edit({ request, inertia }: HttpContext) {
const id = request.param('id');
const user = await User.query().where('id', id).firstOrFail();
@ -125,20 +127,26 @@ export default class UsersController {
});
}
public async update({ request, response, session }: HttpContextContract) {
public async update({ request, response, session }: HttpContext) {
// node ace make:validator UpdateUser
const id = request.param('id');
const user = await User.query().where('id', id).firstOrFail();
// validate update form
await request.validate(UpdateUserValidator);
await request.validateUsing(updateUserValidator, {
meta: {
objId: user.id,
},
});
// password is optional
let input;
if (request.input('password')) {
input = request.only(['login', 'email', 'password']);
let input: Record<string, any>;
if (request.input('new_password')) {
input = request.only(['login', 'email', 'first_name', 'last_name']);
input.password = request.input('new_password');
} else {
input = request.only(['login', 'email']);
input = request.only(['login', 'email', 'first_name', 'last_name']);
}
await user.merge(input).save();
// await user.save();
@ -149,61 +157,19 @@ export default class UsersController {
}
session.flash('message', 'User has been updated successfully');
return response.redirect().toRoute('user.index');
return response.redirect().toRoute('settings.user.index');
}
public async destroy({ request, response, session }: HttpContextContract) {
public async destroy({ request, response, session }: HttpContext) {
const id = request.param('id');
const user = await User.findOrFail(id);
await user.delete();
session.flash('message', `User ${user.login} has been deleted.`);
return response.redirect().toRoute('user.index');
return response.redirect().toRoute('settings.user.index');
}
/**
* Show the user a form to change their personal information & password.
*
* @return \Inertia\Response
*/
public accountInfo({ inertia, auth }: HttpContextContract): RenderResponse {
const user = auth.user;
// const id = request.param('id');
// const user = await User.query().where('id', id).firstOrFail();
return inertia.render('Admin/User/AccountInfo', {
user: user,
});
}
/**
* Save the modified personal information for a user.
*
* @param HttpContextContract ctx
* @return : RedirectContract
*/
public async accountInfoStore({ request, response, auth, session }: HttpContextContract) {
// validate update form
await request.validate(UpdateUserValidator);
const payload = request.only(['login', 'email']);
auth.user?.merge(payload);
const user = await auth.user?.save();
// $user = \Auth::user()->update($request->except(['_token']));
let message;
if (user) {
message = 'Account updated successfully.';
} else {
message = 'Error while saving. Please try again.';
}
session.flash(message);
return response.redirect().toRoute('admin.account.info');
//->with('message', __($message));
}
// private async syncRoles(userId: number, roleIds: Array<number>) {
// const user = await User.findOrFail(userId)
// private async syncRoles(objId: number, roleIds: Array<number>) {
// const user = await User.findOrFail(objId)
// // const roles: Role[] = await Role.query().whereIn('id', roleIds);
// // await user.roles().sync(roles.rows.map(role => role.id))
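The controller now validates with request.validateUsing(createUserValidator) and updateUserValidator imported from #validators/user, replacing the old class-based validators. Those validators are not part of this diff; a rough sketch of their likely shape with VineJS follows. The field names are taken from the controller, everything else (rules, lengths, the optional roles array) is an assumption, and the project presumably also applies its isUnique macro with meta.objId for the update case, as seen in the mimetype validation elsewhere in this changeset.

// Hypothetical #validators/user sketch; rules and constraints are assumptions.
import vine from '@vinejs/vine'

export const createUserValidator = vine.compile(
  vine.object({
    login: vine.string().trim().minLength(3),
    email: vine.string().trim().email(),
    first_name: vine.string().trim(),
    last_name: vine.string().trim(),
    new_password: vine.string().minLength(8),
    roles: vine.array(vine.number()).optional(),
  }),
)

export const updateUserValidator = vine.compile(
  vine.object({
    login: vine.string().trim().minLength(3),
    email: vine.string().trim().email(),
    first_name: vine.string().trim(),
    last_name: vine.string().trim(),
    new_password: vine.string().minLength(8).optional(),
  }),
)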

View file

@ -1,17 +1,17 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import type { HttpContext } from '@adonisjs/core/http';
export default class HomeController {
public async index({}: HttpContextContract) {}
public async index({}: HttpContext) {}
public async create({}: HttpContextContract) {}
public async create({}: HttpContext) {}
public async store({}: HttpContextContract) {}
public async store({}: HttpContext) {}
public async show({}: HttpContextContract) {}
public async show({}: HttpContext) {}
public async edit({}: HttpContextContract) {}
public async edit({}: HttpContext) {}
public async update({}: HttpContextContract) {}
public async update({}: HttpContext) {}
public async destroy({}: HttpContextContract) {}
public async destroy({}: HttpContext) {}
}

View file

@ -0,0 +1,51 @@
import type { HttpContext } from '@adonisjs/core/http';
import License from '#models/license';
export default class LicenseController {
public async index({ auth, inertia }: HttpContext) {
const direction = 'asc'; // or 'desc'
const licenses = await License.query().orderBy('sort_order', direction).exec();
return inertia.render('Admin/License/Index', {
licenses: licenses,
can: {
edit: await auth.user?.can(['settings']),
},
});
}
public async down({ request, response }: HttpContext) {
const id = request.param('id');
const license = await License.findOrFail(id);
license.active = false;
await license.save();
// session.flash({ message: 'person has been deactivated!' });
return response.flash('License has been deactivated!', 'message').toRoute('settings.license.index')
}
public async up({ request, response }: HttpContext) {
const id = request.param('id');
const license = await License.findOrFail(id);
license.active = true;
await license.save();
// session.flash({ message: 'person has been activated!' });
return response.flash('License has been activated!', 'message').toRoute('settings.license.index');
}
// public async edit({ request, inertia }: HttpContext) {
// const id = request.param('id');
// const license = await License.query().where('id', id).firstOrFail();
// // const permissions = await Permission.query().pluck('name', 'id');
// // // const userHasRoles = user.roles;
// // const rolerHasPermissions = await role.related('permissions').query().orderBy('name').pluck('id');
// return inertia.render('Admin/License/Edit', {
// // permissions: permissions,
// license: license,
// // roleHasPermissions: Object.keys(rolerHasPermissions).map((key) => rolerHasPermissions[key]), //convert object to array with role ids
// });
// }
}

View file

@ -0,0 +1,188 @@
import type { HttpContext } from '@adonisjs/core/http';
import MimeType from '#models/mime_type';
import vine, { SimpleMessagesProvider } from '@vinejs/vine';
export default class MimetypeController {
public async index({ auth, inertia }: HttpContext) {
const direction = 'asc'; // or 'desc'
const mimetypes = await MimeType.query().orderBy('name', direction).exec();
return inertia.render('Admin/Mimetype/Index', {
mimetypes: mimetypes,
can: {
create: await auth.user?.can(['settings']),
edit: await auth.user?.can(['settings']),
},
});
}
public async create({ inertia }: HttpContext) {
// const permissions = await Permission.query().select('id', 'name').pluck('name', 'id');
return inertia.render('Admin/Mimetype/Create', {});
}
public async store({ request, response, session }: HttpContext) {
const newDatasetSchema = vine.object({
name: vine.string().trim().isUnique({ table: 'mime_types', column: 'name' }),
file_extension: vine.array(vine.string()).minLength(1), // define at least one extension for the new mimetype
alternate_mimetype: vine.array(vine.string().isValidMimetype()).distinct().optional(), // define alias mimetypes
enabled: vine.boolean(),
});
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
try {
// Step 2 - Validate request body against the schema
// await request.validate({ schema: newDatasetSchema, messages: this.messages });
const validator = vine.compile(newDatasetSchema);
validator.messagesProvider = new SimpleMessagesProvider(this.messages);
await request.validateUsing(validator, { messagesProvider: new SimpleMessagesProvider(this.messages) });
} catch (error) {
// Step 3 - Handle errors
// return response.badRequest(error.messages);
throw error;
}
const input = request.only(['name', 'enabled', 'file_extension', 'alternate_mimetype']);
// Concatenate the file_extensions array into a string with '|' as the separator
if (Array.isArray(input.file_extension)) {
input.file_extension = input.file_extension.join('|');
}
// Concatenate the alias_mimetype array into a string with '|' as the separator
if (Array.isArray(input.alternate_mimetype)) {
input.alternate_mimetype = input.alternate_mimetype.join('|');
}
await MimeType.create(input);
// if (request.input('roles')) {
// const roles: Array<number> = request.input('roles');
// await user.related('roles').attach(roles);
// }
session.flash('message', 'MimeType has been created successfully');
return response.redirect().toRoute('settings.mimetype.index');
}
public messages = {
'minLength': '{{ field }} must be at least {{ min }} characters long',
'maxLength': '{{ field }} must be less than {{ max }} characters long',
'isUnique': '{{ field }} must be unique, and this value is already taken',
'required': '{{ field }} is required',
'file_extension.array.minLength': 'at least {{ min }} mimetypes must be defined',
'file_extension.*.string': 'Each file extension must be a valid string', // Adjusted to match the type
};
public async edit({ request, inertia }: HttpContext) {
const id = request.param('id');
const mimetype = await MimeType.query().where('id', id).firstOrFail();
// const permissions = await Permission.query().pluck('name', 'id');
// // const userHasRoles = user.roles;
// const rolerHasPermissions = await role.related('permissions').query().orderBy('name').pluck('id');
return inertia.render('Admin/Mimetype/Edit', {
mimetype: mimetype,
});
}
// public async update({ request, response, session }: HttpContext) {
// // node ace make:validator UpdateUser
// const id = request.param('id');
// const role = await Role.query().where('id', id).firstOrFail();
// // validate update form
// // await request.validate(UpdateRoleValidator);
// await request.validateUsing(updateRoleValidator, {
// meta: {
// roleId: role.id,
// },
// });
// // password is optional
// const input = request.only(['name', 'description']);
// await role.merge(input).save();
// // await user.save();
// if (request.input('permissions')) {
// const permissions: Array<number> = request.input('permissions');
// await role.related('permissions').sync(permissions);
// }
// session.flash('message', 'Role has been updated successfully');
// return response.redirect().toRoute('settings.role.index');
// }
public async down({ request, response }: HttpContext) {
const id = request.param('id');
const mimetype = (await MimeType.findOrFail(id)) as MimeType;
mimetype.enabled = false;
await mimetype.save();
// session.flash({ message: 'person has been deactivated!' });
return response.flash('mimetype has been deactivated!', 'message').toRoute('settings.mimetype.index');
}
public async up({ request, response }: HttpContext) {
const id = request.param('id');
const mimetype = await MimeType.findOrFail(id);
mimetype.enabled = true;
await mimetype.save();
// session.flash({ message: 'person has been activated!' });
return response.flash('mimetype has been activated!', 'message').toRoute('settings.mimetype.index');
}
// public async edit({ request, inertia }: HttpContext) {
// const id = request.param('id');
// const license = await License.query().where('id', id).firstOrFail();
// // const permissions = await Permission.query().pluck('name', 'id');
// // // const userHasRoles = user.roles;
// // const rolerHasPermissions = await role.related('permissions').query().orderBy('name').pluck('id');
// return inertia.render('Admin/License/Edit', {
// // permissions: permissions,
// license: license,
// // roleHasPermissions: Object.keys(rolerHasPermissions).map((key) => rolerHasPermissions[key]), //convert object to array with role ids
// });
// }
public async delete({ request, inertia, response, session }: HttpContext) {
const id = request.param('id');
try {
const mimetype = await MimeType.query()
// .preload('user', (builder) => {
// builder.select('id', 'login');
// })
.where('id', id)
// .preload('files')
.firstOrFail();
// const validStates = ['inprogress', 'rejected_editor'];
// if (!validStates.includes(dataset.server_state)) {
// // session.flash('errors', 'Invalid server state!');
// return response
// .flash(
// 'warning',
// `Invalid server state. Dataset with id ${id} cannot be deleted. Datset has server state ${dataset.server_state}.`,
// )
// .redirect()
// .toRoute('dataset.list');
// }
return inertia.render('Admin/Mimetype/Delete', {
mimetype,
});
} catch (error) {
if (error.code == 'E_ROW_NOT_FOUND') {
session.flash({ warning: 'Mimetype is not found in database' });
} else {
session.flash({ warning: 'general error occurred, you cannot delete the mimetype' });
}
return response.redirect().toRoute('mimetype.index');
}
}
public async deleteStore({ request, response, session }: HttpContext) {
const id = request.param('id');
const mimetype = await MimeType.findOrFail(id);
await mimetype.delete();
session.flash('message', `Mimetype ${mimetype.name} has been deleted.`);
return response.redirect().toRoute('settings.mimetype.index');
}
}
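store() flattens the file_extension and alternate_mimetype arrays into single pipe-delimited columns rather than introducing a join table. A small illustration of that convention, with the reverse split being an assumption about how readers of the column behave:

// Round-trip of the '|' column convention used in store() above (the split side is assumed).
export const toColumn = (values: string[]) => values.join('|') // ['tif', 'tiff'] -> 'tif|tiff'
export const fromColumn = (column: string) => column.split('|').filter((v) => v.length > 0)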

View file

@ -1,14 +1,13 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Role from 'App/Models/Role';
import Permission from 'App/Models/Permission';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import CreateRoleValidator from 'App/Validators/CreateRoleValidator';
import UpdateRoleValidator from 'App/Validators/UpdateRoleValidator';
import { RenderResponse } from '@ioc:EidelLev/Inertia';
import type { HttpContext } from '@adonisjs/core/http';
import Role from '#models/role';
import Permission from '#models/permission';
import { createRoleValidator, updateRoleValidator } from '#validators/role';
import type { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
// import { schema, rules } from '@ioc:Adonis/Core/Validator';
export default class RoleController {
public async index({ auth, request, inertia }: HttpContextContract) {
public async index({ auth, request, inertia }: HttpContext) {
let roles: ModelQueryBuilderContract<typeof Role, Role> = Role.query();
if (request.input('search')) {
@ -48,18 +47,19 @@ export default class RoleController {
});
}
public async create({ inertia }: HttpContextContract) {
public async create({ inertia }: HttpContext) {
const permissions = await Permission.query().select('id', 'name').pluck('name', 'id');
return inertia.render('Admin/Role/Create', {
permissions: permissions,
});
}
public async store({ request, response, session }: HttpContextContract) {
public async store({ request, response, session }: HttpContext) {
// node ace make:validator CreateUser
try {
// Step 2 - Validate request body against the schema
await request.validate(CreateRoleValidator);
// await request.validate(CreateRoleValidator);
await request.validateUsing(createRoleValidator);
// await request.validate({ schema: roleSchema });
// console.log({ payload });
} catch (error) {
@ -76,10 +76,10 @@ export default class RoleController {
}
session.flash('message', `Role ${role.name} has been created successfully`);
return response.redirect().toRoute('role.index');
return response.redirect().toRoute('settings.role.index');
}
public async show({ request, inertia }: HttpContextContract): RenderResponse {
public async show({ request, inertia }: HttpContext) {
const id = request.param('id');
const role = await Role.query().where('id', id).firstOrFail();
@ -94,7 +94,7 @@ export default class RoleController {
});
}
public async edit({ request, inertia }: HttpContextContract) {
public async edit({ request, inertia }: HttpContext) {
const id = request.param('id');
const role = await Role.query().where('id', id).firstOrFail();
@ -109,13 +109,18 @@ export default class RoleController {
});
}
public async update({ request, response, session }: HttpContextContract) {
public async update({ request, response, session }: HttpContext) {
// node ace make:validator UpdateUser
const id = request.param('id');
const role = await Role.query().where('id', id).firstOrFail();
// validate update form
await request.validate(UpdateRoleValidator);
// await request.validate(UpdateRoleValidator);
await request.validateUsing(updateRoleValidator, {
meta: {
roleId: role.id,
},
});
// password is optional
@ -129,15 +134,15 @@ export default class RoleController {
}
session.flash('message', 'Role has been updated successfully');
return response.redirect().toRoute('role.index');
return response.redirect().toRoute('settings.role.index');
}
public async destroy({ request, response, session }: HttpContextContract) {
public async destroy({ request, response, session }: HttpContext) {
const id = request.param('id');
const role = await Role.findOrFail(id);
await role.delete();
session.flash('message', `Role ${role.name} has been deleted.`);
return response.redirect().toRoute('role.index');
return response.redirect().toRoute('settings.role.index');
}
}

View file

@ -0,0 +1,104 @@
import type { HttpContext } from '@adonisjs/core/http';
import vine from '@vinejs/vine';
import AppConfig from '#models/appconfig';
import mail from '@adonisjs/mail/services/main';
// import config from '@adonisjs/core/services/config';
// import { configProvider } from '@adonisjs/core';
// import app from '@adonisjs/core/services/app';
export default class MailSettingsController {
/**
* Save the email server settings
*/
public async setMailSettings({ request, response }: HttpContext) {
const settingsSchema = vine.compile(
vine.object({
mail_domain: vine.string(),
mail_from_address: vine.string(),
mail_smtp_mode: vine.string(),
mail_smtpsecure: vine.string().optional(),
mail_smtphost: vine.string(),
mail_smtpport: vine.string(),
mail_smtpauth: vine.boolean(),
// mail_sendmailmode: vine.string().optional(),
}),
);
const validatedData = await request.validateUsing(settingsSchema);
const configData: any = { ...validatedData };
if (!validatedData.mail_smtpauth) {
configData.mail_smtpname = null;
configData.mail_smtppassword = null;
}
// Prepare the settings to be saved
const settingsToSave = [
{ appid: 'settings', configkey: 'default', configvalue: validatedData.mail_smtp_mode, type: 1, lazy: 0 },
{ appid: 'settings', configkey: 'host', configvalue: validatedData.mail_smtphost, type: 1, lazy: 0 },
{ appid: 'settings', configkey: 'port', configvalue: validatedData.mail_smtpport, type: 1, lazy: 0 },
{
appid: 'settings',
configkey: 'from.address',
configvalue: `${validatedData.mail_from_address}@${validatedData.mail_domain}`,
type: 1,
lazy: 0,
},
];
// if (validatedData.mail_smtpauth) {
// settingsToSave.push(
// { appid: 'settings', configkey: 'smtp_user', configvalue: validatedData.mail_smtpname, type: 1, lazy: 0 },
// { appid: 'settings', configkey: 'smtp_password', configvalue: validatedData.mail_smtppassword, type: 1, lazy: 0 },
// );
// } else {
// settingsToSave.push(
// { appid: 'settings', configkey: 'smtp_user', configvalue: null, type: 1, lazy: 0 },
// { appid: 'settings', configkey: 'smtp_password', configvalue: null, type: 1, lazy: 0 },
// );
// }
// Save or update the settings in the database
for (const setting of settingsToSave) {
await AppConfig.updateOrCreate(
{ appid: setting.appid, configkey: setting.configkey },
{ configvalue: setting.configvalue, type: setting.type, lazy: setting.lazy },
);
}
return response.json({ success: true, message: 'Mail settings updated successfully' });
}
/**
* Send a test email to ensure settings work
*/
public async sendTestMail({ response, auth }: HttpContext) {
const user = auth.user!;
const userEmail = user.email;
// let mailManager = await app.container.make('mail.manager');
// let iwas = mailManager.use();
// let test = mail.config.mailers.smtp();
if (!userEmail) {
return response.badRequest({ message: 'User email is not set. Please update your profile.' });
}
try {
await mail.send(
(message) => {
message
// .from(Config.get('mail.from.address'))
.from('tethys@geosphere.at')
.to(userEmail)
.subject('Test Email')
.html('<p>If you received this email, the email configuration seems to be correct.</p>');
});
return response.json({ success: true, message: 'Test email sent successfully' });
// return response.flash('Test email sent successfully!', 'message').redirect().back();
} catch (error) {
return response.internalServerError({ message: `Error sending test email: ${error.message}` });
}
}
}
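setMailSettings persists each SMTP option as an app-config row via AppConfig.updateOrCreate. Reading those rows back into the mailer is not shown in this diff; a hedged sketch of such a lookup, reusing the appid/configkey values written above, could be:

// Hypothetical helper that loads the persisted SMTP settings again.
import AppConfig from '#models/appconfig'

export async function loadSmtpSettings(): Promise<Record<string, string>> {
  const rows = await AppConfig.query()
    .where('appid', 'settings')
    .whereIn('configkey', ['default', 'host', 'port', 'from.address'])

  // Reduce the rows into a flat key/value map, e.g. { host: '...', port: '...' }
  return rows.reduce<Record<string, string>>((acc, row) => {
    acc[row.configkey] = row.configvalue
    return acc
  }, {})
}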

View file

@ -1,31 +1,46 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Person from 'App/Models/Person';
import type { HttpContext } from '@adonisjs/core/http';
import Person from '#models/person';
// import Dataset from 'App/Models/Dataset';
// node ace make:controller Author
export default class AuthorsController {
public async index({}: HttpContextContract) {
// select * from gba.persons
// where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
// where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
public async index({}: HttpContext) {
const authors = await Person.query()
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
});
.select([
'id',
'academic_title',
'first_name',
'last_name',
'identifier_orcid',
'status',
'name_type',
'created_at'
// Note: 'email' is omitted
])
.preload('datasets')
.where('name_type', 'Personal')
.whereHas('datasets', (dQuery) => {
dQuery.wherePivot('role', 'author');
})
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.orderBy('datasets_count', 'desc');
return authors;
}
public async persons({ request }: HttpContextContract) {
public async persons({ request }: HttpContext) {
const authors = Person.query().where('status', true);
if (request.input('filter')) {
// users = users.whereRaw('name like %?%', [request.input('search')])
const searchTerm = request.input('filter');
authors.whereILike('first_name', `%${searchTerm}%`).orWhereILike('last_name', `%${searchTerm}%`);
authors.andWhere((query) => {
query.whereILike('first_name', `%${searchTerm}%`)
.orWhereILike('last_name', `%${searchTerm}%`);
});
// .orWhere('email', 'like', `%${searchTerm}%`);
}

View file

@ -0,0 +1,212 @@
import type { HttpContext } from '@adonisjs/core/http';
import { StatusCodes } from 'http-status-codes';
import redis from '@adonisjs/redis/services/main';
const PREFIXES = ['von', 'van', 'de', 'del', 'della', 'di', 'da', 'dos', 'du', 'le', 'la'];
const DEFAULT_SIZE = 50;
const MIN_SIZE = 16;
const MAX_SIZE = 512;
const FONT_SIZE_RATIO = 0.4;
const COLOR_LIGHTENING_PERCENT = 60;
const COLOR_DARKENING_FACTOR = 0.6;
const CACHE_TTL = 24 * 60 * 60; // 24 hours instead of 1 hour
export default class AvatarController {
public async generateAvatar({ request, response }: HttpContext) {
try {
const { name, size = DEFAULT_SIZE } = request.only(['name', 'size']);
// Enhanced validation
if (!name || typeof name !== 'string' || name.trim().length === 0) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: 'Name is required and must be a non-empty string',
});
}
const parsedSize = this.validateSize(size);
if (!parsedSize.isValid) {
return response.status(StatusCodes.BAD_REQUEST).json({
error: parsedSize.error,
});
}
// Build a unique cache key for the given name and size
const cacheKey = `avatar:${this.sanitizeName(name)}-${parsedSize.value}`;
// const cacheKey = `avatar:${name.trim().toLowerCase()}-${size}`;
try {
const cachedSvg = await redis.get(cacheKey);
if (cachedSvg) {
this.setResponseHeaders(response);
return response.send(cachedSvg);
}
} catch (redisError) {
// Log redis error but continue without cache
console.warn('Redis cache read failed:', redisError);
}
const initials = this.getInitials(name);
const colors = this.generateColors(name);
const svgContent = this.createSvg(parsedSize.value, colors, initials);
// // Cache the generated avatar for future use, e.g. 1 hour expiry
try {
await redis.setex(cacheKey, CACHE_TTL, svgContent);
} catch (redisError) {
// Log but don't fail the request
console.warn('Redis cache write failed:', redisError);
}
this.setResponseHeaders(response);
return response.send(svgContent);
} catch (error) {
console.error('Avatar generation error:', error);
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
error: 'Failed to generate avatar',
});
}
}
private validateSize(size: any): { isValid: boolean; value?: number; error?: string } {
const numSize = Number(size);
if (isNaN(numSize)) {
return { isValid: false, error: 'Size must be a valid number' };
}
if (numSize < MIN_SIZE || numSize > MAX_SIZE) {
return {
isValid: false,
error: `Size must be between ${MIN_SIZE} and ${MAX_SIZE}`,
};
}
return { isValid: true, value: Math.floor(numSize) };
}
private sanitizeName(name: string): string {
return name
.trim()
.toLowerCase()
.replace(/[^a-z0-9\s-]/gi, '');
}
private getInitials(name: string): string {
const sanitized = name.trim().replace(/\s+/g, ' '); // normalize whitespace
const parts = sanitized
.split(' ')
.filter((part) => part.length > 0)
.map((part) => part.trim());
if (parts.length === 0) {
return 'NA';
}
if (parts.length === 1) {
// For single word, take first 2 characters or first char if only 1 char
return parts[0].substring(0, Math.min(2, parts[0].length)).toUpperCase();
}
return this.getMultiWordInitials(parts);
}
private getMultiWordInitials(parts: string[]): string {
// Filter out prefixes and short words
const significantParts = parts.filter((part) => !PREFIXES.includes(part.toLowerCase()) && part.length > 1);
if (significantParts.length === 0) {
// Fallback to first and last regardless of prefixes
const firstName = parts[0];
const lastName = parts[parts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
}
if (significantParts.length === 1) {
return significantParts[0].substring(0, 2).toUpperCase();
}
// Take first and last significant parts
const firstName = significantParts[0];
const lastName = significantParts[significantParts.length - 1];
return (firstName.charAt(0) + lastName.charAt(0)).toUpperCase();
}
private generateColors(name: string): { background: string; text: string } {
const baseColor = this.getColorFromName(name);
return {
background: this.lightenColor(baseColor, COLOR_LIGHTENING_PERCENT),
text: this.darkenColor(baseColor),
};
}
private createSvg(size: number, colors: { background: string; text: string }, initials: string): string {
const fontSize = Math.max(12, Math.floor(size * FONT_SIZE_RATIO)); // Ensure readable font size
// Escape any potential HTML/XML characters in initials
const escapedInitials = this.escapeXml(initials);
return `<svg width="${size}" height="${size}" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 ${size} ${size}">
<rect width="100%" height="100%" fill="#${colors.background}" rx="${size * 0.1}"/>
<text x="50%" y="50%" dominant-baseline="central" text-anchor="middle"
font-weight="600" font-family="-apple-system, BlinkMacSystemFont, 'Segoe UI', system-ui, sans-serif"
font-size="${fontSize}" fill="#${colors.text}">${escapedInitials}</text>
</svg>`;
}
private escapeXml(text: string): string {
return text.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&apos;');
}
private setResponseHeaders(response: HttpContext['response']): void {
response.header('Content-Type', 'image/svg+xml');
response.header('Cache-Control', 'public, max-age=86400'); // Cache for 1 day
response.header('ETag', `"${Date.now()}"`); // Simple ETag
}
private getColorFromName(name: string): string {
let hash = 0;
const normalizedName = name.toLowerCase().trim();
for (let i = 0; i < normalizedName.length; i++) {
hash = normalizedName.charCodeAt(i) + ((hash << 5) - hash);
hash = hash & hash; // Convert to 32-bit integer
}
// Ensure we get vibrant colors by constraining the color space
const colorParts = [];
for (let i = 0; i < 3; i++) {
let value = (hash >> (i * 8)) & 0xff;
// Ensure minimum color intensity for better contrast
value = Math.max(50, value);
colorParts.push(value.toString(16).padStart(2, '0'));
}
return colorParts.join('');
}
private lightenColor(hexColor: string, percent: number): string {
const r = parseInt(hexColor.substring(0, 2), 16);
const g = parseInt(hexColor.substring(2, 4), 16);
const b = parseInt(hexColor.substring(4, 6), 16);
const lightenValue = (value: number) => Math.min(255, Math.floor(value + (255 - value) * (percent / 100)));
const newR = lightenValue(r);
const newG = lightenValue(g);
const newB = lightenValue(b);
return ((newR << 16) | (newG << 8) | newB).toString(16).padStart(6, '0');
}
private darkenColor(hexColor: string): string {
const r = parseInt(hexColor.slice(0, 2), 16);
const g = parseInt(hexColor.slice(2, 4), 16);
const b = parseInt(hexColor.slice(4, 6), 16);
const darkenValue = (value: number) => Math.max(0, Math.floor(value * COLOR_DARKENING_FACTOR));
const darkerR = darkenValue(r);
const darkerG = darkenValue(g);
const darkerB = darkenValue(b);
return ((darkerR << 16) + (darkerG << 8) + darkerB).toString(16).padStart(6, '0');
}
}
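generateAvatar reads name and size from the request, caches the generated SVG in Redis, and returns it with long-lived cache headers. The route wiring is not part of this diff; one plausible registration (the path and controller import path are assumptions) would be:

// Hypothetical route registration for the avatar endpoint.
import router from '@adonisjs/core/services/router'

const AvatarController = () => import('#controllers/avatar_controller')

router.get('/api/avatar', [AvatarController, 'generateAvatar'])
// e.g. GET /api/avatar?name=Jane%20van%20Doe&size=64 returns a 64x64 SVG with initials "JD"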

View file

@ -1,20 +1,37 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
// import Person from 'App/Models/Person';
import Dataset from 'App/Models/Dataset';
import type { HttpContext } from '@adonisjs/core/http';
import Dataset from '#models/dataset';
import { StatusCodes } from 'http-status-codes';
import DatasetReference from '#models/dataset_reference';
// node ace make:controller Author
export default class DatasetController {
public async index({}: HttpContextContract) {
// select * from gba.persons
// where exists (select * from gba.documents inner join gba.link_documents_persons on "documents"."id" = "link_documents_persons"."document_id"
// where ("link_documents_persons"."role" = 'author') and ("persons"."id" = "link_documents_persons"."person_id"));
const datasets = await Dataset.query().where('server_state', 'published').orWhere('server_state', 'deleted');
/**
* GET /api/datasets
* Find all published datasets
*/
public async index({ response }: HttpContext) {
try {
const datasets = await Dataset.query()
.where(function (query) {
query.where('server_state', 'published').orWhere('server_state', 'deleted');
})
.preload('titles')
.preload('identifier')
.orderBy('server_date_published', 'desc');
return datasets;
return response.status(StatusCodes.OK).json(datasets);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || 'Some error occurred while retrieving datasets.',
});
}
}
public async findAll({ response }: HttpContextContract) {
/**
* GET /api/dataset
* Find all published datasets
*/
public async findAll({ response }: HttpContext) {
try {
const datasets = await Dataset.query()
.where('server_state', 'published')
@ -29,34 +46,279 @@ export default class DatasetController {
}
}
public async findOne({ params }: HttpContextContract) {
const datasets = await Dataset.query()
.where('publish_id', params.publish_id)
.preload('titles')
.preload('descriptions')
.preload('user')
.preload('authors', (builder) => {
builder.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
.preload('referenced_by', (builder) => {
builder.preload('dataset', (builder) => {
builder.preload('identifier');
});
})
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.firstOrFail();
/**
* GET /api/dataset/:publish_id
* Find one dataset by publish_id
*/
public async findOne({ response, params }: HttpContext) {
try {
const dataset = await Dataset.query()
.where('publish_id', params.publish_id)
.preload('titles')
.preload('descriptions') // Using 'descriptions' instead of 'abstracts'
.preload('user', (builder) => {
builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
})
.preload('authors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order'])
.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order', 'contributor_type'])
.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
// .preload('referenced_by', (builder) => {
// builder.preload('dataset', (builder) => {
// builder.preload('identifier');
// });
// })
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.first(); // Use first() instead of firstOrFail() to handle not found gracefully
return datasets;
if (!dataset) {
return response.status(StatusCodes.NOT_FOUND).json({
message: `Cannot find Dataset with publish_id=${params.publish_id}.`,
});
}
// Build the version chain
const versionChain = await this.buildVersionChain(dataset);
// Add version chain to response
const responseData = {
...dataset.toJSON(),
versionChain: versionChain,
};
// return response.status(StatusCodes.OK).json(dataset);
return response.status(StatusCodes.OK).json(responseData);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || `Error retrieving Dataset with publish_id=${params.publish_id}.`,
});
}
}
/**
* GET /:prefix/:value
* Find dataset by identifier (e.g., https://doi.tethys.at/10.24341/tethys.99.2)
*/
public async findByIdentifier({ response, params }: HttpContext) {
const identifierValue = `${params.prefix}/${params.value}`;
// Optional: Validate DOI format
if (!identifierValue.match(/^10\.\d+\/[a-zA-Z0-9._-]+\.[0-9]+(?:\.[0-9]+)*$/)) {
return response.status(StatusCodes.BAD_REQUEST).json({
message: `Invalid DOI format: ${identifierValue}`,
});
}
try {
// Method 1: Using subquery with whereIn (most similar to your original)
const dataset = await Dataset.query()
// .whereIn('id', (subQuery) => {
// subQuery.select('dataset_id').from('dataset_identifiers').where('value', identifierValue);
// })
.whereHas('identifier', (builder) => {
builder.where('value', identifierValue);
})
.preload('titles')
.preload('descriptions') // Using 'descriptions' instead of 'abstracts'
.preload('user', (builder) => {
builder.select(['id', 'firstName', 'lastName', 'avatar', 'login']);
})
.preload('authors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order'])
.wherePivot('role', 'author')
.orderBy('pivot_sort_order', 'asc');
})
.preload('contributors', (builder) => {
builder
.select(['id', 'academic_title', 'first_name', 'last_name', 'identifier_orcid', 'status', 'name_type'])
.withCount('datasets', (query) => {
query.as('datasets_count');
})
.pivotColumns(['role', 'sort_order', 'contributor_type'])
.wherePivot('role', 'contributor')
.orderBy('pivot_sort_order', 'asc');
})
.preload('subjects')
.preload('coverage')
.preload('licenses')
.preload('references')
.preload('project')
// .preload('referenced_by', (builder) => {
// builder.preload('dataset', (builder) => {
// builder.preload('identifier');
// });
// })
.preload('files', (builder) => {
builder.preload('hashvalues');
})
.preload('identifier')
.first();
if (!dataset) {
return response.status(StatusCodes.NOT_FOUND).json({
message: `Cannot find Dataset with identifier=${identifierValue}.`,
});
}
// Build the version chain
const versionChain = await this.buildVersionChain(dataset);
// Add version chain to response
const responseData = {
...dataset.toJSON(),
versionChain: versionChain,
};
// return response.status(StatusCodes.OK).json(dataset);
return response.status(StatusCodes.OK).json(responseData);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || `Error retrieving Dataset with identifier=${identifierValue}.`,
});
}
}
/**
* Build the complete version chain for a dataset
* Traverses both backwards (previous versions) and forwards (newer versions)
*/
private async buildVersionChain(dataset: Dataset) {
const versionChain = {
current: {
id: dataset.id,
publish_id: dataset.publish_id,
doi: dataset.identifier?.value || null,
main_title: dataset.mainTitle || null,
server_date_published: dataset.server_date_published,
},
previousVersions: [] as any[],
newerVersions: [] as any[],
};
// Get all previous versions (going backwards in time)
versionChain.previousVersions = await this.getPreviousVersions(dataset.id);
// Get all newer versions (going forwards in time)
versionChain.newerVersions = await this.getNewerVersions(dataset.id);
return versionChain;
}
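// Illustrative shape of the object assembled above (all values invented):
// {
//   current:          { id: 10, publish_id: 231, doi: '10.24341/tethys.99.2', main_title: '...', server_date_published: ... },
//   previousVersions: [{ id: 7, publish_id: 198, doi: '10.24341/tethys.99.1', relation: 'IsPreviousVersionOf', ... }],
//   newerVersions:    []
// }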
/**
* Recursively get all previous versions
*/
private async getPreviousVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
// Prevent infinite loops
if (visited.has(datasetId)) {
return [];
}
visited.add(datasetId);
const previousVersions: any[] = [];
// Find references where this dataset "IsNewVersionOf" another dataset
const previousRefs = await DatasetReference.query()
.where('document_id', datasetId)
.where('relation', 'IsNewVersionOf')
.whereNotNull('related_document_id');
for (const ref of previousRefs) {
if (!ref.related_document_id) continue;
const previousDataset = await Dataset.query()
.where('id', ref.related_document_id)
.preload('identifier')
.preload('titles')
.first();
if (previousDataset) {
const versionInfo = {
id: previousDataset.id,
publish_id: previousDataset.publish_id,
doi: previousDataset.identifier?.value || null,
main_title: previousDataset.mainTitle || null,
server_date_published: previousDataset.server_date_published,
relation: 'IsPreviousVersionOf', // From perspective of current dataset
};
previousVersions.push(versionInfo);
// Recursively get even older versions
const olderVersions = await this.getPreviousVersions(previousDataset.id, visited);
previousVersions.push(...olderVersions);
}
}
return previousVersions;
}
/**
* Recursively get all newer versions
*/
private async getNewerVersions(datasetId: number, visited: Set<number> = new Set()): Promise<any[]> {
// Prevent infinite loops
if (visited.has(datasetId)) {
return [];
}
visited.add(datasetId);
const newerVersions: any[] = [];
// Find references where this dataset "IsPreviousVersionOf" another dataset
const newerRefs = await DatasetReference.query()
.where('document_id', datasetId)
.where('relation', 'IsPreviousVersionOf')
.whereNotNull('related_document_id');
for (const ref of newerRefs) {
if (!ref.related_document_id) continue;
const newerDataset = await Dataset.query().where('id', ref.related_document_id).preload('identifier').preload('titles').first();
if (newerDataset) {
const versionInfo = {
id: newerDataset.id,
publish_id: newerDataset.publish_id,
doi: newerDataset.identifier?.value || null,
main_title: newerDataset.mainTitle || null,
server_date_published: newerDataset.server_date_published,
relation: 'IsNewVersionOf', // From perspective of current dataset
};
newerVersions.push(versionInfo);
// Recursively get even newer versions
const evenNewerVersions = await this.getNewerVersions(newerDataset.id, visited);
newerVersions.push(...evenNewerVersions);
}
}
return newerVersions;
}
}

View file

@ -0,0 +1,104 @@
import type { HttpContext } from '@adonisjs/core/http';
import File from '#models/file';
import { StatusCodes } from 'http-status-codes';
import * as fs from 'fs';
import { DateTime } from 'luxon';
// node ace make:controller Author
export default class FileController {
// @Get("download/:id")
public async findOne({ response, params }: HttpContext) {
const id = params.id;
// const file = await File.findOrFail(id);
// Load file with its related dataset to check embargo
const file = await File.query()
.where('id', id)
.preload('dataset') // or 'dataset' - whatever your relationship is named
.firstOrFail();
if (!file) {
return response.status(StatusCodes.NOT_FOUND).send({
message: `Cannot find File with id=${id}.`,
});
}
const dataset = file.dataset;
// Files from unpublished datasets are now blocked
if (dataset.server_state !== 'published') {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File access denied: Dataset is not published.`,
});
}
if (dataset && this.isUnderEmbargo(dataset.embargo_date)) {
return response.status(StatusCodes.FORBIDDEN).send({
message: `File is under embargo until ${dataset.embargo_date?.toFormat('yyyy-MM-dd')}`,
});
}
// Proceed with file download
const filePath = '/storage/app/data/' + file.pathName;
const fileExt = file.filePath.split('.').pop() || '';
// const fileName = file.label + fileExt;
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Determine if file can be previewed inline in browser
const canPreviewInline = (mimeType: string): boolean => {
const type = mimeType.toLowerCase();
return (
type === 'application/pdf' ||
type.startsWith('image/') ||
type.startsWith('text/') ||
type === 'application/json' ||
type === 'application/xml' ||
// Video and audio are also previewed inline; remove the next two checks to force a download instead
type.startsWith('video/') ||
type.startsWith('audio/')
);
};
const disposition = canPreviewInline(file.mimeType) ? 'inline' : 'attachment';
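// e.g. 'application/pdf' or 'image/png' is served inline for in-browser preview,
// while something like 'application/zip' falls back to an attachment download.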
try {
fs.accessSync(filePath, fs.constants.R_OK); //| fs.constants.W_OK);
// console.log("can read/write:", filePath);
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mimeType)
.header('Content-Disposition', `${disposition}; filename="${fileName}"`)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.status(StatusCodes.OK).download(filePath);
} catch (err) {
// console.log("no access:", path);
response.status(StatusCodes.NOT_FOUND).send({
message: `File with id ${id} doesn't exist on file server`,
});
}
}
/**
* Check if the dataset is under embargo
* Compares only dates (ignoring time) for embargo check
* @param embargoDate - The embargo date from dataset
* @returns true if under embargo, false if embargo has passed or no embargo set
*/
private isUnderEmbargo(embargoDate: DateTime | null): boolean {
// No embargo date set - allow download
if (!embargoDate) {
return false;
}
// Get current date at start of day (00:00:00)
const today = DateTime.now().startOf('day');
// Get embargo date at start of day (00:00:00)
const embargoDateOnly = embargoDate.startOf('day');
// File is under embargo if the embargo date is today or later
// (i.e. the embargo lifts on the day after the embargo date)
return embargoDateOnly >= today;
}
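// Worked example (dates invented): with embargo_date = 2025-06-01, downloads are refused
// on 2025-05-31 and on 2025-06-01 itself (embargoDateOnly >= today), and allowed from 2025-06-02 on.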
}

View file

@ -1,9 +1,9 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Database from '@ioc:Adonis/Lucid/Database';
import type { HttpContext } from '@adonisjs/core/http';
import db from '@adonisjs/lucid/services/db';
import { StatusCodes } from 'http-status-codes';
export default class HomeController {
public async findDocumentsPerYear({ response, params }: HttpContextContract) {
public async findDocumentsPerYear({ response, params }: HttpContext) {
const year = params.year;
const from = parseInt(year);
const serverState = 'published';
@ -17,8 +17,9 @@ export default class HomeController {
// .preload('authors')
// .orderBy('server_date_published');
const datasets = await Database.from('documents as doc')
.select(['publish_id', 'server_date_published', Database.raw(`date_part('year', server_date_published) as pub_year`)])
const datasets = await db
.from('documents as doc')
.select(['publish_id', 'server_date_published', db.raw(`date_part('year', server_date_published) as pub_year`)])
.where('server_state', serverState)
.innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
.andWhereRaw(`date_part('year', server_date_published) = ?`, [from])
@ -32,17 +33,17 @@ export default class HomeController {
}
}
public async findYears({ response }: HttpContextContract) {
public async findYears({ response }: HttpContext) {
const serverState = 'published';
// Use a raw SQL query to select the distinct publication years of all published datasets
try {
const datasets = await Database.rawQuery(
const datasets = await db.rawQuery(
'SELECT distinct EXTRACT(YEAR FROM server_date_published) as published_date FROM gba.documents WHERE server_state = ?',
[serverState],
);
// Pluck the publication years from the result rows
const years = datasets.rows.map((dataset) => dataset.published_date);
const years = datasets.rows.map((dataset: any) => dataset.published_date);
// check if any years were returned
// if (years.length > 0) {
return response.status(StatusCodes.OK).json(years);
@ -53,4 +54,92 @@ export default class HomeController {
});
}
}
public async findPublicationsPerMonth({ response }: HttpContext) {
const serverState = 'published';
// const year = params.year;
// const from = parseInt(year);
try {
// const datasets = await Database.from('documents as doc')
// .select([Database.raw(`date_part('month', server_date_published) as pub_month`), Database.raw('COUNT(*) as count')])
// .where('server_state', serverState)
// .innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
// .andWhereRaw(`date_part('year', server_date_published) = ?`, [from])
// .groupBy('pub_month');
// // .orderBy('server_date_published');
// Determine the last 4 full years (the current year itself is excluded)
const currentYear = new Date().getFullYear();
const years = Array.from({ length: 4 }, (_, i) => currentYear - (i + 1)).reverse();
const result = await db
.from('documents as doc')
.select([
db.raw(`date_part('year', server_date_published) as pub_year`),
db.raw(`date_part('month', server_date_published) as pub_month`),
db.raw('COUNT(*) as count'),
])
.where('server_state', serverState)
// .innerJoin('link_documents_persons as ba', 'doc.id', 'ba.document_id')
// .whereIn('pub_year', years) // Filter by both years
.whereRaw(`date_part('year', server_date_published) IN (${years.join(',')})`) // Filter by the selected years
.groupBy('pub_year', 'pub_month')
.orderBy('pub_year', 'asc')
.orderBy('pub_month', 'asc');
const labels = Array.from({ length: 12 }, (_, i) => i + 1); // Assuming 12 months
const inputDatasets: Record<string, ChartDataset> = result.reduce((acc, item) => {
const { pub_year, pub_month, count } = item;
if (!acc[pub_year]) {
acc[pub_year] = {
data: Array.from({ length: 12 }).fill(0),
label: pub_year.toString(),
borderColor: this.getRandomHexColor(), // pub_year === 2022 ? '#3e95cd' : '#8e5ea2',
fill: false,
};
}
acc[pub_year].data[pub_month - 1] = parseInt(count);
return acc;
}, {});
const outputDatasets = Object.entries(inputDatasets).map(([year, data]) => ({
data: data.data,
label: year,
borderColor: data.borderColor,
fill: data.fill,
}));
const data = {
labels: labels,
datasets: outputDatasets,
};
return response.json(data);
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: error.message || 'Some error occurred while retrieving datasets.',
});
}
}
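// Illustrative response (years and counts invented): a Chart.js-style payload such as
// { labels: [1, 2, ..., 12],
//   datasets: [{ label: '2023', data: [3, 0, 5, ...], borderColor: '#A1B2C3', fill: false },
//              { label: '2024', data: [1, 4, 0, ...], borderColor: '#0F1E2D', fill: false }] }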
private getRandomHexColor() {
const letters = '0123456789ABCDEF';
let color = '#';
for (let i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}
}
interface ChartDataset {
data: Array<number>;
label: string;
borderColor: string;
fill: boolean;
}

View file

@ -0,0 +1,132 @@
import type { HttpContext } from '@adonisjs/core/http';
import User from '#models/user';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
import { StatusCodes } from 'http-status-codes';
import { InvalidArgumentException } from 'node-exceptions';
import { TotpState } from '#contracts/enums';
import BackupCodeStorage, { SecureRandom } from '#services/backup_code_storage';
import BackupCode from '#models/backup_code';
// Here we generate the secret and recovery codes for the user that's enabling 2FA and store them in our database.
export default class UserController {
public async getSubmitters({ response }: HttpContext) {
try {
const submitters = await User.query()
.preload('roles', (query) => {
query.where('name', 'submitter')
})
.whereHas('roles', (query) => {
query.where('name', 'submitter')
})
.exec();
return submitters;
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: 'Error retrieving submitters.',
});
}
}
public async enable({ auth, response, request }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User;
// await user.load('totp_secret');
// if (!user.totp_secret) {
// let totpSecret = new TotpSecret();
// user.related('totp_secret').save(totpSecret);
// await user.load('totp_secret');
// }
if (!user) {
throw new Error('user not available');
}
const state: number = request.input('state');
try {
switch (state) {
case TotpState.STATE_DISABLED:
// user.twoFactorSecret = null;
// user.twoFactorRecoveryCodes = null;
await BackupCode.deleteCodes(user);
user.twoFactorSecret = '';
// user.twoFactorRecoveryCodes = [''];
await user.save();
user.state = TotpState.STATE_DISABLED;
await user.save();
let storage = new BackupCodeStorage(new SecureRandom());
let backupState = await storage.getBackupCodesState(user);
return response.status(StatusCodes.OK).json({
state: TotpState.STATE_DISABLED,
backupState: backupState,
});
case TotpState.STATE_CREATED:
user.twoFactorSecret = TwoFactorAuthProvider.generateSecret(user);
user.state = TotpState.STATE_CREATED;
await user.save();
let qrcode = await TwoFactorAuthProvider.generateQrCode(user);
// throw new InvalidArgumentException('code is missing');
return response.status(StatusCodes.OK).json({
state: user.state,
secret: user.twoFactorSecret,
url: qrcode.url,
svg: qrcode.svg,
});
case TotpState.STATE_ENABLED:
let code: string = request.input('code');
if (!code) {
throw new InvalidArgumentException('code is missing');
}
const success = await TwoFactorAuthProvider.enable(user, code);
return response.status(StatusCodes.OK).json({
state: success ? TotpState.STATE_ENABLED : TotpState.STATE_CREATED,
});
default:
throw new InvalidArgumentException('Invalid TOTP state');
}
} catch (error) {
return response.status(StatusCodes.INTERNAL_SERVER_ERROR).json({
message: 'Invalid TOTP state',
});
}
}
// public async fetchRecoveryCodes({ auth, view }) {
// const user = auth?.user;
// return view.render('pages/settings', {
// twoFactorEnabled: user.isTwoFactorEnabled,
// recoveryCodes: user.twoFactorRecoveryCodes,
// });
// }
/**
* @NoAdminRequired
* @PasswordConfirmationRequired
*
* @return JSONResponse
*/
public async createCodes({ auth, response }: HttpContext) {
// $user = $this->userSession->getUser();
const user = (await User.find(auth.user?.id)) as User;
// let codes = TwoFactorAuthProvider.generateRecoveryCodes();
let storage = new BackupCodeStorage(new SecureRandom());
// $codes = $this->storage->createCodes($user);
const codes = await storage.createCodes(user);
let backupState = await storage.getBackupCodesState(user);
// return new JSONResponse([
// 'codes' => $codes,
// 'state' => $this->storage->getBackupCodesState($user),
// ]);
return response.status(StatusCodes.OK).json({
codes: codes,
// state: success ? TotpState.STATE_ENABLED : TotpState.STATE_CREATED,
backupState: backupState, //storage.getBackupCodesState(user),
});
}
}

View file

@ -0,0 +1,36 @@
import type { HttpContext } from '@adonisjs/core/http';
import Collection from '#models/collection';
export default class CollectionsController {
public async show({ params, response }: HttpContext) {
// Get the collection id from route parameters
const collectionId = params.id;
// Find the selected collection by id
const collection = await Collection.find(collectionId);
if (!collection) {
return response.status(404).json({ message: 'Collection not found' });
}
// Query for narrower concepts: collections whose parent_id equals the selected collection's id
const narrowerCollections = await Collection.query().where('parent_id', collection.id);
// For broader concept, if the selected collection has a parent_id fetch that record (otherwise null)
const broaderCollection: Collection[] | null = await (async () => {
    if (collection.parent_id) {
        // Try to fetch the parent...
        const parent = await Collection.find(collection.parent_id);
        // If found, return it wrapped in an array; if not found, return null
        return parent ? [parent] : null;
    }
    return [];
})();
// Return the selected collection along with its narrower and broader concepts in JSON format
return response.json({
selectedCollection: collection,
narrowerCollections,
broaderCollection,
});
}
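// Illustrative response (ids invented): for a request with params.id = 5, something like
// { selectedCollection: { id: 5, parent_id: 2, ... },
//   narrowerCollections: [{ id: 9, parent_id: 5, ... }],
//   broaderCollection: [{ id: 2, ... }] }
// broaderCollection is [] for a top-level collection and null if the referenced parent row is missing.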
}

View file

@ -1,40 +1,131 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
// import User from 'App/Models/User';
import type { HttpContext } from '@adonisjs/core/http';
import User from '#models/user';
import BackupCode from '#models/backup_code';
// import Hash from '@ioc:Adonis/Core/Hash';
// import InvalidCredentialException from 'App/Exceptions/InvalidCredentialException';
import AuthValidator from 'App/Validators/AuthValidator';
import { authValidator } from '#validators/auth';
import hash from '@adonisjs/core/services/hash';
import db from '@adonisjs/lucid/services/db';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
// import { Authenticator } from '@adonisjs/auth';
// import { LoginState } from 'Contracts/enums';
// import { StatusCodes } from 'http-status-codes';
// interface MyHttpsContext extends HttpContext {
// auth: Authenticator<User>
// }
export default class AuthController {
// login function
public async login({ request, response, auth, session }: HttpContextContract) {
// login function{ request, auth, response }:HttpContext
public async login({ request, response, auth, session }: HttpContext) {
// console.log({
// registerBody: request.body(),
// });
await request.validate(AuthValidator);
// await request.validate(AuthValidator);
await request.validateUsing(authValidator);
const plainPassword = await request.input('password');
const email = await request.input('email');
// const plainPassword = await request.input('password');
// const email = await request.input('email');
// grab uid and password values off request body
// const { email, password } = request.only(['email', 'password'])
const { email, password } = request.only(['email', 'password']);
try {
// attempt to login
await auth.use('web').attempt(email, plainPassword);
await db.connection().rawQuery('SELECT 1')
// // attempt to verify credential and login user
// await auth.use('web').attempt(email, plainPassword);
// const user = await auth.use('web').verifyCredentials(email, password);
const user = await User.verifyCredentials(email, password);
if (user.isTwoFactorEnabled) {
// session.put("login.id", user.id);
// return view.render("pages/two-factor-challenge");
session.flash('user_id', user.id);
return response.redirect().back();
// let state = LoginState.STATE_VALIDATED;
// return response.status(StatusCodes.OK).json({
// state: state,
// new_user_id: user.id,
// });
}
await auth.use('web').login(user);
} catch (error) {
if (error.code === 'ECONNREFUSED') {
throw error
}
// if login fails, return vague form message and redirect back
session.flash('message', 'Your username, email, or password is incorrect');
return response.redirect().back();
}
// otherwise, redirect to the dashboard
response.redirect('/dashboard');
response.redirect('/apps/dashboard');
}
public async twoFactorChallenge({ request, session, auth, response }: HttpContext) {
const { code, backup_code, login_id } = request.only(['code', 'backup_code', 'login_id']);
const user = await User.query().where('id', login_id).firstOrFail();
if (code) {
const isValid = await TwoFactorAuthProvider.validate(user, code);
if (isValid) {
// login user and redirect to dashboard
await auth.use('web').login(user);
response.redirect('/apps/dashboard');
} else {
session.flash('message', 'Your two-factor code is incorrect');
return response.redirect().back();
}
} else if (backup_code) {
const codes: BackupCode[] = await user.getBackupCodes();
// const verifiedBackupCodes = await Promise.all(
// codes.map(async (backupCode) => {
// let isVerified = await hash.verify(backupCode.code, backup_code);
// if (isVerified) {
// return backupCode;
// }
// }),
// );
// const backupCodeToDelete = verifiedBackupCodes.find(Boolean);
let backupCodeToDelete = null;
for (const backupCode of codes) {
const isVerified = await hash.verify(backupCode.code, backup_code);
if (isVerified) {
backupCodeToDelete = backupCode;
break;
}
}
if (backupCodeToDelete) {
if (backupCodeToDelete.used === false) {
backupCodeToDelete.used = true;
await backupCodeToDelete.save();
console.log(`BackupCode with id ${backupCodeToDelete.id} has been marked as used.`);
await auth.use('web').login(user);
response.redirect('/apps/dashboard');
} else {
session.flash('message', 'BackupCode already used');
return response.redirect().back();
}
} else {
session.flash('message', 'BackupCode not found');
return response.redirect().back();
}
}
}
// logout function
public async logout({ auth, response }: HttpContextContract) {
public async logout({ auth, response }: HttpContext) {
// await auth.logout();
await auth.use('web').logout();
response.redirect('/app/login');
return response.redirect('/app/login');
// return response.status(200);
}
}

View file

@ -0,0 +1,306 @@
import type { HttpContext } from '@adonisjs/core/http';
import User from '#models/user';
// import { RenderResponse } from '@ioc:EidelLev/Inertia';
import TwoFactorAuthProvider from '#app/services/TwoFactorAuthProvider';
import hash from '@adonisjs/core/services/hash';
// import { schema, rules } from '@adonisjs/validator';
import vine from '@vinejs/vine';
import BackupCodeStorage, { SecureRandom } from '#services/backup_code_storage';
import path from 'path';
import crypto from 'crypto';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import logger from '@adonisjs/core/services/logger';
// Here we generate the secret and recovery codes for the user that's enabling 2FA and store them in our database.
export default class UserController {
/**
* Show the user a form to change their personal information & password.
*
* @return \Inertia\Response
*/
public async accountInfo({ inertia, auth }: HttpContext) {
// const user = auth.user;
const user = (await User.find(auth.user?.id)) as User;
// const id = request.param('id');
// const user = await User.query().where('id', id).firstOrFail();
let storage = new BackupCodeStorage(new SecureRandom());
// const codes= user.isTwoFactorEnabled? (await user.getBackupCodes()).map((role) => role.code) : [];
let backupState = await storage.getBackupCodesState(user);
return inertia.render('Auth/AccountInfo', {
user: user,
twoFactorEnabled: user.isTwoFactorEnabled,
// code: await TwoFactorAuthProvider.generateQrCode(user),
backupState: backupState,
});
}
public async accountInfoStore({ auth, request, response, session }: HttpContext) {
// const passwordSchema = schema.create({
// old_password: schema.string({ trim: true }, [rules.required()]),
// new_password: schema.string({ trim: true }, [rules.minLength(8), rules.maxLength(255), rules.confirmed('confirm_password')]),
// confirm_password: schema.string({ trim: true }, [rules.required()]),
// });
const passwordSchema = vine.object({
// first step
old_password: vine.string().trim(),
// .regex(/^[a-zA-Z0-9]+$/),
new_password: vine.string().confirmed({ confirmationField: 'confirm_password' }).trim().minLength(8).maxLength(255),
});
try {
// await request.validate({ schema: passwordSchema });
const validator = vine.compile(passwordSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
try {
const user = (await auth.user) as User;
const { old_password, new_password } = request.only(['old_password', 'new_password']);
// if (!(old_password && new_password && confirm_password)) {
// return response.status(400).send({ warning: 'Old password and new password are required.' });
// }
// Verify if the provided old password matches the user's current password
const isSame = await hash.verify(user.password, old_password);
if (!isSame) {
session.flash('warning', 'Old password is incorrect.');
return response.redirect().back();
}
// Hash the new password before updating the user's password
user.password = new_password;
await user.save();
// return response.status(200).send({ message: 'Password updated successfully.' });
session.flash({ message: 'Password updated successfully.' });
return response.redirect().toRoute('settings.user');
} catch (error) {
// return response.status(500).send({ message: 'Internal server error.' });
session.flash('warning', `Invalid server state. Internal server error.`);
return response.redirect().back();
}
}
public async profile({ inertia, auth }: HttpContext) {
const user = await User.find(auth.user?.id);
// let test = await drive.use().getUrl(user?.avatar);
// user?.preload('roles');
const avatarFullPathUrl = user?.avatar ? await drive.use('public').getUrl(user.avatar) : null;
return inertia.render('profile/show', {
user: user,
defaultUrl: avatarFullPathUrl,
});
}
/**
* Update the user's profile information.
*
* @param {HttpContext} ctx - The HTTP context object.
* @returns {Promise<void>}
*/
public async profileUpdate({ auth, request, response, session }: HttpContext) {
if (!auth.user) {
session.flash('error', 'You must be logged in to update your profile.');
return response.redirect().toRoute('login');
}
const updateProfileValidator = vine.withMetaData<{ userId: number }>().compile(
vine.object({
first_name: vine.string().trim().minLength(4).maxLength(255),
last_name: vine.string().trim().minLength(4).maxLength(255),
login: vine.string().trim().minLength(4).maxLength(255),
email: vine
.string()
.trim()
.maxLength(255)
.email()
.normalizeEmail()
.isUnique({ table: 'accounts', column: 'email', whereNot: (field) => field.meta.userId }),
avatar: vine
.myfile({
size: '2mb',
extnames: ['jpg', 'jpeg', 'png', 'gif', 'webp', 'svg'],
})
// .allowedMimetypeExtensions({
// allowedExtensions: ['jpg', 'jpeg', 'png', 'gif', 'webp', 'svg'],
// })
.optional(),
}),
);
const user = await User.find(auth.user.id);
if (!user) {
session.flash('error', 'User not found.');
return response.redirect().toRoute('login');
}
try {
// validate update form
await request.validateUsing(updateProfileValidator, {
meta: {
userId: user.id,
},
});
const { login, email, first_name, last_name } = request.only(['login', 'email', 'first_name', 'last_name']);
const sanitizedData: { [key: string]: any } = {
login: login?.trim(),
email: email?.toLowerCase().trim(),
first_name: first_name?.trim(),
last_name: last_name?.trim(),
// avatar: "",
};
const toCamelCase = (str: string) => str.replace(/_([a-z])/g, (g) => g[1].toUpperCase());
const hasInputChanges = Object.keys(sanitizedData).some((key) => {
const camelKey = toCamelCase(key);
return sanitizedData[key] !== (user.$attributes as { [key: string]: any })[camelKey];
});
let hasAvatarChanged = false;
const avatar = request.file('avatar');
if (avatar) {
const fileHash = crypto
.createHash('sha256')
.update(avatar.clientName + avatar.size)
.digest('hex');
const fileName = `avatar-${fileHash}.${avatar.extname}`;
const avatarFullPath = path.join('/uploads', `${user.login}`, fileName);
if (user.avatar != avatarFullPath) {
if (user.avatar) {
await drive.use('public').delete(user.avatar);
}
hasAvatarChanged = user.avatar !== avatarFullPath;
await avatar.moveToDisk(avatarFullPath, 'public', {
name: fileName,
overwrite: true, // overwrite in case of conflict
disk: 'public',
});
sanitizedData.avatar = avatarFullPath;
}
}
if (!hasInputChanges && !hasAvatarChanged) {
session.flash('message', 'No changes were made.');
return response.redirect().back();
}
await user.merge(sanitizedData).save();
session.flash('message', 'User has been updated successfully');
return response.redirect().toRoute('settings.profile.edit');
} catch (error) {
logger.error('Profile update failed:', error);
// session.flash('errors', 'Profile update failed. Please try again.');
// return response.redirect().back();
throw error;
}
}
public async passwordUpdate({ auth, request, response, session }: HttpContext) {
// const passwordSchema = schema.create({
// old_password: schema.string({ trim: true }, [rules.required()]),
// new_password: schema.string({ trim: true }, [rules.minLength(8), rules.maxLength(255), rules.confirmed('confirm_password')]),
// confirm_password: schema.string({ trim: true }, [rules.required()]),
// });
const passwordSchema = vine.object({
// first step
old_password: vine.string().trim(),
// .regex(/^[a-zA-Z0-9]+$/),
new_password: vine.string().confirmed({ confirmationField: 'confirm_password' }).trim().minLength(8).maxLength(255),
});
try {
// await request.validate({ schema: passwordSchema });
const validator = vine.compile(passwordSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
try {
const user = (await auth.user) as User;
const { old_password, new_password } = request.only(['old_password', 'new_password']);
// if (!(old_password && new_password && confirm_password)) {
// return response.status(400).send({ warning: 'Old password and new password are required.' });
// }
// Verify if the provided old password matches the user's current password
const isSame = await hash.verify(user.password, old_password);
if (!isSame) {
session.flash('warning', 'Old password is incorrect.');
return response.redirect().back();
// return response.flash('warning', 'Old password is incorrect.').redirect().back();
}
// Hash the new password before updating the user's password
user.password = new_password;
await user.save();
// return response.status(200).send({ message: 'Password updated successfully.' });
session.flash({ message: 'Password updated successfully.' });
return response.redirect().toRoute('settings.profile.edit');
} catch (error) {
// return response.status(500).send({ message: 'Internal server error.' });
session.flash('warning', `Invalid server state. Internal server error.`);
return response.redirect().back();
}
}
public async enableTwoFactorAuthentication({ auth, response, session }: HttpContext): Promise<void> {
// const user: User | undefined = auth?.user;
const user = (await User.find(auth.user?.id)) as User;
user.twoFactorSecret = TwoFactorAuthProvider.generateSecret(user);
user.twoFactorRecoveryCodes = await TwoFactorAuthProvider.generateRecoveryCodes();
await user.save();
session.flash('message', 'Two factor authentication enabled.');
return response.redirect().back();
// return inertia.render('Auth/AccountInfo', {
// // status: {
// // type: 'success',
// // message: 'Two factor authentication enabled.',
// // },
// user: user,
// twoFactorEnabled: user.isTwoFactorEnabled,
// code: await TwoFactorAuthProvider.generateQrCode(user),
// recoveryCodes: user.twoFactorRecoveryCodes,
// });
}
public async disableTwoFactorAuthentication({ auth, response, session }: HttpContext): Promise<void> {
const user: User | undefined = auth.user;
if (user) {
user.twoFactorSecret = null;
user.twoFactorRecoveryCodes = null;
await user.save();
session.flash('message', 'Two-factor authentication disabled.');
} else {
session.flash('error', 'User not found.');
}
return response.redirect().back();
// return inertia.render('Auth/AccountInfo', {
// // status: {
// // type: 'success',
// // message: 'Two factor authentication disabled.',
// // },
// user: user,
// twoFactorEnabled: user.isTwoFactorEnabled,
// });
}
// public async fetchRecoveryCodes({ auth, view }) {
// const user = auth?.user;
// return view.render('pages/settings', {
// twoFactorEnabled: user.isTwoFactorEnabled,
// recoveryCodes: user.twoFactorRecoveryCodes,
// });
// }
}

File diff suppressed because it is too large

View file

@ -0,0 +1,65 @@
<?xml version="1.0" encoding="utf-8"?>
<resource xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://datacite.org/schema/kernel-4"
xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.3/metadata.xsd">
<identifier identifierType="DOI">10.21388/tethys.213</identifier>
<creators>
<creator>
<creatorName nameType="Personal">Moser, Michael</creatorName>
<givenName>Michael</givenName>
<familyName>Moser</familyName>
<affiliation>GBA</affiliation>
</creator>
</creators>
<titles>
<title xml:lang="en">rewerewr</title>
</titles>
<publisher>Tethys RDR</publisher>
<publicationYear>2024</publicationYear>
<subjects>
<subject xml:lang="de">Aletshausen-Langenneufnach Störung</subject>
<subject xml:lang="de">Wolfersberg-Moosach Störung</subject>
<subject xml:lang="en">wefwef</subject>
</subjects>
<language>en</language>
<contributors>
<contributor contributorType="RegistrationAuthority">
<contributorName>Jürgen Reitner</contributorName>
</contributor>
</contributors>
<dates>
<date dateType="Created">2023-11-30</date>
</dates>
<version>1</version>
<resourceType resourceTypeGeneral="Dataset">Dataset</resourceType>
<alternateIdentifiers>
<alternateIdentifier alternateIdentifierType="url">https://www.tethys.at/dataset/213</alternateIdentifier>
</alternateIdentifiers>
<rightsList>
<rights xml:lang="" rightsURI="https://creativecommons.org/licenses/by/4.0/deed.en"
schemeURI="https://spdx.org/licenses/" rightsIdentifierScheme="SPDX"
rightsIdentifier="CC-BY-4.0">Creative Commons Attribution 4.0 International (CC BY 4.0)</rights>
<rights rightsURI="info:eu-repo/semantics/openAccess">Open Access</rights>
</rightsList>
<sizes>
<size>1 datasets</size>
</sizes>
<formats>
<format>image/png</format>
</formats>
<descriptions>
<description xml:lang="en" descriptionType="Abstract">rewrewr</description>
</descriptions>
<geoLocations>
<geoLocation>
<geoLocationBox>
<westBoundLongitude>11.71142578125</westBoundLongitude>
<eastBoundLongitude>14.414062500000002</eastBoundLongitude>
<southBoundLatitude>46.58906908309185</southBoundLatitude>
<northBoundLatitude>47.45780853075031</northBoundLatitude>
</geoLocationBox>
</geoLocation>
</geoLocations>
</resource>

View file

@ -1,16 +1,30 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import { RequestContract } from '@ioc:Adonis/Core/Request';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import type { HttpContext } from '@adonisjs/core/http';
// import { RequestContract } from '@ioc:Adonis/Core/Request';
import { Request } from '@adonisjs/core/http';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2';
import dayjs, { Dayjs } from 'dayjs';
import utc from 'dayjs/plugin/utc';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc.js';
import timezone from 'dayjs/plugin/timezone.js';
import { readFileSync } from 'fs';
import { StatusCodes } from 'http-status-codes';
import { transform } from 'saxon-js';
import SaxonJS from 'saxon-js';
// import { Xslt, xmlParse } from 'xslt-processor'
import { OaiErrorCodes, OaiModelError } from 'App/Exceptions/OaiErrorCodes';
import { OaiModelException } from 'App/Exceptions/OaiModelException';
import { OaiErrorCodes, OaiModelError } from '#app/exceptions/OaiErrorCodes';
import { OaiModelException, BadOaiModelException } from '#app/exceptions/OaiModelException';
import Dataset from '#models/dataset';
import Collection from '#models/collection';
import { getDomain, preg_match } from '#app/utils/utility-functions';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import logger from '@adonisjs/core/services/logger';
import ResumptionToken from '#app/Library/Oai/ResumptionToken';
// import Config from '@ioc:Adonis/Core/Config';
import config from '@adonisjs/core/services/config';
// import { inject } from '@adonisjs/fold';
import { inject } from '@adonisjs/core';
// import { TokenWorkerContract } from "MyApp/Models/TokenWorker";
import TokenWorkerContract from '#library/Oai/TokenWorkerContract';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
interface XslTParameter {
[key: string]: any;
@ -20,10 +34,23 @@ interface Dictionary {
[index: string]: string;
}
interface PagingParameter {
cursor: number;
totalLength: number;
start: number;
nextDocIds: number[];
activeWorkIds: number[];
metadataPrefix: string;
queryParams: Object;
}
@inject()
export default class OaiController {
// private deliveringDocumentStates = ["published", "deleted"];
// private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
private deliveringDocumentStates = ['published', 'deleted'];
private sampleRegEx = /^[A-Za-zäüÄÜß0-9\-_.!~]+$/;
private xsltParameter: XslTParameter;
private firstPublishedDataset: Dataset | null;
/**
* Holds xml representation of document information to be processed.
*
@ -32,23 +59,19 @@ export default class OaiController {
private xml: XMLBuilder;
private proc;
constructor() {
constructor(public tokenWorker: TokenWorkerContract) {
// Load the XSLT file
this.proc = readFileSync('public/assets2/datasetxml2oai.sef.json');
// tests
// const xslPath = 'assets/datasetxml2oai-pmh.xslt'; // Replace with the actual path to your XSLT file
// this.proc = readFileSync(xslPath, 'utf-8');
// this.configuration = new Configuration();
dayjs.extend(utc);
dayjs.extend(timezone);
}
public async index({ response, request }: HttpContextContract): Promise<void> {
public async index({ response, request }: HttpContext): Promise<void> {
this.xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
// this.proc = new XSLTProcessor();
// const stylesheet = readFileSync(__dirname + "/datasetxml2oai.sef.json");
const xsltParameter = (this.xsltParameter = {});
const xsltParameter: XslTParameter = (this.xsltParameter = {});
let oaiRequest: Dictionary = {};
if (request.method() === 'POST') {
@ -59,9 +82,16 @@ export default class OaiController {
xsltParameter['oai_error_code'] = 'unknown';
xsltParameter['oai_error_message'] = 'Only POST and GET methods are allowed for OAI-PMH.';
}
let earliestDateFromDb;
// const oaiRequest: OaiParameter = request.body;
try {
this.handleRequest(oaiRequest, request);
this.firstPublishedDataset = await Dataset.earliestPublicationDate();
this.firstPublishedDataset != null &&
(earliestDateFromDb = this.firstPublishedDataset.server_date_published.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"));
this.xsltParameter['earliestDatestamp'] = earliestDateFromDb;
// start the request
await this.handleRequest(oaiRequest, request);
} catch (error) {
if (error instanceof OaiModelException) {
const code = error.oaiCode;
@ -80,9 +110,9 @@ export default class OaiController {
const xmlString = this.xml.end({ prettyPrint: true });
let xmlOutput;
let xmlOutput; // = xmlString;
try {
const result = await transform({
const result = await SaxonJS.transform({
// stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
stylesheetText: this.proc,
destination: 'serialized',
@ -106,7 +136,7 @@ export default class OaiController {
response.status(StatusCodes.OK).send(xmlOutput);
}
protected handleRequest(oaiRequest: Dictionary, request: RequestContract) {
protected async handleRequest(oaiRequest: Dictionary, request: Request) {
// Setup stylesheet
// $this->loadStyleSheet('datasetxml2oai-pmh.xslt');
@ -116,7 +146,7 @@ export default class OaiController {
this.xsltParameter['unixTimestamp'] = now.unix();
// set OAI base url
const baseDomain = process.env.BASE_DOMAIN || 'localhost';
const baseDomain = process.env.OAI_BASE_DOMAIN || 'localhost';
this.xsltParameter['baseURL'] = baseDomain + '/oai';
this.xsltParameter['repURL'] = request.protocol() + '://' + request.hostname();
this.xsltParameter['downloadLink'] = request.protocol() + '://' + request.hostname() + '/file/download/';
@ -130,27 +160,22 @@ export default class OaiController {
this.handleIdentify();
} else if (verb === 'ListMetadataFormats') {
this.handleListMetadataFormats();
}
// else if (verb == "GetRecord") {
// await this.handleGetRecord(oaiRequest);
// } else if (verb == "ListRecords") {
// await this.handleListRecords(oaiRequest);
// } else if (verb == "ListIdentifiers") {
// await this.handleListIdentifiers(oaiRequest);
// } else if (verb == "ListSets") {
// await this.handleListSets();
// }
else {
} else if (verb == 'GetRecord') {
await this.handleGetRecord(oaiRequest);
} else if (verb == 'ListRecords') {
// Get browser fingerprint from the request:
const browserFingerprint = this.getBrowserFingerprint(request);
await this.handleListRecords(oaiRequest, browserFingerprint);
} else if (verb == 'ListIdentifiers') {
// Get browser fingerprint from the request:
const browserFingerprint = this.getBrowserFingerprint(request);
await this.handleListIdentifiers(oaiRequest, browserFingerprint);
} else if (verb == 'ListSets') {
await this.handleListSets();
} else {
this.handleIllegalVerb();
}
} else {
// // try {
// // console.log("Async code example.")
// const err = new PageNotFoundException("verb not found");
// throw err;
// // } catch (error) { // manually catching
// // next(error); // passing to default middleware error handler
// // }
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The verb provided in the request is illegal.',
@ -160,11 +185,11 @@ export default class OaiController {
}
protected handleIdentify() {
const email = process.env.OAI_EMAIL || 'repository@geosphere.at';
const repositoryName = 'Tethys RDR';
const repIdentifier = 'tethys.at';
const sampleIdentifier = 'oai:' + repIdentifier + ':1'; //$this->_configuration->getSampleIdentifier();
// Get configuration values from environment or a dedicated configuration service
const email = process.env.OAI_EMAIL ?? 'repository@geosphere.at';
const repositoryName = process.env.OAI_REPOSITORY_NAME ?? 'Tethys RDR';
const repIdentifier = process.env.OAI_REP_IDENTIFIER ?? 'tethys.at';
const sampleIdentifier = `oai:${repIdentifier}:1`;
// Dataset::earliestPublicationDate()->server_date_published->format('Y-m-d\TH:i:s\Z') : null;
// earliestDateFromDb!= null && (this.xsltParameter['earliestDatestamp'] = earliestDateFromDb?.server_date_published);
@ -182,8 +207,523 @@ export default class OaiController {
this.xml.root().ele('Datasets');
}
protected async handleListSets() {
const repIdentifier = 'tethys.at';
this.xsltParameter['repIdentifier'] = repIdentifier;
const datasetElement = this.xml.root().ele('Datasets');
const sets: { [key: string]: string } = {
'open_access': 'Set for open access licenses',
'openaire_data': 'OpenAIRE',
'doc-type:ResearchData': 'Set for document type ResearchData',
...(await this.getSetsForDatasetTypes()),
...(await this.getSetsForCollections()),
// ... await this.getSetsForProjects(),
} as Dictionary;
for (const [key, value] of Object.entries(sets)) {
const setElement = datasetElement.ele('Rdr_Sets');
setElement.att('Type', key);
setElement.att('TypeName', value);
}
}
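// For illustration, the <Rdr_Sets> elements rendered here carry attribute pairs such as
//   Type="open_access"        TypeName="Set for open access licenses"
//   Type="data-type:mixed"    TypeName="Set for document type 'mixed'"
//   Type="ddc:550"            TypeName="Set 550 'Earth sciences'"
// where the data-type and ddc entries depend on the published datasets and the visible
// collection roles (the concrete values above are invented).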
protected async handleGetRecord(oaiRequest: Dictionary) {
const repIdentifier = 'tethys.at';
this.xsltParameter['repIdentifier'] = repIdentifier;
// Validate that required parameter exists early
if (!('identifier' in oaiRequest)) {
throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
}
// Validate and extract the dataset identifier from the request
const dataId = this.validateAndGetIdentifier(oaiRequest);
// Retrieve dataset with associated XML cache and collection roles
const dataset = await Dataset.query()
.where('publish_id', dataId)
.preload('xmlCache')
.preload('collections', (builder) => {
builder.preload('collectionRole');
})
.first();
if (!dataset || !dataset.publish_id) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The value of the identifier argument is unknown or illegal in this repository.',
OaiErrorCodes.IDDOESNOTEXIST,
);
}
// Validate and set the metadata prefix parameter
const metadataPrefix = this.validateAndGetMetadataPrefix(oaiRequest);
this.xsltParameter['oai_metadataPrefix'] = metadataPrefix;
// Ensure that the dataset is in an exportable state
this.validateDatasetState(dataset);
// Build the XML for the dataset record and add it to the root node
const datasetNode = this.xml.root().ele('Datasets');
await this.createXmlRecord(dataset, datasetNode);
}
protected async handleListIdentifiers(oaiRequest: Dictionary, browserFingerprint: string) {
if (!this.tokenWorker.isConnected) {
await this.tokenWorker.connect();
}
const maxIdentifier: number = config.get('oai.max.listidentifiers', 100);
await this.handleLists(oaiRequest, maxIdentifier, browserFingerprint);
}
protected async handleListRecords(oaiRequest: Dictionary, browserFingerprint: string) {
if (!this.tokenWorker.isConnected) {
await this.tokenWorker.connect();
}
const maxRecords: number = config.get('oai.max.listrecords', 100);
await this.handleLists(oaiRequest, maxRecords, browserFingerprint);
}
private async handleLists(oaiRequest: Dictionary, maxRecords: number, browserFingerprint: string) {
const repIdentifier = 'tethys.at';
this.xsltParameter['repIdentifier'] = repIdentifier;
const datasetNode = this.xml.root().ele('Datasets');
const paginationParams: PagingParameter = {
cursor: 0,
totalLength: 0,
start: maxRecords + 1,
nextDocIds: [],
activeWorkIds: [],
metadataPrefix: '',
queryParams: {},
};
if ('resumptionToken' in oaiRequest) {
await this.handleResumptionToken(oaiRequest, maxRecords, paginationParams);
} else {
await this.handleNoResumptionToken(oaiRequest, paginationParams, maxRecords);
}
const nextIds: number[] = paginationParams.nextDocIds;
const workIds: number[] = paginationParams.activeWorkIds;
if (workIds.length === 0) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The combination of the given values results in an empty list.',
OaiErrorCodes.NORECORDSMATCH,
);
}
const datasets = await Dataset.query()
.whereIn('publish_id', workIds)
.preload('xmlCache')
.preload('collections', (builder) => {
builder.preload('collectionRole');
})
.orderBy('publish_id');
for (const dataset of datasets) {
await this.createXmlRecord(dataset, datasetNode);
}
await this.setResumptionToken(nextIds, paginationParams, browserFingerprint);
}
private async handleNoResumptionToken(oaiRequest: Dictionary, paginationParams: PagingParameter, maxRecords: number) {
this.validateMetadataPrefix(oaiRequest, paginationParams);
const finder: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query().whereIn(
'server_state',
this.deliveringDocumentStates,
);
this.applySetFilter(finder, oaiRequest);
this.applyDateFilters(finder, oaiRequest);
await this.fetchAndSetResults(finder, paginationParams, oaiRequest, maxRecords);
}
private async fetchAndSetResults(
finder: ModelQueryBuilderContract<typeof Dataset, Dataset>,
paginationParams: PagingParameter,
oaiRequest: Dictionary,
maxRecords: number,
) {
const totalResult = await finder
.clone()
.count('* as total')
.first()
.then((res) => res?.$extras.total);
paginationParams.totalLength = Number(totalResult);
const combinedRecords: Dataset[] = await finder
.select('publish_id')
.orderBy('publish_id')
.offset(0)
.limit(maxRecords * 2);
paginationParams.activeWorkIds = combinedRecords.slice(0, maxRecords).map((dat) => Number(dat.publish_id));
paginationParams.nextDocIds = combinedRecords.slice(maxRecords).map((dat) => Number(dat.publish_id));
// No resumption token was used, so set queryParams from the current oaiRequest
paginationParams.queryParams = {
...oaiRequest,
deliveringStates: this.deliveringDocumentStates,
};
// paginationParams.totalLength = 230;
}
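// Numeric example (counts invented): with maxRecords = 100 and 230 matching datasets,
// the first response lists the first 100 publish_ids (activeWorkIds), the next 100 are stored
// in nextDocIds for the resumption token, and the remaining 30 are only reached via a further token.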
private async handleResumptionToken(oaiRequest: Dictionary, maxRecords: number, paginationParams: PagingParameter) {
const resParam = oaiRequest['resumptionToken'];
const token = await this.tokenWorker.get(resParam);
if (!token) {
throw new OaiModelException(StatusCodes.INTERNAL_SERVER_ERROR, 'cache is outdated.', OaiErrorCodes.BADRESUMPTIONTOKEN);
}
// this.setResumptionParameters(token, maxRecords, paginationParams);
paginationParams.cursor = token.startPosition - 1;
paginationParams.start = token.startPosition + maxRecords;
paginationParams.totalLength = token.totalIds;
paginationParams.activeWorkIds = token.documentIds;
paginationParams.metadataPrefix = token.metadataPrefix;
paginationParams.queryParams = token.queryParams;
this.xsltParameter['oai_metadataPrefix'] = token.metadataPrefix;
const finder = this.buildDatasetQueryViaToken(token);
const nextRecords: Dataset[] = await this.fetchNextRecords(finder, token, maxRecords);
paginationParams.nextDocIds = nextRecords.map((dat) => Number(dat.publish_id));
}
private async setResumptionToken(nextIds: number[], paginationParams: PagingParameter, browserFingerprint: string) {
const countRestIds = nextIds.length;
if (countRestIds > 0) {
// const token = this.createResumptionToken(paginationParams, nextIds);
const token = new ResumptionToken();
token.startPosition = paginationParams.start;
token.totalIds = paginationParams.totalLength;
token.documentIds = nextIds;
token.metadataPrefix = paginationParams.metadataPrefix;
token.queryParams = paginationParams.queryParams;
const res: string = await this.tokenWorker.set(token, browserFingerprint);
this.setParamResumption(res, paginationParams.cursor, paginationParams.totalLength);
}
}
private buildDatasetQueryViaToken(token: ResumptionToken) {
const finder = Dataset.query();
const originalQuery = token.queryParams || {};
const deliveringStates = originalQuery.deliveringStates || this.deliveringDocumentStates;
finder.whereIn('server_state', deliveringStates);
this.applySetFilter(finder, originalQuery);
this.applyDateFilters(finder, originalQuery);
return finder;
}
private async fetchNextRecords(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, token: ResumptionToken, maxRecords: number) {
return finder
.select('publish_id')
.orderBy('publish_id')
.offset(token.startPosition - 1 + maxRecords)
.limit(maxRecords);
}
private validateMetadataPrefix(oaiRequest: Dictionary, paginationParams: PagingParameter) {
if (!('metadataPrefix' in oaiRequest)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The prefix of the metadata argument is unknown.',
OaiErrorCodes.BADARGUMENT,
);
}
paginationParams.metadataPrefix = oaiRequest['metadataPrefix'];
this.xsltParameter['oai_metadataPrefix'] = paginationParams.metadataPrefix;
}
private applySetFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
if ('set' in queryParams) {
const [setType, setValue] = queryParams['set'].split(':');
switch (setType) {
case 'data-type':
setValue && finder.where('type', setValue);
break;
case 'open_access':
finder.andWhereHas('licenses', (query) => {
query.whereIn('name', ['CC-BY-4.0', 'CC-BY-SA-4.0']);
});
break;
case 'ddc':
setValue &&
finder.andWhereHas('collections', (query) => {
query.where('number', setValue);
});
break;
}
}
}
private applyDateFilters(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, queryParams: any) {
const { from, until } = queryParams;
if (from && until) {
this.handleFromUntilFilter(finder, from, until);
} else if (from) {
this.handleFromFilter(finder, from);
} else if (until) {
this.handleUntilFilter(finder, until);
}
}
private handleFromUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string, until: string) {
const fromDate = this.parseDateWithValidation(from, 'From');
const untilDate = this.parseDateWithValidation(until, 'Until');
if (from.length !== until.length) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The request has different granularities for the from and until parameters.',
OaiErrorCodes.BADARGUMENT,
);
}
finder.whereBetween('server_date_published', [fromDate.format('YYYY-MM-DD HH:mm:ss'), untilDate.format('YYYY-MM-DD HH:mm:ss')]);
}
private handleFromFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, from: string) {
const fromDate = this.parseDateWithValidation(from, 'From');
const now = dayjs();
if (fromDate.isAfter(now)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'Given from date is greater than now. The given values result in an empty list.',
OaiErrorCodes.NORECORDSMATCH,
);
}
finder.andWhere('server_date_published', '>=', fromDate.format('YYYY-MM-DD HH:mm:ss'));
}
private handleUntilFilter(finder: ModelQueryBuilderContract<typeof Dataset, Dataset>, until: string) {
const untilDate = this.parseDateWithValidation(until, 'Until');
const earliestPublicationDate = dayjs(this.firstPublishedDataset?.server_date_published.toISO());
if (earliestPublicationDate.isAfter(untilDate)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'earliestDatestamp is greater than the given until date. The given values result in an empty list.',
OaiErrorCodes.NORECORDSMATCH,
);
}
finder.andWhere('server_date_published', '<=', untilDate.format('YYYY-MM-DD HH:mm:ss'));
}
private parseDateWithValidation(dateStr: string, label: string) {
let date = dayjs(dateStr);
if (!date.isValid()) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
`${label} date parameter is not valid.`,
OaiErrorCodes.BADARGUMENT,
);
}
date = dayjs.tz(dateStr, 'Europe/Vienna');
return date.hour() === 0 ? (label === 'From' ? date.startOf('day') : date.endOf('day')) : date;
}
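// Example: a date-only value such as '2020-01-01' parses to midnight (Europe/Vienna) and is widened
// to the start of that day for a 'From' parameter and to the end of that day for an 'Until' parameter;
// values that carry a non-midnight time component are kept as parsed.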
private setParamResumption(res: string, cursor: number, totalIds: number) {
const tomorrow = dayjs().add(1, 'day').format('YYYY-MM-DDTHH:mm:ss[Z]');
this.xsltParameter['dateDelete'] = tomorrow;
this.xsltParameter['res'] = res;
this.xsltParameter['cursor'] = cursor;
this.xsltParameter['totalIds'] = totalIds;
}
private validateAndGetIdentifier(oaiRequest: Dictionary): number {
// Identifier references metadata Urn, not plain Id!
// Currently implemented as 'oai:foo.bar.de:{docId}' or 'urn:nbn...-123'
if (!('identifier' in oaiRequest)) {
throw new BadOaiModelException('The prefix of the identifier argument is unknown.');
}
const dataId = Number(this.getDocumentIdByIdentifier(oaiRequest.identifier));
if (isNaN(dataId)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The value of the identifier argument is illegal in this repository.',
OaiErrorCodes.BADARGUMENT,
);
}
return dataId;
}
private validateAndGetMetadataPrefix(oaiRequest: Dictionary): string {
let metadataPrefix = '';
if ('metadataPrefix' in oaiRequest) {
metadataPrefix = oaiRequest['metadataPrefix'];
} else {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'The prefix of the metadata argument is unknown.',
OaiErrorCodes.BADARGUMENT,
);
}
return metadataPrefix;
}
private validateDatasetState(dataset: Dataset): void {
if (dataset.server_state == null || !this.deliveringDocumentStates.includes(dataset.server_state)) {
throw new OaiModelException(
StatusCodes.INTERNAL_SERVER_ERROR,
'Document is not available for OAI export!',
OaiErrorCodes.NORECORDSMATCH,
);
}
}
private async createXmlRecord(dataset: Dataset, datasetNode: XMLBuilder) {
const domNode = await this.getDatasetXmlDomNode(dataset);
if (domNode) {
// add frontdoor url and data-type
dataset.publish_id && this.addLandingPageAttribute(domNode, dataset.publish_id.toString());
this.addSpecInformation(domNode, 'data-type:' + dataset.type);
if (dataset.collections) {
for (const coll of dataset.collections) {
const collRole = coll.collectionRole;
this.addSpecInformation(domNode, collRole.oai_name + ':' + coll.number);
}
}
datasetNode.import(domNode);
}
}
private async getDatasetXmlDomNode(dataset: Dataset) {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
// const cache = dataset.xmlCache ? dataset.xmlCache : null;
// dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
}
// return cache.toXmlDocument();
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
}
private addSpecInformation(domNode: XMLBuilder, information: string) {
domNode.ele('SetSpec').att('Value', information);
}
private addLandingPageAttribute(domNode: XMLBuilder, dataid: string) {
const baseDomain = process.env.OAI_BASE_DOMAIN || 'localhost';
const url = 'https://' + getDomain(baseDomain) + '/dataset/' + dataid;
// add attribute to the dataset xml element
domNode.att('landingpage', url);
}
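// Resulting attribute on the dataset element, with a hypothetical domain and publish id:
// <Rdr_Dataset landingpage="https://tethys.at/dataset/123" ...>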
private getDocumentIdByIdentifier(oaiIdentifier: string): string {
const identifierParts: string[] = oaiIdentifier.split(':'); // explode(":", $oaiIdentifier);
const dataId: string = identifierParts[2];
// switch (identifierParts[0]) {
// case 'oai':
// if (isset($identifierParts[2])) {
// $dataId = $identifierParts[2];
// }
// break;
// default:
// throw new OaiModelException(
// 'The prefix of the identifier argument is unknown.',
// OaiModelError::BADARGUMENT
// );
// break;
// }
// if (empty($dataId) or !preg_match('/^\d+$/', $dataId)) {
// throw new OaiModelException(
// 'The value of the identifier argument is unknown or illegal in this repository.',
// OaiModelError::IDDOESNOTEXIST
// );
return dataId;
}
private async getSetsForCollections(): Promise<Dictionary> {
const sets: { [key: string]: string } = {} as Dictionary;
const collections = await Collection.query()
.select('name', 'number', 'role_id')
.whereHas('collectionRole', (query) => {
query.where('visible_oai', true);
})
.preload('collectionRole');
collections.forEach((collection) => {
// if collection has a collection role (classification like ddc):
if (collection.number) {
// collection.load('collectionRole');
const setSpec = collection.collectionRole?.oai_name + ':' + collection.number;
sets[setSpec] = `Set ${collection.number} '${collection.name}'`;
}
});
return sets;
}
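// Illustrative shape of the returned dictionary (collection names and numbers are hypothetical):
// { "ddc:550": "Set 550 'Earth sciences'", "ccs:38": "Set 38 'Geophysics'" }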
private async getSetsForDatasetTypes(): Promise<Dictionary> {
const sets: { [key: string]: string } = {} as Dictionary;
const datasets: Array<Dataset> = await Dataset.query().select('type').where('server_state', 'published');
datasets.forEach((dataset) => {
if (dataset.type && false == preg_match(this.sampleRegEx, dataset.type)) {
const msg = `Invalid SetSpec (data-type='${dataset.type}').
Allowed characters are [${this.sampleRegEx}].`;
// Log::error("OAI-PMH: $msg");
logger.error(`OAI-PMH: ${msg}`);
return;
}
const setSpec = 'data-type:' + dataset.type;
sets[setSpec] = `Set for document type '${dataset.type}'`;
});
return sets;
}
private handleIllegalVerb() {
this.xsltParameter['oai_error_code'] = 'badVerb';
this.xsltParameter['oai_error_message'] = 'The verb provided in the request is illegal.';
}
/**
* Helper method to build a browser fingerprint by combining:
* - User-Agent header,
* - the IP address,
* - Accept-Language header,
* - current timestamp rounded to the hour.
*
* Every new hour, this will return a different fingerprint.
*/
private getBrowserFingerprint(request: Request): string {
const userAgent = request.header('user-agent') || 'unknown';
// Check for X-Forwarded-For header to use the client IP from the proxy if available.
const xForwardedFor = request.header('x-forwarded-for');
let ip = request.ip();
// console.log(ip);
if (xForwardedFor) {
// X-Forwarded-For may contain a comma-separated list of IPs; the first one is the client IP.
ip = xForwardedFor.split(',')[0].trim();
// console.log('xforwardedfor ip' + ip);
}
const locale = request.header('accept-language') || 'default';
// Round the current time to the start of the hour.
const timestampHour = dayjs().startOf('hour').format('YYYY-MM-DDTHH');
return `${userAgent}-${ip}-${locale}-${timestampHour}`;
}
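// Illustrative fingerprint built from the parts above (all values hypothetical):
// 'Mozilla/5.0 (X11; Linux x86_64)-203.0.113.7-en-US-2025-11-13T11'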
}

View file

@ -0,0 +1,408 @@
import type { HttpContext } from '@adonisjs/core/http';
import User from '#models/user';
import Dataset from '#models/dataset';
import Field from '#app/Library/Field';
import BaseModel from '#models/base_model';
import { DateTime } from 'luxon';
import { ModelQueryBuilderContract } from '@adonisjs/lucid/types/model';
import vine from '@vinejs/vine';
import mail from '@adonisjs/mail/services/main';
import logger from '@adonisjs/core/services/logger';
import { validate } from 'deep-email-validator';
import File from '#models/file';
interface Dictionary {
[index: string]: string;
}
export default class DatasetsController {
public async index({ auth, request, inertia }: HttpContext) {
const user = (await User.find(auth.user?.id)) as User;
const page = request.input('page', 1);
let datasets: ModelQueryBuilderContract<typeof Dataset, Dataset> = Dataset.query();
// if (request.input('search')) {
// // users = users.whereRaw('name like %?%', [request.input('search')])
// const searchTerm = request.input('search');
// datasets.where('name', 'ilike', `%${searchTerm}%`);
// }
if (request.input('sort')) {
type SortOrder = 'asc' | 'desc' | undefined;
let attribute = request.input('sort');
let sortOrder: SortOrder = 'asc';
if (attribute.substr(0, 1) === '-') {
sortOrder = 'desc';
// attribute = substr(attribute, 1);
attribute = attribute.substr(1);
}
datasets.orderBy(attribute, sortOrder);
} else {
// datasets.orderBy('id', 'asc');
// Custom ordering to prioritize rejected_editor state
datasets.orderByRaw(`
CASE
WHEN server_state = 'rejected_to_reviewer' THEN 0
ELSE 1
END ASC,
id ASC
`);
}
// const users = await User.query().orderBy('login').paginate(page, limit);
const myDatasets = await datasets
// .where('server_state', 'approved')
.whereIn('server_state', ['approved', 'rejected_to_reviewer'])
.where('reviewer_id', user.id)
.preload('titles')
.preload('user', (query) => query.select('id', 'login'))
.preload('editor', (query) => query.select('id', 'login'))
.paginate(page, 10);
return inertia.render('Reviewer/Dataset/Index', {
datasets: myDatasets.serialize(),
filters: request.all(),
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
public async review({ request, inertia, response, auth }: HttpContext) {
const id = request.param('id');
const datasetQuery = Dataset.query().where('id', id);
datasetQuery
.preload('titles', (query) => query.orderBy('id', 'asc'))
.preload('descriptions', (query) => query.orderBy('id', 'asc'))
.preload('coverage')
.preload('licenses')
.preload('authors', (query) => query.orderBy('pivot_sort_order', 'asc'))
.preload('contributors', (query) => query.orderBy('pivot_sort_order', 'asc'))
// .preload('subjects')
.preload('subjects', (builder) => {
builder.orderBy('id', 'asc').withCount('datasets');
})
.preload('references')
.preload('project')
.preload('files', (query) => {
query.orderBy('sort_order', 'asc'); // Sort by sort_order column
});
const dataset = await datasetQuery.firstOrFail();
const validStates = ['approved', 'rejected_to_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be reviewed. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('reviewer.dataset.list');
}
return inertia.render('Reviewer/Dataset/Review', {
dataset,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
public async review_old({ request, inertia, response, auth }: HttpContext) {
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
// .preload('titles')
// .preload('descriptions')
.preload('user', (builder) => {
builder.select('id', 'login');
})
.firstOrFail();
const validStates = ['approved'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be reviewed. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('reviewer.dataset.list');
}
const fieldnames: Array<string> = await dataset.describe();
const fields: Dictionary = {};
for (const fieldName of fieldnames) {
const field: Field = dataset.getField(fieldName) as Field;
const modelClass = field.getValueModelClass();
let fieldValues = field.getValue();
let value = '';
if (fieldValues === null || fieldValues == undefined) {
continue;
}
if (modelClass === null) {
if (typeof fieldValues === 'number') {
// If the field values are a number, use them as is
value = fieldValues.toString();
} else {
// If the field values are not a number, use the replace() function to remove non-printable characters
value = fieldValues.replace(/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F]/g, '\uFFFD ');
}
} else {
if (!Array.isArray(fieldValues)) {
fieldValues = [fieldValues];
}
for (const fieldValue of fieldValues) {
if (fieldValue === null) {
continue;
}
if (modelClass.prototype instanceof BaseModel) {
// this.mapModelAttributes(fieldValue, childNode);
value = '<ul>';
Object.keys(fieldValue).forEach((prop) => {
let modelValue = fieldValue[prop];
// console.log(`${prop}: ${value}`);
if (modelValue != null) {
if (modelValue instanceof DateTime) {
modelValue = modelValue.toFormat('yyyy-MM-dd HH:mm:ss').trim();
} else {
modelValue = modelValue.toString().trim();
}
// Replace invalid XML-1.0-Characters by UTF-8 replacement character.
modelValue = modelValue.replace(/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F]/g, '\uFFFD ');
value = value + '<li>' + prop + ' : ' + modelValue + '</li>';
}
});
value = value + '</ul>';
} else if (fieldValue instanceof DateTime) {
// console.log('Value is a luxon date');
// this.mapDateAttributes(fieldValue, childNode);
value = value + ' Year ' + fieldValue.year.toString();
value = value + ' Month ' + fieldValue.month.toString();
value = value + ' Day ' + fieldValue.day.toString();
value = value + ' Hour ' + fieldValue.hour.toString();
value = value + ' Minute ' + fieldValue.minute.toString();
value = value + ' Second ' + fieldValue.second.toString();
value = value + ' UnixTimestamp ' + fieldValue.toUnixInteger().toString();
let zoneName = fieldValue.zoneName ? fieldValue.zoneName : '';
value = value + ' Timezone ' + zoneName;
}
}
}
if (value != '') {
fields[fieldName] = value;
}
}
return inertia.render('Reviewer/Dataset/Review', {
dataset,
fields: fields,
can: {
review: await auth.user?.can(['dataset-review']),
reject: await auth.user?.can(['dataset-review-reject']),
},
});
}
public async reviewUpdate({ request, response }: HttpContext) {
const id = request.param('id');
// const { id } = params;
const dataset = await Dataset.findOrFail(id);
const validStates = ['approved', 'rejected_to_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be reviewed. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('reviewer.dataset.list');
}
dataset.server_state = 'reviewed';
// if editor has rejected to reviewer:
if (dataset.reject_editor_note != null) {
dataset.reject_editor_note = null;
}
try {
// await dataset.related('editor').associate(user); // already persists the change
await dataset.save();
return response.toRoute('reviewer.dataset.list').flash('message', `You have successfully reviewed dataset ${dataset.id}!`);
} catch (error) {
// Handle any errors
console.error(error);
return response.status(500).json({ error: 'An error occurred while reviewing the data.' });
}
}
public async reject({ request, inertia, response }: HttpContext) {
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
// .preload('titles')
// .preload('descriptions')
.preload('editor', (builder) => {
builder.select('id', 'login', 'email');
})
.firstOrFail();
const validStates = ['approved', 'rejected_to_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// session.flash('errors', 'Invalid server state!');
return response
.flash(
'warning',
`Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
)
.redirect()
.toRoute('reviewer.dataset.list');
}
return inertia.render('Reviewer/Dataset/Reject', {
dataset,
});
}
public async rejectUpdate({ request, response, auth }: HttpContext) {
const authUser = auth.user!;
const id = request.param('id');
const dataset = await Dataset.query()
.where('id', id)
.preload('editor', (builder) => {
builder.select('id', 'login', 'email');
})
.firstOrFail();
// const newSchema = schema.create({
// server_state: schema.string({ trim: true }),
// reject_reviewer_note: schema.string({ trim: true }, [rules.minLength(10), rules.maxLength(500)]),
// });
const newSchema = vine.object({
server_state: vine.string().trim(),
reject_reviewer_note: vine.string().trim().minLength(10).maxLength(500),
send_mail: vine.boolean().optional(),
});
try {
// await request.validate({ schema: newSchema });
const validator = vine.compile(newSchema);
await request.validateUsing(validator);
} catch (error) {
// return response.badRequest(error.messages);
throw error;
}
const validStates = ['approved', 'rejected_to_reviewer'];
if (!validStates.includes(dataset.server_state)) {
// throw new Error('Invalid server state!');
// return response.flash('warning', 'Invalid server state. Dataset cannot be released to editor').redirect().back();
return response
.flash(
`Invalid server state. Dataset with id ${id} cannot be rejected. Dataset has server state ${dataset.server_state}.`,
'warning',
)
.redirect()
.toRoute('reviewer.dataset.list');
}
// dataset.server_state = 'reviewed';
dataset.server_state = 'rejected_reviewer';
const rejectReviewerNote = request.input('reject_reviewer_note', '');
dataset.reject_reviewer_note = rejectReviewerNote;
// add logic for sending reject message
const sendMail = request.input('send_email', false);
// const validRecipientEmail = await this.checkEmailDomain('arno.kaimbacher@outlook.at');
const validationResult = await validate({
email: dataset.editor.email,
validateSMTP: false,
});
const validRecipientEmail: boolean = validationResult.valid;
// let emailStatusMessage = '';
if (sendMail == true) {
if (dataset.editor.email && validRecipientEmail) {
try {
await mail.send((message) => {
message.to(dataset.editor.email).subject('Dataset Rejection Notification').html(`
<p>Dear editor ${dataset.editor.login},</p>
<p>Your approved dataset with ID ${dataset.id} has been rejected.</p>
<p>Reason for rejection: ${rejectReviewerNote}</p>
<p>Best regards,<br>Your Tethys reviewer: ${authUser.login}</p>
`);
});
// emailStatusMessage = ` A rejection email was successfully sent to ${dataset.editor.email}.`;
} catch (error) {
logger.error(error);
return response
.flash('Dataset has not been rejected due to an email error: ' + error.message, 'error')
.toRoute('reviewer.dataset.list');
}
} else {
// emailStatusMessage = ` However, the email could not be sent because the editor's email address (${dataset.editor.email}) is not valid.`;
}
}
await dataset.save();
return response
.toRoute('reviewer.dataset.list')
.flash(`You have rejected dataset ${dataset.id} back to editor ${dataset.editor.login}!`, 'message');
}
// public async download({ params, response }: HttpContext) {
// const id = params.id;
// // Find the file by ID
// const file = await File.findOrFail(id);
// // const filePath = await drive.use('local').getUrl('/'+ file.filePath)
// const filePath = file.filePath;
// const fileExt = file.filePath.split('.').pop() || '';
// // Set the response headers and download the file
// response.header('Content-Type', file.mime_type || 'application/octet-stream');
// response.attachment(`${file.label}.${fileExt}`);
// return response.download(filePath);
// }
public async download({ params, response }: HttpContext) {
const id = params.id;
// Find the file by ID
const file = await File.findOrFail(id);
// const filePath = await drive.use('local').getUrl('/'+ file.filePath)
const filePath = file.filePath;
const fileExt = file.filePath.split('.').pop() || '';
// Check if label already includes the extension
const fileName = file.label.toLowerCase().endsWith(`.${fileExt.toLowerCase()}`) ? file.label : `${file.label}.${fileExt}`;
// Set the response headers and download the file
response
.header('Cache-Control', 'no-cache private')
.header('Content-Description', 'File Transfer')
.header('Content-Type', file.mime_type || 'application/octet-stream')
// .header('Content-Disposition', 'inline; filename=' + fileName)
.header('Content-Transfer-Encoding', 'binary')
.header('Access-Control-Allow-Origin', '*')
.header('Access-Control-Allow-Methods', 'GET');
response.attachment(fileName);
return response.download(filePath);
}
}

File diff suppressed because it is too large

View file

@ -1,4 +1,4 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import type { HttpContext } from '@adonisjs/core/http';
// import User from 'App/Models/User';
// import Dataset from 'App/Models/Dataset';
// import License from 'App/Models/License';
@ -10,12 +10,11 @@ import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
// import Collection from 'App/Models/Collection';
// import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
// import dayjs from 'dayjs';
import Person from 'App/Models/Person';
import type { ModelQueryBuilderContract } from '@ioc:Adonis/Lucid/Orm';
import Person from '#models/person';
import { ModelQueryBuilderContract } from "@adonisjs/lucid/types/model";
export default class PersonController {
public async index({ auth, request, inertia }: HttpContextContract) {
public async index({ auth, request, inertia }: HttpContext) {
// const user = (await User.find(auth.user?.id)) as User;
const page = request.input('page', 1);
let persons: ModelQueryBuilderContract<typeof Person, Person> = Person.query();

View file

@ -1,61 +0,0 @@
/*
|--------------------------------------------------------------------------
| Http Exception Handler
|--------------------------------------------------------------------------
|
| AdonisJs will forward all exceptions occurred during an HTTP request to
| the following class. You can learn more about exception handling by
| reading docs.
|
| The exception handler extends a base `HttpExceptionHandler` which is not
| mandatory, however it can do lot of heavy lifting to handle the errors
| properly.
|
*/
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Logger from '@ioc:Adonis/Core/Logger';
import HttpExceptionHandler from '@ioc:Adonis/Core/HttpExceptionHandler';
export default class ExceptionHandler extends HttpExceptionHandler {
protected statusPages = {
'401,403': 'errors/unauthorized',
'404': 'errors/not-found',
'500..599': 'errors/server-error',
};
constructor() {
super(Logger);
}
public async handle(error: any, ctx: HttpContextContract) {
const { response, request, inertia } = ctx;
/**
* Handle failed authentication attempt
*/
// if (['E_INVALID_AUTH_PASSWORD', 'E_INVALID_AUTH_UID'].includes(error.code)) {
// session.flash('errors', { login: error.message });
// return response.redirect('/login');
// }
// if ([401].includes(error.status)) {
// session.flash('errors', { login: error.message });
// return response.redirect('/dashboard');
// }
// https://github.com/inertiajs/inertia-laravel/issues/56
if (request.header('X-Inertia') && [500, 503, 404, 403, 401].includes(response.getStatus())) {
return inertia.render('Error', {
status: response.getStatus(),
message: error.message,
});
// ->toResponse($request)
// ->setStatusCode($response->status());
}
/**
* Forward rest of the exceptions to the parent class
*/
return super.handle(error, ctx);
}
}

View file

@ -1,34 +0,0 @@
import { Response } from '@adonisjs/http-server/build/src/Response';
import { ServerResponse, IncomingMessage } from 'http';
import { RouterContract } from '@ioc:Adonis/Core/Route';
import { EncryptionContract } from '@ioc:Adonis/Core/Encryption';
import { ResponseConfig, ResponseContract } from '@ioc:Adonis/Core/Response';
class FlashResponse extends Response implements ResponseContract {
protected static macros = {};
protected static getters = {};
constructor(
public request: IncomingMessage,
public response: ServerResponse,
flashEncryption: EncryptionContract,
flashConfig: ResponseConfig,
flashRouter: RouterContract,
) {
super(request, response, flashEncryption, flashConfig, flashRouter);
}
public nonce: string;
public flash(key: string, message: any): this {
// Store the flash message in the session
this.ctx?.session.flash(key, message);
return this;
}
public toRoute(route: string): this {
// Redirect to the specified route
super.redirect().toRoute(route);
return this;
}
}
export default FlashResponse;

View file

@ -0,0 +1,231 @@
import DocumentXmlCache from '#models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import Dataset from '#models/dataset';
import Strategy from './Strategy.js';
import { builder } from 'xmlbuilder2';
import logger from '@adonisjs/core/services/logger';
/**
* Configuration for XML serialization
*
* @interface XmlSerializationConfig
*/
export interface XmlSerializationConfig {
/** The dataset model to serialize */
model: Dataset;
/** DOM representation (if available) */
dom?: XMLBuilder;
/** Fields to exclude from serialization */
excludeFields: Array<string>;
/** Whether to exclude empty fields */
excludeEmpty: boolean;
/** Base URI for xlink:ref elements */
baseUri: string;
}
/**
* Options for controlling serialization behavior
*/
export interface SerializationOptions {
/** Enable XML caching */
enableCaching?: boolean;
/** Exclude empty fields from output */
excludeEmptyFields?: boolean;
/** Custom base URI */
baseUri?: string;
/** Fields to exclude */
excludeFields?: string[];
}
/**
* DatasetXmlSerializer
*
* Handles XML serialization of Dataset models with intelligent caching.
* Generates XML representations and manages cache lifecycle to optimize performance.
*
* @example
* ```typescript
* const serializer = new DatasetXmlSerializer(dataset);
* serializer.enableCaching();
* serializer.excludeEmptyFields();
*
* const xmlDocument = await serializer.toXmlDocument();
* ```
*/
export default class DatasetXmlSerializer {
private readonly config: XmlSerializationConfig;
private readonly strategy: Strategy;
private cache: DocumentXmlCache | null = null;
private cachingEnabled = false;
constructor(dataset: Dataset, options: SerializationOptions = {}) {
this.config = {
model: dataset,
excludeEmpty: options.excludeEmptyFields ?? false,
baseUri: options.baseUri ?? '',
excludeFields: options.excludeFields ?? [],
};
this.strategy = new Strategy({
excludeEmpty: options.excludeEmptyFields ?? false,
baseUri: options.baseUri ?? '',
excludeFields: options.excludeFields ?? [],
model: dataset,
});
if (options.enableCaching) {
this.cachingEnabled = true;
}
}
/**
* Enable caching for XML generation
* When enabled, generated XML is stored in database for faster retrieval
*/
public enableCaching(): this {
this.cachingEnabled = true;
return this;
}
/**
* Disable caching for XML generation
*/
public disableCaching(): this {
this.cachingEnabled = false;
return this;
}
set model(model: Dataset) {
this.config.model = model;
}
/**
* Configure to exclude empty fields from XML output
*/
public excludeEmptyFields(): this {
this.config.excludeEmpty = true;
return this;
}
/**
* Set the cache instance directly (useful when preloading)
* @param cache - The DocumentXmlCache instance
*/
public setCache(cache: DocumentXmlCache): this {
this.cache = cache;
return this;
}
/**
* Get the current cache instance
*/
public getCache(): DocumentXmlCache | null {
return this.cache;
}
/**
* Get DOM document with intelligent caching
* Returns cached version if valid, otherwise generates new document
*/
public async toXmlDocument(): Promise<XMLBuilder | null> {
const dataset = this.config.model;
// Try to get from cache first
let cachedDocument: XMLBuilder | null = await this.retrieveFromCache();
if (cachedDocument) {
logger.debug(`Using cached XML for dataset ${dataset.id}`);
return cachedDocument;
}
// Generate fresh document
logger.debug(`[DatasetXmlSerializer] Cache miss - generating fresh XML for dataset ${dataset.id}`);
const freshDocument = await this.strategy.createDomDocument();
if (!freshDocument) {
logger.error(`[DatasetXmlSerializer] Failed to generate XML for dataset ${dataset.id}`);
return null;
}
// Cache if caching is enabled
if (this.cachingEnabled) {
await this.persistToCache(freshDocument, dataset);
}
// Extract the dataset-specific node
return this.extractDatasetNode(freshDocument);
}
/**
* Generate XML string representation
* Convenience method that converts XMLBuilder to string
*/
public async toXmlString(): Promise<string | null> {
const document = await this.toXmlDocument();
return document ? document.end({ prettyPrint: false }) : null;
}
/**
* Persist generated XML document to cache
* Non-blocking - failures are logged but don't interrupt the flow
*/
private async persistToCache(domDocument: XMLBuilder, dataset: Dataset): Promise<void> {
try {
this.cache = this.cache || new DocumentXmlCache();
this.cache.document_id = dataset.id;
this.cache.xml_version = 1;
this.cache.server_date_modified = dataset.server_date_modified.toFormat('yyyy-MM-dd HH:mm:ss');
this.cache.xml_data = domDocument.end();
await this.cache.save();
logger.debug(`Cached XML for dataset ${dataset.id}`);
} catch (error) {
logger.error(`Failed to cache XML for dataset ${dataset.id}: ${error.message}`);
// Don't throw - caching failure shouldn't break the flow
}
}
/**
* Extract the Rdr_Dataset node from full document
*/
private extractDatasetNode(domDocument: XMLBuilder): XMLBuilder | null {
const node = domDocument.find((n) => n.node.nodeName === 'Rdr_Dataset', false, true)?.node;
if (node) {
return builder({ version: '1.0', encoding: 'UTF-8', standalone: true }, node);
}
return domDocument;
}
/**
* Attempt to retrieve valid cached XML document
* Returns null if cache doesn't exist or is stale
*/
private async retrieveFromCache(): Promise<XMLBuilder | null> {
const dataset: Dataset = this.config.model;
if (!this.cache) {
return null;
}
// Check if cache is still valid
const actuallyCached = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
if (!actuallyCached) {
logger.debug(`Cache invalid for dataset ${dataset.id}`);
return null;
}
// cache is up to date: return the cached document
try {
if (this.cache) {
return this.cache.getDomDocument();
} else {
return null;
}
} catch (error) {
logger.error(`Failed to retrieve cached document for dataset ${dataset.id}: ${error.message}`);
return null;
}
}
}

View file

@ -0,0 +1,326 @@
import DoiClientContract from '#app/Library/Doi/DoiClientContract';
import DoiClientException from '#app/exceptions/DoiClientException';
import { StatusCodes } from 'http-status-codes';
import logger from '@adonisjs/core/services/logger';
import { AxiosResponse } from 'axios';
import { default as axios } from 'axios';
export class DoiClient implements DoiClientContract {
public username: string;
public password: string;
public serviceUrl: string;
public apiUrl: string;
constructor() {
// const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
this.username = process.env.DATACITE_USERNAME || '';
this.password = process.env.DATACITE_PASSWORD || '';
this.serviceUrl = process.env.DATACITE_SERVICE_URL || '';
this.apiUrl = process.env.DATACITE_API_URL || 'https://api.datacite.org';
if (this.username === '' || this.password === '' || this.serviceUrl === '') {
const message = 'Missing configuration settings to properly initialize DOI client';
logger.error(message);
throw new DoiClientException(StatusCodes.BAD_REQUEST, message);
}
}
/**
* Creates a DOI with the given identifier
*
* @param doiValue The desired DOI identifier e.g. '10.5072/tethys.999',
* @param xmlMeta
* @param landingPageUrl e.g. https://www.tethys.at/dataset/1
*
* @return Promise<AxiosResponse<any>> The http response in the form of an axios response
*/
public async registerDoi(doiValue: string, xmlMeta: string, landingPageUrl: string): Promise<AxiosResponse<any>> {
//step 1: register metadata via xml upload
// state draft
// let response;
// let url = `${this.serviceUrl}/metadata/${doiValue}`; //https://mds.test.datacite.org/metadata/10.21388/tethys.213
const auth = {
username: this.username,
password: this.password,
};
let headers = {
'Content-Type': 'application/xml;charset=UTF-8',
};
try {
const metadataResponse = await axios.put(`${this.serviceUrl}/metadata/${doiValue}`, xmlMeta, { auth, headers });
// Response Codes
// 201 Created: operation successful
// 401 Unauthorised: no login
// 403 Forbidden: login problem, quota exceeded
// 415 Wrong Content Type : Not including content type in the header.
// 422 Unprocessable Entity : invalid XML
// let test = metadataResponse.data; // 'OK (10.21388/TETHYS.213)'
if (metadataResponse.status !== 201) {
const message = `Unexpected DataCite MDS response code ${metadataResponse.status}`;
logger.error(message);
throw new DoiClientException(metadataResponse.status, message);
}
const doiResponse = await axios.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, {
auth,
headers,
});
// Response Codes
// 201 Created: operation successful
// 400 Bad Request: request body must be exactly two lines: DOI and URL; wrong domain, wrong prefix;
// 401 Unauthorised: no login
// 403 Forbidden: login problem, quota exceeded
// 412 Precondition failed: metadata must be uploaded first.
if (doiResponse.status !== 201) {
const message = `Unexpected DataCite MDS response code ${doiResponse.status}`;
logger.error(message);
throw new DoiClientException(doiResponse.status, message);
}
return doiResponse;
} catch (error) {
// const message = `request for registering DOI failed with ${error.message}`;
// Handle the error, log it, or rethrow as needed
logger.error(error.message);
throw new DoiClientException(error.response.status, error.response.data);
}
}
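// Minimal usage sketch (DOI, metadata variable and landing page URL are illustrative values):
// const client = new DoiClient();
// const response = await client.registerDoi(
//     '10.5072/tethys.999',
//     dataciteXmlMetadata, // DataCite XML string describing the dataset
//     'https://www.tethys.at/dataset/999',
// );
// // a 201 from both MDS calls means the metadata was stored and the DOI now resolves to the landing page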
/**
* Retrieves DOI information from DataCite REST API
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @returns Promise with DOI information or null if not found
*/
public async getDoiInfo(doiValue: string): Promise<any | null> {
try {
// Use configurable DataCite REST API URL
const dataciteApiUrl = `${this.apiUrl}/dois/${doiValue}`;
const response = await axios.get(dataciteApiUrl, {
headers: {
Accept: 'application/vnd.api+json',
},
});
if (response.status === 200 && response.data.data) {
return {
created: response.data.data.attributes.created,
registered: response.data.data.attributes.registered,
updated: response.data.data.attributes.updated,
published: response.data.data.attributes.published,
state: response.data.data.attributes.state,
url: response.data.data.attributes.url,
metadata: response.data.data.attributes,
};
}
} catch (error) {
if (error.response?.status === 404) {
logger.debug(`DOI ${doiValue} not found in DataCite`);
return null;
}
logger.debug(`DataCite REST API failed for ${doiValue}: ${error.message}`);
// Fallback to MDS API
return await this.getDoiInfoFromMds(doiValue);
}
return null;
}
/**
* Fallback method to get DOI info from MDS API
*
* @param doiValue The DOI identifier
* @returns Promise with basic DOI information or null
*/
private async getDoiInfoFromMds(doiValue: string): Promise<any | null> {
try {
const auth = {
username: this.username,
password: this.password,
};
// Get DOI URL
const doiResponse = await axios.get(`${this.serviceUrl}/doi/${doiValue}`, { auth });
if (doiResponse.status === 200) {
// Get metadata if available
try {
const metadataResponse = await axios.get(`${this.serviceUrl}/metadata/${doiValue}`, {
auth,
headers: {
Accept: 'application/xml',
},
});
return {
url: doiResponse.data.trim(),
metadata: metadataResponse.data,
created: new Date().toISOString(), // MDS doesn't provide creation dates
registered: new Date().toISOString(), // Use current time as fallback
source: 'mds',
};
} catch (metadataError) {
// Return basic info even if metadata fetch fails
return {
url: doiResponse.data.trim(),
created: new Date().toISOString(),
registered: new Date().toISOString(),
source: 'mds',
};
}
}
} catch (error) {
if (error.response?.status === 404) {
logger.debug(`DOI ${doiValue} not found in DataCite MDS`);
return null;
}
logger.debug(`DataCite MDS API failed for ${doiValue}: ${error.message}`);
}
return null;
}
/**
* Checks if a DOI exists in DataCite
*
* @param doiValue The DOI identifier
* @returns Promise<boolean> True if DOI exists
*/
public async doiExists(doiValue: string): Promise<boolean> {
const doiInfo = await this.getDoiInfo(doiValue);
return doiInfo !== null;
}
/**
* Gets the last modification date of a DOI
*
* @param doiValue The DOI identifier
* @returns Promise<Date | null> Last modification date or creation date if never updated, null if not found
*/
public async getDoiLastModified(doiValue: string): Promise<Date | null> {
const doiInfo = await this.getDoiInfo(doiValue);
if (doiInfo) {
// Use updated date if available, otherwise fall back to created/registered date
const dateToUse = doiInfo.updated || doiInfo.registered || doiInfo.created;
if (dateToUse) {
logger.debug(
`DOI ${doiValue}: Using ${doiInfo.updated ? 'updated' : doiInfo.registered ? 'registered' : 'created'} date: ${dateToUse}`,
);
return new Date(dateToUse);
}
}
return null;
}
/**
* Makes a DOI unfindable (registered but not discoverable)
* Note: DOIs cannot be deleted, only made unfindable
* await doiClient.makeDoiUnfindable('10.21388/tethys.231');
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @returns Promise<AxiosResponse<any>> The http response
*/
public async makeDoiUnfindable(doiValue: string): Promise<AxiosResponse<any>> {
const auth = {
username: this.username,
password: this.password,
};
try {
// First, check if DOI exists
const exists = await this.doiExists(doiValue);
if (!exists) {
throw new DoiClientException(404, `DOI ${doiValue} not found`);
}
// Delete the DOI URL mapping to make it unfindable
// This removes the URL but keeps the metadata registered
const response = await axios.delete(`${this.serviceUrl}/doi/${doiValue}`, { auth });
// Response Codes for DELETE /doi/{doi}
// 200 OK: operation successful
// 401 Unauthorized: no login
// 403 Forbidden: login problem, quota exceeded
// 404 Not Found: DOI does not exist
if (response.status !== 200) {
const message = `Unexpected DataCite MDS response code ${response.status}`;
logger.error(message);
throw new DoiClientException(response.status, message);
}
logger.info(`DOI ${doiValue} successfully made unfindable`);
return response;
} catch (error) {
logger.error(`Failed to make DOI ${doiValue} unfindable: ${error.message}`);
if (error instanceof DoiClientException) {
throw error;
}
throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
}
}
/**
* Makes a DOI findable again by re-registering the URL
* await doiClient.makeDoiFindable(
* '10.21388/tethys.231',
* 'https://doi.dev.tethys.at/10.21388/tethys.231'
* );
*
* @param doiValue The DOI identifier e.g. '10.5072/tethys.999'
* @param landingPageUrl The landing page URL
* @returns Promise<AxiosResponse<any>> The http response
*/
public async makeDoiFindable(doiValue: string, landingPageUrl: string): Promise<AxiosResponse<any>> {
const auth = {
username: this.username,
password: this.password,
};
try {
// Re-register the DOI with its URL to make it findable again
const response = await axios.put(`${this.serviceUrl}/doi/${doiValue}`, `doi=${doiValue}\nurl=${landingPageUrl}`, { auth });
// Response Codes for PUT /doi/{doi}
// 201 Created: operation successful
// 400 Bad Request: request body must be exactly two lines: DOI and URL
// 401 Unauthorized: no login
// 403 Forbidden: login problem, quota exceeded
// 412 Precondition failed: metadata must be uploaded first
if (response.status !== 201) {
const message = `Unexpected DataCite MDS response code ${response.status}`;
logger.error(message);
throw new DoiClientException(response.status, message);
}
logger.info(`DOI ${doiValue} successfully made findable again`);
return response;
} catch (error) {
logger.error(`Failed to make DOI ${doiValue} findable: ${error.message}`);
if (error instanceof DoiClientException) {
throw error;
}
throw new DoiClientException(error.response?.status || 500, error.response?.data || error.message);
}
}
/**
* Gets the current state of a DOI (draft, registered, findable)
* const state = await doiClient.getDoiState('10.21388/tethys.231');
* console.log(`Current state: ${state}`); // 'findable'
*
* @param doiValue The DOI identifier
* @returns Promise<string | null> The DOI state or null if not found
*/
public async getDoiState(doiValue: string): Promise<string | null> {
const doiInfo = await this.getDoiInfo(doiValue);
return doiInfo?.state || null;
}
}

View file

@ -0,0 +1,13 @@
// import ResumptionToken from './ResumptionToken';
import { AxiosResponse } from 'axios';
export default interface DoiClientContract {
username: string;
password: string;
serviceUrl: string;
// prefix: string;
// base_domain: string;
registerDoi(doiValue: string, xmlMeta: string, landingPageUrl: string): Promise<AxiosResponse<any>>;
// get(key: string): Promise<ResumptionToken | null>;
// set(token: ResumptionToken): Promise<string>;
}

View file

@ -0,0 +1,60 @@
export default class ResumptionToken {
private _documentIds: number[] = [];
private _metadataPrefix = '';
private _resumptionId = '';
private _startPosition = 0;
private _totalIds = 0;
private _queryParams: Record<string, any> = {};
get key(): string {
return this.metadataPrefix + this.startPosition + this.totalIds;
}
get documentIds(): number[] {
return this._documentIds;
}
set documentIds(idsToStore: number | number[]) {
this._documentIds = Array.isArray(idsToStore) ? idsToStore : [idsToStore];
}
get metadataPrefix(): string {
return this._metadataPrefix;
}
set metadataPrefix(value: string) {
this._metadataPrefix = value;
}
get resumptionId(): string {
return this._resumptionId;
}
set resumptionId(resumptionId: string) {
this._resumptionId = resumptionId;
}
get startPosition(): number {
return this._startPosition;
}
set startPosition(startPosition: number) {
this._startPosition = startPosition;
}
get totalIds(): number {
return this._totalIds;
}
set totalIds(totalIds: number) {
this._totalIds = totalIds;
}
get queryParams(): Record<string, any> {
return this._queryParams;
}
set queryParams(params: Record<string, any>) {
this._queryParams = params;
}
}

View file

@ -0,0 +1,11 @@
import ResumptionToken from './ResumptionToken.js';
export default abstract class TokenWorkerContract {
abstract ttl: number;
abstract isConnected: boolean;
abstract connect(): void;
abstract close(): void;
abstract get(key: string): Promise<ResumptionToken | null>;
abstract set(token: ResumptionToken, browserFingerprint: string): Promise<string>;
}

View file

@ -0,0 +1,145 @@
import ResumptionToken from './ResumptionToken.js';
import { createClient, RedisClientType } from 'redis';
import InternalServerErrorException from '#app/exceptions/InternalServerException';
import { sprintf } from 'sprintf-js';
import dayjs from 'dayjs';
import TokenWorkerContract from './TokenWorkerContract.js';
export default class TokenWorkerService implements TokenWorkerContract {
protected filePrefix = 'rs_';
protected fileExtension = 'txt';
private cache: RedisClientType;
public ttl: number;
private url: string;
private connected = false;
constructor(ttl: number) {
this.ttl = ttl; // time to live
this.url = process.env.REDIS_URL || 'redis://127.0.0.1:6379';
}
public async connect() {
this.cache = createClient({ url: this.url });
this.cache.on('error', (err) => {
this.connected = false;
console.log('[Redis] Redis Client Error: ', err);
});
this.cache.on('connect', () => {
this.connected = true;
});
await this.cache.connect();
}
public get isConnected(): boolean {
return this.connected;
}
public async has(key: string): Promise<boolean> {
const result = await this.cache.get(key);
return result !== undefined && result !== null;
}
/**
* Simplified set method that stores the token using a browser fingerprint key.
* If the token for that fingerprint already exists and its documentIds match the new token,
* then the fingerprint key is simply returned.
*/
public async set(token: ResumptionToken, browserFingerprint: string): Promise<string> {
// Generate a 15-digit unique number string based on the fingerprint
const uniqueNumberKey = this.createUniqueNumberFromFingerprint(browserFingerprint, token.documentIds, token.totalIds);
// Optionally, you could prefix it if desired, e.g. 'rs_' + uniqueNumberKey
const fingerprintKey = uniqueNumberKey;
// const fingerprintKey = `rs_fp_${browserFingerprint}`;
const existingTokenString = await this.cache.get(fingerprintKey);
if (existingTokenString) {
const existingToken = this.parseToken(existingTokenString);
if (this.arraysAreEqual(existingToken.documentIds, token.documentIds)) {
return fingerprintKey;
}
}
const serialToken = JSON.stringify(token);
await this.cache.setEx(fingerprintKey, this.ttl, serialToken);
return fingerprintKey;
}
// Updated helper method to generate a unique key based on fingerprint and documentIds
private createUniqueNumberFromFingerprint(browserFingerprint: string, documentIds: number[], totalIds: number): string {
// Combine the fingerprint, document IDs and totalIds to produce the input string
const combined = browserFingerprint + ':' + documentIds.join('-') + ':' + totalIds;
// Simple hash algorithm
let hash = 0;
for (let i = 0; i < combined.length; i++) {
hash = (hash << 5) - hash + combined.charCodeAt(i);
hash |= 0; // Convert to 32-bit integer
}
// Ensure positive number and limit it to at most 15 digits
const positiveHash = Math.abs(hash) % 1000000000000000;
// Pad with trailing zeros to ensure a 15-digit string
return positiveHash.toString().padEnd(15, '0');
}
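// The same fingerprint/documentIds/totalIds combination always yields the same 15-digit key,
// e.g. something like '482930175634000' (illustrative value only).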
// Add a helper function to compare two arrays of numbers with identical order
private arraysAreEqual(arr1: number[], arr2: number[]): boolean {
if (arr1.length !== arr2.length) {
return false;
}
return arr1.every((num, index) => num === arr2[index]);
}
// public async set(token: ResumptionToken): Promise<string> {
// const uniqueName = await this.generateUniqueName();
// const serialToken = JSON.stringify(token);
// await this.cache.setEx(uniqueName, this.ttl, serialToken);
// return uniqueName;
// }
private async generateUniqueName(): Promise<string> {
let fc = 0;
const uniqueId = dayjs().unix().toString();
let uniqueName: string;
let cacheKeyExists: boolean;
do {
// format values
// %s - String
// %d - Signed decimal number (negative, zero or positive)
// [0-9] (Specifies the minimum field width of the formatted value)
uniqueName = sprintf('%s%05d', uniqueId, fc++);
cacheKeyExists = await this.has(uniqueName);
} while (cacheKeyExists);
return uniqueName;
}
public async get(key: string): Promise<ResumptionToken | null> {
if (!this.cache) {
throw new InternalServerErrorException('Resumption token cache is not available!');
}
const result = await this.cache.get(key);
return result ? this.parseToken(result) : null;
}
private parseToken(result: string): ResumptionToken {
const rToken: ResumptionToken = new ResumptionToken();
const parsed = JSON.parse(result);
Object.assign(rToken, parsed);
return rToken;
}
public del(key: string) {
this.cache.del(key);
}
public flush() {
this.cache.flushAll();
}
public async close() {
await this.cache.disconnect();
this.connected = false;
}
}
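// Usage sketch (assumed wiring; the ttl is given in seconds and the metadata prefix is illustrative):
// const worker = new TokenWorkerService(60 * 60 * 24);
// await worker.connect();
// const token = new ResumptionToken();
// token.documentIds = [10, 11, 12];
// token.totalIds = 120;
// token.metadataPrefix = 'oai_dc';
// const key = await worker.set(token, browserFingerprint); // fingerprint from getBrowserFingerprint()
// const restored = await worker.get(key); // null once the ttl has expired
// await worker.close();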

View file

@ -1,8 +1,8 @@
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import { create } from 'xmlbuilder2';
import Dataset from 'App/Models/Dataset';
import Field from './Field';
import BaseModel from 'App/Models/BaseModel';
import Dataset from '#models/dataset';
import Field from './Field.js';
import BaseModel from '#models/base_model';
import { DateTime } from 'luxon';
export default class Strategy {
@ -10,7 +10,7 @@ export default class Strategy {
private config;
private xml: XMLBuilder;
constructor(config) {
constructor(config: any) {
this.version = 1.0;
this.config = config;
}
@ -45,11 +45,11 @@ export default class Strategy {
for (const fieldname of fieldsDiff) {
const field = model.getField(fieldname);
this.mapField(field, modelNode);
this.mapField(field as Field, modelNode);
}
}
private mapField(field, modelNode: XMLBuilder) {
private mapField(field: Field, modelNode: XMLBuilder) {
const modelClass = field.getValueModelClass();
let fieldValues = field.getValue();
@ -107,10 +107,10 @@ export default class Strategy {
childNode.att('Timezone', zoneName);
}
private mapModelAttributes(myObject, childNode: XMLBuilder) {
private mapModelAttributes(myObject: any, childNode: XMLBuilder) {
Object.keys(myObject).forEach((prop) => {
let value = myObject[prop];
console.log(`${prop}: ${value}`);
// console.log(`${prop}: ${value}`);
if (value != null) {
if (value instanceof DateTime) {
value = value.toFormat('yyyy-MM-dd HH:mm:ss').trim();
@ -161,7 +161,7 @@ export default class Strategy {
return fieldValues?.toString().trim();
}
private createModelNode(model) {
private createModelNode(model: Dataset) {
const className = 'Rdr_' + model.constructor.name.split('\\').pop(); //Rdr_Dataset
// return dom.createElement(className);
return this.xml.root().ele(className);

195
app/Library/Utils/Index.ts Normal file
View file

@ -0,0 +1,195 @@
import Dataset from '#models/dataset';
import { Client } from '@opensearch-project/opensearch';
import { create } from 'xmlbuilder2';
import SaxonJS from 'saxon-js';
import DatasetXmlSerializer from '#app/Library/DatasetXmlSerializer';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import logger from '@adonisjs/core/services/logger';
import { readFileSync } from 'fs';
import { DateTime } from 'luxon';
// import Config from '@ioc:Adonis/Core/Config';
import { getDomain } from '#app/utils/utility-functions';
// const opensearchNode = process.env.OPENSEARCH_HOST || 'localhost';
// const client = new Client({ node: `http://${opensearchNode}` }); // replace with your OpenSearch endpoint
interface XslTParameter {
[key: string]: any;
}
export default {
// opensearchNode: process.env.OPENSEARCH_HOST || 'localhost',
client: new Client({ node: `http://${process.env.OPENSEARCH_HOST || 'localhost'}` }), // replace with your OpenSearch endpoint
async getDoiRegisterString(dataset: Dataset): Promise<string | undefined> {
try {
const proc = readFileSync('public/assets2/doi_datacite.sef.json');
const xsltParameter: XslTParameter = {};
let xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
const xmlString = xml.end({ prettyPrint: false });
// set timestamp
const date = DateTime.now();
const unixTimestamp = date.toUnixInteger();
xsltParameter['unixTimestamp'] = unixTimestamp;
// set prefix
let prefix = '';
// let base_domain = '';
// const datacite_environment = process.env.DATACITE_ENVIRONMENT || 'debug';
// if (datacite_environment === 'debug') {
// prefix = process.env.DATACITE_TEST_PREFIX || '';
// base_domain = process.env.TEST_BASE_DOMAIN || '';
// } else if (datacite_environment === 'production') {
// prefix = process.env.DATACITE_PREFIX || '';
// base_domain = process.env.BASE_DOMAIN || '';
// }
prefix = process.env.DATACITE_PREFIX || '';
xsltParameter['prefix'] = prefix;
const repIdentifier = 'tethys';
xsltParameter['repIdentifier'] = repIdentifier;
let xmlOutput; // = xmlString;
try {
const result = await SaxonJS.transform({
// stylesheetFileName: `${config.TMP_BASE_DIR}/data-quality/rules/iati.sef.json`,
stylesheetText: proc,
destination: 'serialized',
// sourceFileName: sourceFile,
sourceText: xmlString,
stylesheetParams: xsltParameter,
// logLevel: 10,
});
xmlOutput = result.principalResult;
} catch (error) {
logger.error('An error occurred during the XSLT transformation', error.message);
}
return xmlOutput;
} catch (error) {
logger.error(`An error occurred while creating the DOI registration string for dataset with publish_id ${dataset.publish_id}.`);
}
},
/**
* Index a dataset document to OpenSearch/Elasticsearch
*/
async indexDocument(dataset: Dataset, index_name: string): Promise<void> {
try {
// Load XSLT transformation file
const xsltProc = readFileSync('public/assets2/solr.sef.json');
// Transform dataset to JSON document
const jsonDoc: string = await this.getTransformedString(dataset, xsltProc);
const document = JSON.parse(jsonDoc);
// Index document to OpenSearch with the document JSON body
await this.client.index({
id: dataset.publish_id?.toString(),
index: index_name,
body: document,
refresh: true, // make immediately searchable
});
logger.info(`Dataset ${dataset.publish_id} successfully indexed to ${index_name}`);
} catch (error) {
logger.error(`Failed to index dataset ${dataset.publish_id}: ${error.message}`);
throw error; // Re-throw to allow caller to handle
}
},
/**
* Transform dataset XML to JSON using XSLT
*/
async getTransformedString(dataset: Dataset, proc: Buffer): Promise<string> {
// Generate XML string from dataset
const xmlString = await this.generateDatasetXml(dataset);
try {
// Apply XSLT transformation
const result = await SaxonJS.transform({
stylesheetText: proc,
destination: 'serialized',
sourceText: xmlString,
});
return result.principalResult;
} catch (error) {
logger.error(`An error occurred during the XSLT transformation, error: ${error.message}`);
return '';
}
},
/**
* Generate XML string from dataset model
*/
async generateDatasetXml(dataset: Dataset): Promise<string> {
const xml = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
const datasetNode = xml.root().ele('Dataset');
await createXmlRecord(dataset, datasetNode);
return xml.end({ prettyPrint: false });
},
};
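// Usage sketch (assumed call site, e.g. after a dataset is published; the import path follows the
// '#app/...' aliases used elsewhere and the index name is hypothetical):
// import utils from '#app/Library/Utils/Index.js';
// await utils.indexDocument(dataset, 'tethys-records');
// const dataciteXml = await utils.getDoiRegisterString(dataset);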
/**
* Create complete XML record for dataset
* Handles caching and metadata enrichment
*/
const createXmlRecord = async (dataset: Dataset, datasetNode: XMLBuilder): Promise<void> => {
const domNode = await getDatasetXmlDomNode(dataset);
if (!domNode) {
throw new Error(`Failed to generate XML DOM node for dataset ${dataset.id}`);
}
// Enrich with landing page URL
if (dataset.publish_id) {
addLandingPageAttribute(domNode, dataset.publish_id.toString());
}
// Add data type specification
addSpecInformation(domNode, `data-type:${dataset.type}`);
// Add collection information
if (dataset.collections) {
for (const coll of dataset.collections) {
const collRole = coll.collectionRole;
addSpecInformation(domNode, `${collRole.oai_name}:${coll.number}`);
}
}
datasetNode.import(domNode);
};
const getDatasetXmlDomNode = async (dataset: Dataset): Promise<XMLBuilder | null> => {
const serializer = new DatasetXmlSerializer(dataset).enableCaching().excludeEmptyFields();
// xmlModel.setModel(dataset);
// Load cache relationship if not already loaded
await dataset.load('xmlCache');
if (dataset.xmlCache) {
serializer.setCache(dataset.xmlCache);
}
// Generate or retrieve cached DOM document
const xmlDocument: XMLBuilder | null = await serializer.toXmlDocument();
return xmlDocument;
};
const addLandingPageAttribute = (domNode: XMLBuilder, dataid: string) => {
const baseDomain = process.env.OAI_BASE_DOMAIN || 'localhost';
const url = 'https://' + getDomain(baseDomain) + '/dataset/' + dataid;
// add attribute to the dataset xml element
domNode.att('landingpage', url);
};
const addSpecInformation = (domNode: XMLBuilder, information: string) => {
domNode.ele('SetSpec').att('Value', information);
};

View file

@ -1,117 +0,0 @@
import DocumentXmlCache from 'App/Models/DocumentXmlCache';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import Dataset from 'App/Models/Dataset';
import Strategy from './Strategy';
import { DateTime } from 'luxon';
/**
* This is the description of the interface
*
* @interface Conf
* @member {Model} model holds the current dataset model
* @member {XMLBuilder} dom holds the current DOM representation
* @member {Array<string>} excludeFields List of fields to skip on serialization.
* @member {boolean} excludeEmpty True, if empty fields get excluded from serialization.
* @member {string} baseUri Base URI for xlink:ref elements
*/
export interface Conf {
model: Dataset;
dom?: XMLBuilder;
excludeFields: Array<string>;
excludeEmpty: boolean;
baseUri: string;
}
export default class XmlModel {
private config: Conf;
// private strategy = null;
private cache: DocumentXmlCache | null = null;
private _caching = false;
private strategy: Strategy;
constructor(dataset: Dataset) {
// $this->strategy = new Strategy();// Opus_Model_Xml_Version1;
// $this->config = new Conf();
// $this->strategy->setup($this->config);
this.config = {
excludeEmpty: false,
baseUri: '',
excludeFields: [],
model: dataset,
};
this.strategy = new Strategy({
excludeEmpty: true,
baseUri: '',
excludeFields: [],
model: dataset,
});
}
set model(model: Dataset) {
this.config.model = model;
}
public excludeEmptyFields(): void {
this.config.excludeEmpty = true;
}
get xmlCache(): DocumentXmlCache | null {
return this.cache;
}
set xmlCache(cache: DocumentXmlCache) {
this.cache = cache;
}
get caching(): boolean {
return this._caching;
}
set caching(caching: boolean) {
this._caching = caching;
}
public async getDomDocument(): Promise<XMLBuilder | null> {
const dataset = this.config.model;
let domDocument: XMLBuilder | null = await this.getDomDocumentFromXmlCache();
if (domDocument == null) {
domDocument = await this.strategy.createDomDocument();
// domDocument = create({ version: '1.0', encoding: 'UTF-8', standalone: true }, '<root></root>');
if (this._caching) {
// caching is desired:
this.cache = this.cache || new DocumentXmlCache();
this.cache.document_id = dataset.id;
this.cache.xml_version = 1; // (int)$this->strategy->getVersion();
// this.cache.server_date_modified = dataset.server_date_modified.toFormat("yyyy-MM-dd HH:mm:ss");
this.cache.xml_data = domDocument.end();
await this.cache.save();
}
}
return domDocument;
}
private async getDomDocumentFromXmlCache(): Promise<XMLBuilder | null> {
const dataset: Dataset = this.config.model;
if (!this.cache) {
return null;
}
//.toFormat('YYYY-MM-DD HH:mm:ss');
let date: DateTime = dataset.server_date_modified;
const actuallyCached: boolean = await DocumentXmlCache.hasValidEntry(dataset.id, date);
if (!actuallyCached) {
return null;
}
//cache is actual return it for oai:
try {
if (this.cache) {
return this.cache.getDomDocument();
} else {
return null;
}
} catch (error) {
return null;
}
}
}

View file

@ -1,67 +0,0 @@
import { AuthenticationException } from '@adonisjs/auth/build/standalone';
import type { GuardsList } from '@ioc:Adonis/Addons/Auth';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
/**
* Auth middleware is meant to restrict un-authenticated access to a given route
* or a group of routes.
*
* You must register this middleware inside `start/kernel.ts` file under the list
* of named middleware.
*/
export default class AuthMiddleware {
/**
* The URL to redirect to when request is Unauthorized
*/
protected redirectTo = '/app/login';
/**
* Authenticates the current HTTP request against a custom set of defined
* guards.
*
* The authentication loop stops as soon as the user is authenticated using any
* of the mentioned guards and that guard will be used by the rest of the code
* during the current request.
*/
protected async authenticate(auth: HttpContextContract['auth'], guards: (keyof GuardsList)[]) {
/**
* Hold reference to the guard last attempted within the for loop. We pass
* the reference of the guard to the "AuthenticationException", so that
* it can decide the correct response behavior based upon the guard
* driver
*/
let guardLastAttempted: string | undefined;
for (let guard of guards) {
guardLastAttempted = guard;
if (await auth.use(guard).check()) {
/**
* Instruct auth to use the given guard as the default guard for
* the rest of the request, since the user authenticated
* succeeded here
*/
auth.defaultGuard = guard;
return true;
}
}
/**
* Unable to authenticate using any guard
*/
throw new AuthenticationException('Unauthorized access', 'E_UNAUTHORIZED_ACCESS', guardLastAttempted, this.redirectTo);
}
/**
* Handle request
*/
public async handle({ auth }: HttpContextContract, next: () => Promise<void>, customGuards: (keyof GuardsList)[]) {
/**
* Uses the user defined guards or the default guard mentioned in
* the config file
*/
const guards = customGuards.length ? customGuards : [auth.name];
await this.authenticate(auth, guards);
await next();
}
}
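For context, a middleware like this is wired up roughly as follows in AdonisJS v5. The registration call is the standard v5 API; the route and controller binding are hypothetical.

// start/kernel.ts (AdonisJS v5, illustrative)
import Server from '@ioc:Adonis/Core/Server';

Server.middleware.registerNamed({
    auth: () => import('App/Middleware/Auth'),
});

// start/routes.ts (route and controller are hypothetical)
import Route from '@ioc:Adonis/Core/Route';

Route.get('/app/datasets', 'DatasetController.index').middleware('auth');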

View file

@ -1,76 +0,0 @@
import { DateTime } from 'luxon';
import {
column,
hasMany,
HasMany,
belongsTo,
BelongsTo,
// manyToMany,
// ManyToMany,
SnakeCaseNamingStrategy,
} from '@ioc:Adonis/Lucid/Orm';
import HashValue from './HashValue';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
export default class File extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'document_files';
public static selfAssignPrimaryKey = false;
@column({
isPrimary: true,
})
public id: number;
@column({})
public document_id: number;
@column({})
public pathName: string;
@column()
public label: string;
@column()
public comment: string;
@column()
public mimeType: string;
@column()
public language: string;
@column()
public fileSize: number;
@column()
public visibleInOai: boolean;
@column()
public visibleInFrontdoor: boolean;
@column()
public sortOrder: number;
@column.dateTime({ autoCreate: true })
public createdAt: DateTime;
@column.dateTime({ autoCreate: true, autoUpdate: true })
public updatedAt: DateTime;
// public function dataset()
// {
// return $this->belongsTo(Dataset::class, 'document_id', 'id');
// }
@belongsTo(() => Dataset, {
foreignKey: 'document_id',
})
public dataset: BelongsTo<typeof Dataset>;
@hasMany(() => HashValue, {
foreignKey: 'file_id',
})
public hashvalues: HasMany<typeof HashValue>;
}

View file

@ -1,79 +0,0 @@
import { column, SnakeCaseNamingStrategy, computed, manyToMany, ManyToMany } from '@ioc:Adonis/Lucid/Orm';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
export default class Person extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'persons';
public static selfAssignPrimaryKey = false;
// only the academic_title, email, first_name, identifier_orcid, last_name and name_type attributes are allowed to be mass assigned.
public static fillable: string[] = ['academic_title', 'email', 'first_name', 'identifier_orcid', 'last_name', 'name_type'];
@column({
isPrimary: true,
})
public id: number;
@column({ columnName: 'academic_title' })
public academicTitle: string;
@column()
public email: string;
@column({})
public firstName: string;
@column({})
public lastName: string;
@column({})
public identifierOrcid: string;
@column({})
public status: boolean;
@column({})
public nameType: string;
@column.dateTime({
serialize: (value: Date | null) => {
return value ? dayjs(value).format('MMMM D YYYY HH:mm a') : value;
},
autoCreate: true,
})
public createdAt: DateTime;
@computed({
serializeAs: 'name',
})
public get fullName() {
return `${this.firstName} ${this.lastName}`;
}
// @computed()
// public get progress(): number {
// return 50;
// }
// @computed()
// public get created_at() {
// return '2023-03-21 08:45:00';
// }
@computed()
public get datasetCount() {
// populated via withCount('datasets'); Lucid stores the aggregate in $extras
return this.$extras.datasets_count;
}
@manyToMany(() => Dataset, {
pivotForeignKey: 'person_id',
pivotRelatedForeignKey: 'document_id',
pivotTable: 'link_documents_persons',
pivotColumns: ['role', 'sort_order', 'allow_email_contact'],
})
public datasets: ManyToMany<typeof Dataset>;
}
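The datasetCount getter above only returns a value when the datasets_count aggregate has been loaded alongside the model. A typical query that populates it (v5 Lucid; the ordering and the helper function are illustrative):

import Person from 'App/Models/Person';

// withCount('datasets') fills $extras.datasets_count, which datasetCount reads
export async function listPersonsWithCounts() {
    const persons = await Person.query().withCount('datasets').orderBy('last_name', 'asc');
    return persons.map((p) => ({ name: p.fullName, datasets: p.datasetCount }));
}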

View file

@ -1,110 +0,0 @@
import { DateTime } from 'luxon';
import { column, beforeSave, manyToMany, ManyToMany, hasMany, HasMany } from '@ioc:Adonis/Lucid/Orm';
import Hash from '@ioc:Adonis/Core/Hash';
import Role from './Role';
import Database from '@ioc:Adonis/Lucid/Database';
import Config from '@ioc:Adonis/Core/Config';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
// export default interface IUser {
// id: number;
// login: string;
// email: string;
// // password: string;
// // createdAt: DateTime;
// // updatedAt: DateTime;
// // async (user): Promise<void>;
// }
const permissionTable = Config.get('rolePermission.permission_table', 'permissions');
const rolePermissionTable = Config.get('rolePermission.role_permission_table', 'role_has_permissions');
const roleTable = Config.get('rolePermission.role_table', 'roles');
const userRoleTable = Config.get('rolePermission.user_role_table', 'link_accounts_roles');
export default class User extends BaseModel {
public static table = 'accounts';
@column({ isPrimary: true })
public id: number;
@column()
public login: string;
@column()
public email: string;
@column({ serializeAs: null })
public password: string;
@column.dateTime({ autoCreate: true })
public createdAt: DateTime;
@column.dateTime({ autoCreate: true, autoUpdate: true })
public updatedAt: DateTime;
@beforeSave()
public static async hashPassword(user) {
if (user.$dirty.password) {
user.password = await Hash.make(user.password);
}
}
@manyToMany(() => Role, {
pivotForeignKey: 'account_id',
pivotRelatedForeignKey: 'role_id',
pivotTable: 'link_accounts_roles',
})
public roles: ManyToMany<typeof Role>;
@hasMany(() => Dataset, {
foreignKey: 'account_id',
})
public datasets: HasMany<typeof Dataset>;
// https://github.com/adonisjs/core/discussions/1872#discussioncomment-132289
public async getRoles(this: User): Promise<string[]> {
const roles = await this.related('roles').query();
return roles.map((role) => role.name);
}
public async can(permissionNames: Array<string>): Promise<boolean> {
// const permissions = await this.getPermissions()
// return Acl.check(expression, operand => _.includes(permissions, operand))
const hasPermission = await this.checkHasPermissions(this, permissionNames);
return hasPermission;
}
private async checkHasPermissions(user: User, permissionNames: Array<string>): Promise<boolean> {
let permissionPlaceHolder = '(';
let placeholders = new Array(permissionNames.length).fill('?');
permissionPlaceHolder += placeholders.join(',');
permissionPlaceHolder += ')';
let {
rows: {
0: { permissioncount },
},
} = await Database.rawQuery(
'SELECT count("p"."name") as permissionCount FROM ' +
roleTable +
' r INNER JOIN ' +
userRoleTable +
' ur ON ur.role_id=r.id AND "ur"."account_id"=? ' +
' INNER JOIN ' +
rolePermissionTable +
' rp ON rp.role_id=r.id ' +
' INNER JOIN ' +
permissionTable +
' p ON rp.permission_id=p.id AND "p"."name" in ' +
permissionPlaceHolder +
' LIMIT 1',
[user.id, ...permissionNames],
);
return permissioncount > 0;
}
}
// export default User;
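A sketch of how the can() check above is typically consumed as a guard. The 'settings' permission name mirrors the one used by ProjectsController further down in this diff; the helper itself is hypothetical.

import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import User from 'App/Models/User';

// Illustrative guard helper (v5 era)
export async function ensureCanManageSettings({ auth, response }: HttpContextContract) {
    const user = auth.user as User;
    if (!user || !(await user.can(['settings']))) {
        return response.unauthorized({ error: 'Missing required permission: settings' });
    }
}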

View file

@ -1,19 +0,0 @@
import Database, {
// DatabaseQueryBuilderContract,
QueryClientContract,
TransactionClientContract,
} from '@ioc:Adonis/Lucid/Database';
import Config from '@ioc:Adonis/Core/Config';
export function getUserRoles(userId: number, trx?: TransactionClientContract): Promise<Array<string>> {
const { userRole } = Config.get('acl.joinTables');
return ((trx || Database) as QueryClientContract | TransactionClientContract)
.query()
.from('roles')
.distinct('roles.slug')
.leftJoin(userRole, `${userRole}.role_id`, 'roles.id')
.where(`${userRole}.user_id`, userId)
.then((res) => {
return res.map((r) => r.slug);
});
}
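Illustrative usage of the helper above; the module path is not shown in this diff and the role slugs are hypothetical. Passing the open transaction client makes the query run inside the same transaction.

import Database from '@ioc:Adonis/Lucid/Database';
// adjust the import to wherever getUserRoles actually lives
import { getUserRoles } from 'App/Library/Roles';

export async function logRoles(userId: number) {
    const roles = await getUserRoles(userId); // e.g. ['administrator', 'editor']
    console.log(roles);

    await Database.transaction(async (trx) => {
        const sameRoles = await getUserRoles(userId, trx); // reuses the open transaction client
        console.log(sameRoles);
    });
}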

View file

@ -1,46 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
export default class AuthValidator {
constructor(protected ctx: HttpContextContract) {}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
public schema = schema.create({
email: schema.string({ trim: true }, [
rules.email(),
// rules.unique({ table: 'accounts', column: 'email' })
]),
password: schema.string({}, [rules.minLength(6)]),
});
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {};
}

View file

@ -1,178 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import dayjs from 'dayjs';
import { TitleTypes, DescriptionTypes, RelationTypes, ReferenceIdentifierTypes, ContributorTypes } from 'Contracts/enums';
export default class CreateDatasetValidator {
constructor(protected ctx: HttpContextContract) {}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
public schema = schema.create({
// first step
language: schema.string({ trim: true }, [
rules.regex(/^[a-zA-Z0-9-_]+$/), //Must be alphanumeric with hyphens or underscores
]),
licenses: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one license for the new dataset
rights: schema.string([rules.equalTo('true')]),
// second step
type: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
creating_corporation: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
titles: schema.array([rules.minLength(1)]).members(
schema.object().members({
value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
type: schema.enum(Object.values(TitleTypes)),
language: schema.string({ trim: true }, [
rules.minLength(2),
rules.maxLength(255),
rules.translatedLanguage('/language', 'type'),
]),
}),
),
descriptions: schema.array([rules.minLength(1)]).members(
schema.object().members({
value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
type: schema.enum(Object.values(DescriptionTypes)),
language: schema.string({ trim: true }, [
rules.minLength(2),
rules.maxLength(255),
rules.translatedLanguage('/language', 'type'),
]),
}),
),
authors: schema.array([rules.minLength(1)]).members(schema.object().members({ email: schema.string({ trim: true }) })),
contributors: schema.array.optional().members(
schema.object().members({
email: schema.string({ trim: true }),
pivot_contributor_type: schema.enum(Object.keys(ContributorTypes)),
}),
),
// third step
project_id: schema.number.optional(),
embargo_date: schema.date.optional({ format: 'yyyy-MM-dd' }, [rules.after(10, 'days')]),
coverage: schema.object().members({
x_min: schema.number(),
x_max: schema.number(),
y_min: schema.number(),
y_max: schema.number(),
elevation_absolut: schema.number.optional(),
elevation_min: schema.number.optional([rules.requiredIfExists('elevation_max')]),
elevation_max: schema.number.optional([rules.requiredIfExists('elevation_min')]),
depth_absolut: schema.number.optional(),
depth_min: schema.number.optional([rules.requiredIfExists('depth_max')]),
depth_max: schema.number.optional([rules.requiredIfExists('depth_min')]),
}),
references: schema.array.optional([rules.uniqueArray('value')]).members(
schema.object().members({
value: schema.string({ trim: true }, [rules.minLength(3), rules.maxLength(255)]),
type: schema.enum(Object.values(ReferenceIdentifierTypes)),
relation: schema.enum(Object.values(RelationTypes)),
label: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
}),
),
subjects: schema.array([rules.minLength(3), rules.uniqueArray('value')]).members(
schema.object().members({
value: schema.string({ trim: true }, [
rules.minLength(3),
rules.maxLength(255),
// rules.unique({ table: 'dataset_subjects', column: 'value' }),
]),
// type: schema.enum(Object.values(TitleTypes)),
language: schema.string({ trim: true }, [rules.minLength(2), rules.maxLength(255)]),
}),
),
// file: schema.file({
// size: '100mb',
// extnames: ['jpg', 'gif', 'png'],
// }),
files: schema.array([rules.minLength(1)]).members(
schema.file({
size: '100mb',
extnames: ['jpg', 'gif', 'png', 'tif', 'pdf'],
}),
),
// upload: schema.object().members({
// label: schema.string({ trim: true }, [rules.maxLength(255)]),
// // label: schema.string({ trim: true }, [
// // // rules.minLength(3),
// // // rules.maxLength(255),
// // ]),
// }),
});
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {
'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
'required': '{{ field }} is required',
'unique': '{{ field }} must be unique, and this value is already taken',
// 'confirmed': '{{ field }} is not correct',
'licenses.minLength': 'at least {{ options.minLength }} license must be defined',
'licenses.*.number': 'Define licenses as valid numbers',
'rights.equalTo': 'you must agree to continue',
'titles.0.value.minLength': 'Main Title must be at least {{ options.minLength }} characters long',
'titles.0.value.required': 'Main Title is required',
'titles.*.value.required': 'Additional title is required, if defined',
'titles.*.type.required': 'Additional title type is required',
'titles.*.language.required': 'Additional title language is required',
'titles.*.language.translatedLanguage': 'The language of the translated title must be different from the language of the dataset',
'descriptions.0.value.minLength': 'Main Abstract must be at least {{ options.minLength }} characters long',
'descriptions.0.value.required': 'Main Abstract is required',
'descriptions.*.value.required': 'Additional description is required, if defined',
'descriptions.*.type.required': 'Additional description type is required',
'descriptions.*.language.required': 'Additional description language is required',
'descriptions.*.language.translatedLanguage':
'The language of the translated description must be different from the language of the dataset',
'authors.minLength': 'at least {{ options.minLength }} author must be defined',
'contributors.*.pivot_contributor_type.required': 'contributor type is required, if defined',
'after': `{{ field }} must be at least 10 days in the future (after ${dayjs().add(10, 'day').format('YYYY-MM-DD')})`,
'subjects.minLength': 'at least {{ options.minLength }} keywords must be defined',
'subjects.uniqueArray': 'The {{ options.array }} array must have unique values based on the {{ options.field }} attribute.',
'subjects.*.value.required': 'keyword value is required',
'subjects.*.value.minLength': 'keyword value must be at least {{ options.minLength }} characters long',
'subjects.*.type.required': 'keyword type is required',
'subjects.*.language.required': 'language of keyword is required',
'references.*.value.required': 'Additional reference value is required, if defined',
'references.*.type.required': 'Additional reference identifier type is required',
'references.*.relation.required': 'Additional reference relation type is required',
'references.*.label.required': 'Additional reference label is required',
'files.minLength': 'At least {{ options.minLength }} file upload is required.',
'files.*.size': 'file size is too big',
'files.extnames': 'file extension is not supported',
};
}
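In AdonisJS v5 this validator class is consumed via request.validate(); on failure it throws a 422 whose messages are the ones defined above. A minimal controller sketch (controller name, validator import path, and redirect target are illustrative):

import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import CreateDatasetValidator from 'App/Validators/CreateDatasetValidator';

export default class DatasetController {
    public async store({ request, response, session }: HttpContextContract) {
        const payload = await request.validate(CreateDatasetValidator);
        // ... persist the dataset, its titles, descriptions, authors and files from `payload`
        session.flash('message', 'Dataset has been submitted.');
        return response.redirect().toRoute('dataset.list');
    }
}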

View file

@ -1,69 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
export default class CreateRoleValidator {
constructor(protected ctx: HttpContextContract) {}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
public schema = schema.create({
name: schema.string({ trim: true }, [
rules.minLength(3),
rules.maxLength(255),
rules.unique({ table: 'roles', column: 'name' }),
rules.regex(/^[a-zA-Z0-9-_]+$/), //Must be alphanumeric with hyphens or underscores
]),
display_name: schema.string.optional({ trim: true }, [
rules.minLength(3),
rules.maxLength(255),
rules.unique({ table: 'roles', column: 'display_name' }),
rules.regex(/^[a-zA-Z0-9-_]+$/), //Must be alphanumeric with hyphens or underscores
]),
description: schema.string.optional({}, [rules.minLength(3), rules.maxLength(255)]),
permissions: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one permission for the new role
});
// emails: schema
// .array([rules.minLength(1)])
// .members(
// schema.object().members({ email: schema.string({}, [rules.email()]) })
// ),
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {
'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
'required': '{{ field }} is required',
'unique': '{{ field }} must be unique, and this value is already taken',
'confirmed': '{{ field }} is not correct',
'permissions.minLength': 'at least {{ options.minLength }} permission must be defined',
'permissions.*.number': 'Define permissions as valid numbers',
};
}

View file

@ -1,64 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
export default class CreateUserValidator {
constructor(protected ctx: HttpContextContract) {}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
public schema = schema.create({
login: schema.string({ trim: true }, [
rules.minLength(3),
rules.maxLength(50),
rules.unique({ table: 'accounts', column: 'login' }),
rules.regex(/^[a-zA-Z0-9-_]+$/), //Must be alphanumeric with hyphens or underscores
]),
email: schema.string({}, [rules.email(), rules.unique({ table: 'accounts', column: 'email' })]),
password: schema.string([rules.confirmed(), rules.minLength(6)]),
roles: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one role for the new user
});
// emails: schema
// .array([rules.minLength(1)])
// .members(
// schema.object().members({ email: schema.string({}, [rules.email()]) })
// ),
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {
'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
'required': '{{ field }} is required',
'unique': '{{ field }} must be unique, and this value is already taken',
'confirmed': '{{ field }} is not correct',
'roles.minLength': 'at least {{ options.minLength }} role must be defined',
'roles.*.number': 'Define roles as valid numbers',
};
}

View file

@ -1,97 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
// import { Request } from '@adonisjs/core/build/standalone';
export default class UpdateRoleValidator {
protected ctx: HttpContextContract;
public schema;
constructor(ctx: HttpContextContract) {
this.ctx = ctx;
this.schema = this.createSchema();
}
// public get schema() {
// return this._schema;
// }
private createSchema() {
return schema.create({
name: schema.string({ trim: true }, [
rules.minLength(3),
rules.maxLength(50),
rules.unique({
table: 'roles',
column: 'name',
whereNot: { id: this.ctx?.params.id },
}),
rules.regex(/^[a-zA-Z0-9-_]+$/),
//Must be alphanumeric with hyphens or underscores
]),
description: schema.string.optional({}, [rules.minLength(3), rules.maxLength(255)]),
permissions: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one permission for the new role
});
}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
// public refs = schema.refs({
// id: this.ctx.params.id
// })
// public schema = schema.create({
// login: schema.string({ trim: true }, [
// rules.minLength(3),
// rules.maxLength(50),
// rules.unique({
// table: 'accounts',
// column: 'login',
// // whereNot: { id: this.refs.id }
// whereNot: { id: this.ctx?.params.id },
// }),
// // rules.regex(/^[a-zA-Z0-9-_]+$/),
// //Must be alphanumeric with hyphens or underscores
// ]),
// email: schema.string({}, [rules.email(), rules.unique({ table: 'accounts', column: 'email' })]),
// password: schema.string.optional([rules.confirmed(), rules.minLength(6)]),
// roles: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one role for the new user
// });
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {
'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
'required': '{{ field }} is required',
'unique': '{{ field }} must be unique, and this value is already taken',
'permissions.minLength': 'at least {{ options.minLength }} permission must be defined',
'permissions.*.number': 'Define permissions as valid numbers',
};
}

View file

@ -1,103 +0,0 @@
import { schema, CustomMessages, rules } from '@ioc:Adonis/Core/Validator';
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
// import { Request } from '@adonisjs/core/build/standalone';
export default class UpdateUserValidator {
protected ctx: HttpContextContract;
public schema;
constructor(ctx: HttpContextContract) {
this.ctx = ctx;
this.schema = this.createSchema();
}
// public get schema() {
// return this._schema;
// }
private createSchema() {
return schema.create({
login: schema.string({ trim: true }, [
rules.minLength(3),
rules.maxLength(50),
rules.unique({
table: 'accounts',
column: 'login',
// whereNot: { id: this.refs.id }
whereNot: { id: this.ctx?.params.id },
}),
// rules.regex(/^[a-zA-Z0-9-_]+$/),
//Must be alphanumeric with hyphens or underscores
]),
email: schema.string({}, [
rules.email(),
rules.unique({ table: 'accounts', column: 'email', whereNot: { id: this.ctx?.params.id } }),
]),
password: schema.string.optional([rules.confirmed(), rules.minLength(6)]),
roles: schema.array.optional([rules.minLength(1)]).members(schema.number()), // define at least one role for the new user
});
}
/*
* Define schema to validate the "shape", "type", "formatting" and "integrity" of data.
*
* For example:
* 1. The username must be of data type string. But then also, it should
* not contain special characters or numbers.
* ```
* schema.string({}, [ rules.alpha() ])
* ```
*
* 2. The email must be of data type string, formatted as a valid
* email. But also, not used by any other user.
* ```
* schema.string({}, [
* rules.email(),
* rules.unique({ table: 'users', column: 'email' }),
* ])
* ```
*/
// public refs = schema.refs({
// id: this.ctx.params.id
// })
// public schema = schema.create({
// login: schema.string({ trim: true }, [
// rules.minLength(3),
// rules.maxLength(50),
// rules.unique({
// table: 'accounts',
// column: 'login',
// // whereNot: { id: this.refs.id }
// whereNot: { id: this.ctx?.params.id },
// }),
// // rules.regex(/^[a-zA-Z0-9-_]+$/),
// //Must be alphanumeric with hyphens or underscores
// ]),
// email: schema.string({}, [rules.email(), rules.unique({ table: 'accounts', column: 'email' })]),
// password: schema.string.optional([rules.confirmed(), rules.minLength(6)]),
// roles: schema.array([rules.minLength(1)]).members(schema.number()), // define at least one role for the new user
// });
/**
* Custom messages for validation failures. You can make use of dot notation `(.)`
* for targeting nested fields and array expressions `(*)` for targeting all
* children of an array. For example:
*
* {
* 'profile.username.required': 'Username is required',
* 'scores.*.number': 'Define scores as valid numbers'
* }
*
*/
public messages: CustomMessages = {
'minLength': '{{ field }} must be at least {{ options.minLength }} characters long',
'maxLength': '{{ field }} must be less than {{ options.maxLength }} characters long',
'required': '{{ field }} is required',
'unique': '{{ field }} must be unique, and this value is already taken',
'confirmed': '{{ field }} is not correct',
'roles.minLength': 'at least {{ options.minLength }} role must be defined',
'roles.*.number': 'Define roles as valid numbers',
};
}

View file

@ -0,0 +1,54 @@
// app/controllers/projects_controller.ts
import Project from '#models/project';
import type { HttpContext } from '@adonisjs/core/http';
import { createProjectValidator, updateProjectValidator } from '#validators/project';
export default class ProjectsController {
// GET /settings/projects
public async index({ inertia, auth }: HttpContext) {
const projects = await Project.all();
// return inertia.render('Admin/Project/Index', { projects });
return inertia.render('Admin/Project/Index', {
projects: projects,
can: {
edit: await auth.user?.can(['settings']),
create: await auth.user?.can(['settings']),
},
});
}
// GET /settings/projects/create
public async create({ inertia }: HttpContext) {
return inertia.render('Admin/Project/Create');
}
// POST /settings/projects
public async store({ request, response, session }: HttpContext) {
// Validate the request data
const data = await request.validateUsing(createProjectValidator);
await Project.create(data);
session.flash('success', 'Project created successfully');
return response.redirect().toRoute('settings.project.index');
}
// GET /settings/projects/:id/edit
public async edit({ params, inertia }: HttpContext) {
const project = await Project.findOrFail(params.id);
return inertia.render('Admin/Project/Edit', { project });
}
// PUT /settings/projects/:id
public async update({ params, request, response, session }: HttpContext) {
const project = await Project.findOrFail(params.id);
// Validate the request data
const data = await request.validateUsing(updateProjectValidator);
await project.merge(data).save();
session.flash('success', 'Project updated successfully');
return response.redirect().toRoute('settings.project.index');
}
}
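The route comments above imply a /settings prefix; a sketch of the matching start/routes.ts wiring (AdonisJS v6). Only the settings.project.index name appears in the controller's redirects; the remaining names follow the same pattern and the prefix/grouping is an assumption.

// start/routes.ts (sketch)
import router from '@adonisjs/core/services/router';
const ProjectsController = () => import('#controllers/projects_controller');

router
    .group(() => {
        router.get('/projects', [ProjectsController, 'index']).as('settings.project.index');
        router.get('/projects/create', [ProjectsController, 'create']).as('settings.project.create');
        router.post('/projects', [ProjectsController, 'store']).as('settings.project.store');
        router.get('/projects/:id/edit', [ProjectsController, 'edit']).as('settings.project.edit');
        router.put('/projects/:id', [ProjectsController, 'update']).as('settings.project.update');
    })
    .prefix('/settings');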

View file

@ -0,0 +1,12 @@
class DoiClientException extends Error {
public status: number;
public message: string;
constructor(status: number, message: string) {
super(message);
this.status = status;
this.message = message;
}
}
export default DoiClientException;
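A sketch of how this exception might be thrown by a DOI client. The endpoint URL, headers, and import path are placeholders, not taken from this diff.

import DoiClientException from '#exceptions/doi_client_exception'; // path assumed

export async function registerDoiMetadata(doi: string, xml: string): Promise<void> {
    const response = await fetch('https://mds.example.org/metadata', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/xml;charset=UTF-8' },
        body: xml,
    });
    if (!response.ok) {
        throw new DoiClientException(response.status, `DOI registration failed for ${doi}: ${await response.text()}`);
    }
}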

View file

@ -1,5 +1,5 @@
import { StatusCodes } from 'http-status-codes';
import HTTPException from './HttpException';
import HTTPException from './HttpException.js';
class InternalServerErrorException extends HTTPException {
constructor(message?: string) {

View file

@ -1,4 +1,5 @@
import { Exception } from '@adonisjs/core/build/standalone';
import { Exception } from "@adonisjs/core/exceptions";
import { HttpContext } from "@adonisjs/core/http";
/*
|--------------------------------------------------------------------------
@ -23,21 +24,21 @@ export default class InvalidCredentialException extends Exception {
* Unable to find user
*/
public static invalidUid() {
const error = new this('User not found', 400, 'E_INVALID_AUTH_UID');
const error = new this('User not found', {status: 400, code: 'E_INVALID_AUTH_UID'});
return error;
}
/**
* Invalid user password
*/
public static invalidPassword() {
const error = new this('Password mis-match', 400, 'E_INVALID_AUTH_PASSWORD');
const error = new this('Password mis-match', {status: 400, code: 'E_INVALID_AUTH_PASSWORD'});
return error;
}
/**
* Flash error message and redirect the user back
*/
private respondWithRedirect(error, ctx) {
private respondWithRedirect(error: any, ctx: HttpContext) {
// if (!ctx.session) {
// return ctx.response.status(this.status).send(this.responseText);
// }
@ -59,7 +60,7 @@ export default class InvalidCredentialException extends Exception {
* Handle this exception by itself
*/
public handle(error, ctx) {
public handle(error: any, ctx: HttpContext) {
// return response.status(403).view.render("errors/unauthorized", {
// error: error,
// });

View file

@ -1,6 +1,6 @@
import { StatusCodes } from 'http-status-codes';
// import HTTPException from './HttpException';
import { OaiErrorCodes } from './OaiErrorCodes';
import { OaiErrorCodes } from './OaiErrorCodes.js';
export class ErrorCode {
public static readonly Unauthenticated = 'Unauthenticated';

View file

@ -0,0 +1,43 @@
// import { Exception } from '@adonisjs/core/exceptions'
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http';
export default class DbHandlerException extends ExceptionHandler {
// constructor() {
// super(Logger)
// }
async handle(error: any, ctx: HttpContext) {
// Check for AggregateError type
if (error.type === 'AggregateError' && error.aggregateErrors) {
const dbErrors = error.aggregateErrors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
return ctx.response.status(503).json({
status: 'error',
message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
details: {
code: error.code,
type: error.type,
ports: error.aggregateErrors.map((err: any) => ({
port: err.port,
address: err.address,
})),
},
});
}
}
// Handle simple ECONNREFUSED errors
if (error.code === 'ECONNREFUSED') {
return ctx.response.status(503).json({
status: 'error',
message: 'Database connection failed. Please ensure PostgreSQL is running.',
code: error.code,
});
}
return super.handle(error, ctx);
}
static status = 500;
}

214
app/exceptions/handler.ts Normal file
View file

@ -0,0 +1,214 @@
/*
|--------------------------------------------------------------------------
| Http Exception Handler
|--------------------------------------------------------------------------
|
| AdonisJs will forward all exceptions occurred during an HTTP request to
| the following class. You can learn more about exception handling by
| reading docs.
|
| The exception handler extends a base `HttpExceptionHandler` which is not
| mandatory, however it can do lot of heavy lifting to handle the errors
| properly.
|
*/
import app from '@adonisjs/core/services/app';
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http';
// import logger from '@adonisjs/core/services/logger';
import type { StatusPageRange, StatusPageRenderer } from '@adonisjs/core/types/http';
export default class HttpExceptionHandler extends ExceptionHandler {
/**
* In debug mode, the exception handler will display verbose errors
* with pretty printed stack traces.
*/
protected debug = !app.inProduction;
/**
* Status pages are used to display custom HTML pages for certain error
* codes. You might want to enable them in production only, but feel
* free to enable them in development as well.
*/
protected renderStatusPages = true; //app.inProduction;
/**
* Status pages is a collection of error code range and a callback
* to return the HTML contents to send as a response.
*/
// protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
// '401..403': (error, { view }) => {
// return view.render('./errors/unauthorized', { error });
// },
// '404': (error, { view }) => {
// return view.render('./errors/not-found', { error });
// },
// '500..599': (error, { view }) => {
// return view.render('./errors/server-error', { error });
// },
// };
protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
'404': (error, { inertia }) => {
return inertia.render('Errors/ServerError', {
error: error.message,
code: error.status,
});
},
'401..403': async (error, { inertia }) => {
// session.flash('errors', error.message);
return inertia.render('Errors/ServerError', {
error: error.message,
code: error.status,
});
},
// '500': (error, { inertia }) => {
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// },
'500..599': (error, { inertia }) => {
    const dbErrors =
        error.code === 'ECONNREFUSED' &&
        Array.isArray(error.errors) &&
        error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
    if (dbErrors) {
        return inertia.render('Errors/postgres_error', {
            status: 'error',
            message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
            details: {
                code: error.code,
                type: error.status,
                ports: error.errors.map((err: any) => ({
                    port: err.port,
                    address: err.address,
                })),
            },
        });
    }
    // fall back to the generic error page for all other 5xx errors
    return inertia.render('Errors/ServerError', {
        error: error.message,
        code: error.status,
    });
},
};
// constructor() {
// super(logger);
// }
public async handle(error: any, ctx: HttpContext) {
const { response, request, session, inertia } = ctx;
/**
* Handle failed authentication attempt
*/
// if (['E_INVALID_AUTH_PASSWORD', 'E_INVALID_AUTH_UID'].includes(error.code)) {
// session.flash('errors', { login: error.message });
// return response.redirect('/login');
// }
// if ([401].includes(error.status)) {
// session.flash('errors', { login: error.message });
// return response.redirect('/dashboard');
// }
// Handle Axios errors
if (error.code === 'ECONNREFUSED') {
const dbErrors = error.errors.some((err: any) => err.code === 'ECONNREFUSED' && err.port === 5432);
if (dbErrors) {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
// return inertia.render('Errors/postgres_error', {
// status: 'error',
// message: 'PostgreSQL database connection failed. Please ensure the database service is running.',
// details: {
// code: error.code,
// type: error.status,
// ports: error.errors.map((err: any) => ({
// port: err.port,
// address: err.address,
// })),
// },
// });
}
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
// https://github.com/inertiajs/inertia-laravel/issues/56
// let test = response.getStatus(); //200
// let header = request.header('X-Inertia'); // true
// if (request.header('X-Inertia') && [500, 503, 404, 403, 401, 200].includes(response.getStatus())) {
if (request.header('X-Inertia') && [422].includes(error.status)) {
// session.flash('errors', error.messages.errors);
session.flash('errors', error.messages);
return response.redirect().back();
// return inertia.render('errors/server_error', {
// return inertia.render('errors/server_error', {
// // status: response.getStatus(),
// error: error,
// });
// ->toResponse($request)
// ->setStatusCode($response->status());
}
// Handle simple ECONNREFUSED errors
// if (error.code === 'ECONNREFUSED') {
// return ctx.response.status(503).json({
// status: 'error',
// message: 'Database connection failed. Please ensure PostgreSQL is running.',
// code: error.code,
// });
// }
// Dynamically change the error templates based on the absence of X-Inertia header
// if (!ctx.request.header('X-Inertia')) {
// this.statusPages = {
// '401..403': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// '404': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// '500..599': (error, { inertia }) => inertia.render('Errors/ServerError', { error: error.message, code: error.status }),
// };
// }
/**
* Forward rest of the exceptions to the parent class
*/
return super.handle(error, ctx);
}
/**
* The method is used to report the error to the logging service or
* a third-party error monitoring service.
*
* @note You should not attempt to send a response from this method.
*/
async report(error: unknown, ctx: HttpContext) {
return super.report(error, ctx);
}
}

View file

@ -1,11 +1,12 @@
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Config from '@ioc:Adonis/Core/Config';
import Database from '@ioc:Adonis/Lucid/Database';
import User from 'App/Models/User';
import { HttpContext } from '@adonisjs/core/http';
// import Config from '@ioc:Adonis/Core/Config';
import config from '@adonisjs/core/services/config'
import db from '@adonisjs/lucid/services/db';
import User from '#models/user';
// import { Exception } from '@adonisjs/core/build/standalone'
const roleTable = Config.get('rolePermission.role_table', 'roles');
const userRoleTable = Config.get('rolePermission.user_role_table', 'user_roles');
const roleTable = config.get('rolePermission.role_table', 'roles');
const userRoleTable = config.get('rolePermission.user_role_table', 'user_roles');
/**
* Role authentication to check if user has any of the specified roles
@ -16,7 +17,7 @@ export default class Is {
/**
* Handle request
*/
public async handle({ auth, response }: HttpContextContract, next: () => Promise<void>, roleNames: string[]) {
public async handle({ auth, response }: HttpContext, next: () => Promise<void>, roleNames: string[]) {
/**
* Check if user is logged-in or not.
*/
@ -33,7 +34,8 @@ export default class Is {
// 401,
// "E_INVALID_AUTH_UID");
}
await next();
// await next();
return next()
}
private async checkHasRoles(user: User, roleNames: Array<string>): Promise<boolean> {
@ -46,7 +48,7 @@ export default class Is {
0: {
0: { roleCount },
},
} = await Database.rawQuery(
} = await db.rawQuery(
'SELECT count(`ur`.`id`) as roleCount FROM ' +
userRoleTable +
' ur INNER JOIN ' +

View file

@ -0,0 +1,25 @@
import type { HttpContext } from '@adonisjs/core/http'
import type { NextFn } from '@adonisjs/core/types/http'
import type { Authenticators } from '@adonisjs/auth/types'
/**
* Auth middleware is used to authenticate HTTP requests and deny
* access to unauthenticated users.
*/
export default class AuthMiddleware {
/**
* The URL to redirect to, when authentication fails
*/
redirectTo = '/app/login'
async handle(
ctx: HttpContext,
next: NextFn,
options: {
guards?: (keyof Authenticators)[]
} = {}
) {
await ctx.auth.authenticateUsing(options.guards, { loginRoute: this.redirectTo })
return next()
}
}
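In AdonisJS v6 this middleware is registered as a named middleware in start/kernel.ts and attached per route. A sketch; the guard name and the example route are assumptions.

// start/kernel.ts (sketch)
import router from '@adonisjs/core/services/router';

export const middleware = router.named({
    auth: () => import('#middleware/auth_middleware'),
});

// start/routes.ts (illustrative route)
import { middleware } from '#start/kernel';

router
    .get('/app/dashboard', async ({ auth }) => auth.user!.login)
    .use(middleware.auth({ guards: ['web'] }));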

View file

@ -1,13 +1,14 @@
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Config from '@ioc:Adonis/Core/Config';
import Database from '@ioc:Adonis/Lucid/Database';
import User from 'App/Models/User';
import { Exception } from '@adonisjs/core/build/standalone';
import { HttpContext } from '@adonisjs/core/http';
// import Config from '@ioc:Adonis/Core/Config';
import config from '@adonisjs/core/services/config';
import db from '@adonisjs/lucid/services/db';
import User from '#models/user';
import { Exception } from '@adonisjs/core/exceptions';
const permissionTable = Config.get('rolePermission.permission_table', 'permissions');
const rolePermissionTable = Config.get('rolePermission.role_permission_table', 'role_has_permissions');
const roleTable = Config.get('rolePermission.role_table', 'roles');
const userRoleTable = Config.get('rolePermission.user_role_table', 'link_accounts_roles');
const permissionTable = config.get('rolePermission.permission_table', 'permissions');
const rolePermissionTable = config.get('rolePermission.role_permission_table', 'role_has_permissions');
const roleTable = config.get('rolePermission.role_table', 'roles');
const userRoleTable = config.get('rolePermission.user_role_table', 'link_accounts_roles');
/**
* Permission authentication to check if user has any of the specified permissions
@ -18,7 +19,7 @@ export default class Can {
/**
* Handle request
*/
public async handle({ auth, response }: HttpContextContract, next: () => Promise<void>, permissionNames: string[]) {
public async handle({ auth, response }: HttpContext, next: () => Promise<void>, permissionNames: string[]) {
/**
* Check if user is logged-in
*/
@ -31,9 +32,10 @@ export default class Can {
// return response.unauthorized({
// error: `Doesn't have required role(s): ${permissionNames.join(',')}`,
// });
throw new Exception(`Doesn't have required permission(s): ${permissionNames.join(',')}`, 401);
throw new Exception(`Doesn't have required permission(s): ${permissionNames.join(',')}`, { status: 401 });
}
await next();
// await next();
return next();
}
private async checkHasPermissions(user: User, permissionNames: Array<string>): Promise<boolean> {
@ -66,7 +68,7 @@ export default class Can {
rows: {
0: { permissioncount },
},
} = await Database.rawQuery(
} = await db.rawQuery(
'SELECT count("p"."name") as permissionCount FROM ' +
roleTable +
' r INNER JOIN ' +

View file

@ -0,0 +1,19 @@
import { Logger } from '@adonisjs/core/logger';
import { HttpContext } from '@adonisjs/core/http';
import { NextFn } from '@adonisjs/core/types/http';
/**
* The container bindings middleware binds classes to their request
* specific value using the container resolver.
*
* - We bind "HttpContext" class to the "ctx" object
* - And bind "Logger" class to the "ctx.logger" object
*/
export default class ContainerBindingsMiddleware {
handle(ctx: HttpContext, next: NextFn) {
ctx.containerResolver.bindValue(HttpContext, ctx);
ctx.containerResolver.bindValue(Logger, ctx.logger);
return next();
}
}

View file

@ -0,0 +1,27 @@
import type { HttpContext } from '@adonisjs/core/http';
import type { NextFn } from '@adonisjs/core/types/http';
import type { Authenticators } from '@adonisjs/auth/types';
/**
* Guest middleware is used to deny access to routes that should
* be accessed by unauthenticated users.
*
* For example, the login page should not be accessible if the user
* is already logged-in
*/
export default class GuestMiddleware {
/**
* The URL to redirect to when user is logged-in
*/
redirectTo = '/';
async handle(ctx: HttpContext, next: NextFn, options: { guards?: (keyof Authenticators)[] } = {}) {
for (let guard of options.guards || [ctx.auth.defaultGuard]) {
if (await ctx.auth.use(guard).check()) {
return ctx.response.redirect(this.redirectTo, true);
}
}
return next();
}
}

View file

@ -0,0 +1,43 @@
/*
* This middleware class normalizes newlines in the request input data by replacing
* all occurrences of '\r\n' with '\n' recursively for strings, arrays, and objects.
*/
import type { HttpContext } from '@adonisjs/core/http';
import type { NextFn } from '@adonisjs/core/types/http';
export default class NormalizeNewlinesMiddleware {
async handle(ctx: HttpContext, next: NextFn) {
// Function to recursively normalize newlines
const normalizeNewlines = (input: any): any => {
if (typeof input === 'string') {
return input.replace(/\r\n/g, '\n');
} else if (Array.isArray(input)) {
return input.map((item) => normalizeNewlines(item));
} else if (typeof input === 'object' && input !== null) {
for (const key in input) {
input[key] = normalizeNewlines(input[key]);
}
return input;
}
return input;
};
/**
* Middleware logic goes here (before the next call)
*/
// console.log(ctx)
// Get all request input
const input = ctx.request.all();
// Normalize newlines in text inputs
const normalizedInput = normalizeNewlines(input);
// Replace request input with normalized data
ctx.request.updateBody(normalizedInput);
/**
* Call next method in the pipeline and return its output
*/
const output = await next();
return output;
}
}
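This middleware is typically registered in the global router middleware stack so every request body is normalized before validation runs. A v6 registration sketch; the placement right after the bodyparser and the import path are assumptions.

// start/kernel.ts (sketch)
import router from '@adonisjs/core/services/router';

router.use([
    () => import('@adonisjs/core/bodyparser_middleware'),
    () => import('#middleware/normalize_newlines_middleware'),
]);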

View file

@ -1,23 +1,25 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import Database from '@ioc:Adonis/Lucid/Database';
import Config from '@ioc:Adonis/Core/Config';
import User from 'app/Models/User';
import { Exception } from '@adonisjs/core/build/standalone';
import type { HttpContext } from '@adonisjs/core/http';
import db from '@adonisjs/lucid/services/db';
import config from '@adonisjs/core/services/config';
import User from '#models/user';
import { Exception } from '@adonisjs/core/exceptions';
const roleTable = Config.get('rolePermission.role_table', 'roles');
const userRoleTable = Config.get('rolePermission.user_role_table', 'link_accounts_roles');
// const roleTable = Config.get('rolePermission.role_table', 'roles');
const roleTable = config.get('rolePermission.role_table', 'roles');
// const userRoleTable = Config.get('rolePermission.user_role_table', 'link_accounts_roles');
const userRoleTable = config.get('rolePermission.user_role_table', 'user_roles');
// node ace make:middleware role
export default class Role {
// .middleware(['auth', 'role:admin,moderator'])
public async handle({ auth, response }: HttpContextContract, next: () => Promise<void>, userRoles: string[]) {
public async handle({ auth, response }: HttpContext, next: () => Promise<void>, userRoles: string[]) {
// Check if user is logged-in or not.
// let expression = "";
// if (Array.isArray(args)) {
// expression = args.join(" || ");
// }
let user = await auth.user;
let user = auth.user as User;
if (!user) {
return response.unauthorized({ error: 'Must be logged in' });
}
@ -28,7 +30,7 @@ export default class Role {
// error: `Doesn't have required role(s): ${userRoles.join(',')}`,
// // error: `Doesn't have required role(s)`,
// });
throw new Exception(`Doesn't have required role(s): ${userRoles.join(',')}`, 401);
throw new Exception(`Doesn't have required role(s): ${userRoles.join(',')}`, { status: 401 });
}
// code for middleware goes here. ABOVE THE NEXT CALL
@ -62,7 +64,7 @@ export default class Role {
rows: {
0: { rolecount },
},
} = await Database.rawQuery(
} = await db.rawQuery(
'SELECT count("r"."id") as roleCount FROM ' +
roleTable +
' r INNER JOIN ' +

View file

@ -1,4 +1,4 @@
import type { HttpContextContract } from '@ioc:Adonis/Core/HttpContext';
import type { HttpContext } from '@adonisjs/core/http';
/**
* Silent auth middleware can be used as a global middleware to silent check
@ -10,7 +10,7 @@ export default class SilentAuthMiddleware {
/**
* Handle request
*/
public async handle({ auth }: HttpContextContract, next: () => Promise<void>) {
public async handle({ auth }: HttpContext, next: () => Promise<void>) {
/**
* Check if user is logged-in or not. If yes, then `ctx.auth.user` will be
* set to the instance of the currently logged in user.

View file

@ -0,0 +1,47 @@
import type { HttpContext } from '@adonisjs/core/http';
import type { NextFn } from '@adonisjs/core/types/http';
declare global {
function myFunction(): boolean;
var myVariable: number;
interface StardustData {
pathname?: string;
namedRoutes?: Record<string, string>;
}
var stardust: StardustData;
}
export default class StardustMiddleware {
async handle(ctx: HttpContext, next: NextFn): Promise<void> {
/**
* Middleware logic goes here (before the next call)
*/
// Check if the request is an API request
if (!ctx.request.url().startsWith('/api')) {
// Middleware logic for non-API requests
const { pathname } = new URL(ctx.request.completeUrl()); // '/', '/app/login'
globalThis.myFunction = () => {
return true;
};
globalThis.myVariable = 1;
globalThis.stardust = {
...globalThis.stardust,
pathname,
};
/**
* Call next method in the pipeline and return its output
*/
await next();
} else {
// Skip middleware for API requests
await next();
}
}
}

View file

@ -1,10 +1,11 @@
import { column, BaseModel, SnakeCaseNamingStrategy, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import Dataset from './Dataset';
import { column, BaseModel, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import Dataset from './dataset.js';
import { builder, create } from 'xmlbuilder2';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces';
import Database from '@ioc:Adonis/Lucid/Database';
import dayjs from 'dayjs';
import { XMLBuilder } from 'xmlbuilder2/lib/interfaces.js';
import db from '@adonisjs/lucid/services/db';
import { DateTime } from 'luxon';
import type { BelongsTo } from '@adonisjs/lucid/types/relations';
import logger from '@adonisjs/core/services/logger';
export default class DocumentXmlCache extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -66,33 +67,38 @@ export default class DocumentXmlCache extends BaseModel {
}
/**
* Check if a dataset in a specific xml version is already cached or not.
* Check if a valid (non-stale) cache entry exists
* Cache is valid only if it was created AFTER the dataset's last modification
*
* @param mixed datasetId
* @param mixed serverDateModified
* @returns {Promise<boolean>} Returns true on cached hit else false.
* @param datasetId - The dataset ID to check
* @param datasetServerDateModified - The dataset's last modification timestamp
* @returns true if valid cache exists, false otherwise
*/
// public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
// // const formattedDate = dayjs(datasetServerDateModified).format('YYYY-MM-DD HH:mm:ss');
// const query = Database.from(this.table)
// .where('document_id', datasetId)
// .where('server_date_modified', '2023-08-17 16:51:03')
// .first();
// const row = await query;
// return !!row;
// }
// Assuming 'DocumentXmlCache' has a table with a 'server_date_modified' column in your database
public static async hasValidEntry(datasetId: number, datasetServerDateModified: DateTime): Promise<boolean> {
const serverDateModifiedString: string = datasetServerDateModified.toFormat('yyyy-MM-dd HH:mm:ss'); // Format DateTime as a SQL timestamp string
const query = Database.from(this.table)
const row = await db
.from(this.table)
.where('document_id', datasetId)
.where('server_date_modified', '>=', serverDateModifiedString) // Check if server_date_modified is newer or equal
.where('server_date_modified', '>', serverDateModifiedString) // cache entry must be strictly newer than the dataset's last modification
.first();
const row = await query;
return !!row;
const isValid = !!row;
if (isValid) {
logger.debug(`Valid cache found for dataset ${datasetId}`);
} else {
logger.debug(`No valid cache for dataset ${datasetId} (dataset modified: ${serverDateModifiedString})`);
}
return isValid;
}
/**
* Invalidate (delete) cache entry
*/
public async invalidate(): Promise<void> {
await this.delete();
logger.debug(`Invalidated cache for document ${this.document_id}`);
}
}
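A sketch of how the validity check and invalidation above can be used around dataset updates. The helper function and the import paths are illustrative; only the model methods and the server_date_modified field come from the code in this diff.

import DocumentXmlCache from '#models/document_xml_cache'; // path assumed
import Dataset from '#models/dataset'; // path assumed

export async function dropStaleXmlCache(dataset: Dataset): Promise<void> {
    const stillValid = await DocumentXmlCache.hasValidEntry(dataset.id, dataset.server_date_modified);
    if (!stillValid) {
        const stale = await DocumentXmlCache.findBy('document_id', dataset.id);
        if (stale) {
            await stale.invalidate(); // deletes the row; the next OAI request rebuilds and re-caches the XML
        }
    }
}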

33
app/models/appconfig.ts Normal file
View file

@ -0,0 +1,33 @@
import BaseModel from './base_model.js';
import { column } from '@adonisjs/lucid/orm';
export default class AppConfig extends BaseModel {
public static table = 'appconfigs'; // Specify the table name if it differs from the model name
@column({ isPrimary: true })
public id: number;
@column()
public appid: string;
@column()
public configkey: string;
@column()
public configvalue: string | null;
@column()
public type: number;
@column()
public lazy: number;
// async function setConfig(key: string, value: string) {
// await this.updateOrCreate({ key }, { value })
// }
// async function getConfig(key: string) {
// const config = await this.findBy('key', key)
// return config ? config.value : null
// }
}

51
app/models/backup_code.ts Normal file
View file

@ -0,0 +1,51 @@
import BaseModel from './base_model.js';
import { column, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import User from './user.js';
import type { BelongsTo } from '@adonisjs/lucid/types/relations';
import db from '@adonisjs/lucid/services/db';
import hash from '@adonisjs/core/services/hash';
export default class BackupCode extends BaseModel {
public static table = 'backupcodes';
public static namingStrategy = new SnakeCaseNamingStrategy();
@column({
isPrimary: true,
})
public id: number;
@column({})
public user_id: number;
@column({
// serializeAs: null,
// consume: (value: string) => (value ? JSON.parse(encryption.decrypt(value) ?? '{}') : null),
// prepare: (value: string) => encryption.encrypt(JSON.stringify(value)),
})
public code: string;
@column({})
public used: boolean;
@belongsTo(() => User, {
foreignKey: 'user_id',
})
public user: BelongsTo<typeof User>;
// public static async getBackupCodes(user: User): Promise<BackupCode[]> {
// return await db.from(this.table).select('id', 'user_id', 'code', 'used').where('user_id', user.id);
// }
public static async deleteCodes(user: User): Promise<void> {
await db.from(this.table).where('user_id', user.id).delete();
}
public static async deleteCodesByUserId(uid: string): Promise<void> {
await db.from(this.table).where('user_id', uid).delete();
}
// Method to verify password
public async verifyCode(plainCode: string) {
return await hash.verify(this.code, plainCode);
}
}

View file

@ -1,4 +1,5 @@
import { BaseModel as LucidBaseModel } from '@ioc:Adonis/Lucid/Orm';
// import { BaseModel as LucidBaseModel } from '@adonisjs/lucid/orm';
import { BaseModel as LucidBaseModel } from '@adonisjs/lucid/orm';
// import { ManyToManyQueryClient } from '@ioc:Adonis/Lucid/Orm';
// export class CustomManyToManyQueryClient extends ManyToManyQueryClient {
@ -13,7 +14,6 @@ import { BaseModel as LucidBaseModel } from '@ioc:Adonis/Lucid/Orm';
// }
// }
/**
* Helper to find if value is a valid Object or
* not
@ -22,7 +22,7 @@ export function isObject(value: any): boolean {
return value !== null && typeof value === 'object' && !Array.isArray(value);
}
export default class BaseModel extends LucidBaseModel {
export default class BaseModel extends LucidBaseModel {
/**
* When `fill` method is called, then we may have a situation where it
* removed the values which exists in `original` and hence the dirty
@ -30,6 +30,9 @@ export default class BaseModel extends LucidBaseModel {
*/
// private fillInvoked: boolean = false;
// [key: string]: any;
public static fillable: string[] = [];
public fill(attributes: any, allowExtraProperties: boolean = false): this {
@ -48,7 +51,7 @@ export default class BaseModel extends LucidBaseModel {
// this.fillInvoked = true;
return this;
}
/**
* Merge bulk attributes with existing attributes.
*
@ -117,9 +120,10 @@ export default class BaseModel extends LucidBaseModel {
return this;
}
}
// export class DatasetRelatedBaseModel extends LucidBaseModel {
// public dataset: BelongsTo<typeof Dataset>;
// }

View file

@ -1,6 +1,9 @@
import { column, SnakeCaseNamingStrategy, manyToMany, ManyToMany } from '@ioc:Adonis/Lucid/Orm';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import { column, SnakeCaseNamingStrategy, manyToMany, belongsTo } from '@adonisjs/lucid/orm';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import CollectionRole from './collection_role.js';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class Collection extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -43,4 +46,9 @@ export default class Collection extends BaseModel {
pivotTable: 'link_documents_collections',
})
public datasets: ManyToMany<typeof Dataset>;
@belongsTo(() => CollectionRole, {
foreignKey: 'role_id',
})
public collectionRole: BelongsTo<typeof CollectionRole>;
}

View file

@ -0,0 +1,39 @@
import { column, SnakeCaseNamingStrategy, hasMany } from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
import Collection from './collection.js';
import type { HasMany } from "@adonisjs/lucid/types/relations";
export default class CollectionRole extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'collections_roles';
public static fillable: string[] = ['name', 'oai_name', 'visible'];
@column({
isPrimary: true,
})
public id: number;
@column({})
public name: string;
@column({})
public oai_name?: string;
@column({})
public position: number;
@column({})
public visible: boolean;
@column({})
public visible_frontdoor: boolean;
@column({})
public visible_oai: boolean;
@hasMany(() => Collection, {
foreignKey: 'role_id',
})
public collections: HasMany<typeof Collection>;
}
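A short sketch of how the new hasMany/belongsTo pair between CollectionRole and Collection might be consumed, assuming visible roles are listed in position order; the helper name is illustrative.

```ts
import CollectionRole from '#models/collection_role';

// Load all visible roles with their collections, ordered for display.
export async function visibleCollectionTree() {
  return await CollectionRole.query()
    .where('visible', true)
    .orderBy('position', 'asc')
    .preload('collections');
}
```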

View file

@ -1,7 +1,8 @@
import { column, SnakeCaseNamingStrategy, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import { column, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class Coverage extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();

View file

@ -2,31 +2,31 @@ import {
column,
SnakeCaseNamingStrategy,
manyToMany,
ManyToMany,
belongsTo,
BelongsTo,
hasMany,
HasMany,
computed,
hasOne,
HasOne,
} from '@ioc:Adonis/Lucid/Orm';
hasOne
} from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Person from './Person';
import User from './User';
import Title from './Title';
import Description from './Description';
import License from './License';
import Subject from './Subject';
import File from './File';
import Coverage from './Coverage';
import DatasetReference from './DatasetReference';
import Collection from './Collection';
import DatasetIdentifier from './DatasetIdentifier';
import Project from './Project';
import DocumentXmlCache from './DocumentXmlCache';
import DatasetExtension from 'App/Models/Traits/DatasetExtension'; // Adjust the import path
import Person from './person.js';
import User from './user.js';
import Title from './title.js';
import Description from './description.js';
import License from './license.js';
import Subject from './subject.js';
import File from './file.js';
import Coverage from './coverage.js';
import DatasetReference from './dataset_reference.js';
import Collection from './collection.js';
import DatasetIdentifier from './dataset_identifier.js';
import Project from './project.js';
import DocumentXmlCache from './DocumentXmlCache.js';
import DatasetExtension from '#models/traits/dataset_extension';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
import type { HasMany } from "@adonisjs/lucid/types/relations";
import type { HasOne } from "@adonisjs/lucid/types/relations";
export default class Dataset extends DatasetExtension {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -46,7 +46,12 @@ export default class Dataset extends DatasetExtension {
@column({ columnName: 'creating_corporation' })
public creating_corporation: string;
@column.dateTime({ columnName: 'embargo_date' })
@column.dateTime({
columnName: 'embargo_date',
serialize: (value: Date | null) => {
return value ? dayjs(value).format('YYYY-MM-DD') : value;
},
})
public embargo_date: DateTime;
@column({})
@ -55,7 +60,7 @@ export default class Dataset extends DatasetExtension {
@column({})
public language: string;
@column({})
@column({columnName: 'publish_id'})
public publish_id: number | null = null;
@column({})
@ -95,7 +100,14 @@ export default class Dataset extends DatasetExtension {
})
public created_at: DateTime;
@column.dateTime({ autoCreate: true, autoUpdate: true, columnName: 'server_date_modified' })
@column.dateTime({
serialize: (value: Date | null) => {
return value ? dayjs(value).format('MMMM D YYYY HH:mm a') : value;
},
autoCreate: true,
autoUpdate: true,
columnName: 'server_date_modified',
})
public server_date_modified: DateTime;
@manyToMany(() => Person, {
@ -188,6 +200,24 @@ export default class Dataset extends DatasetExtension {
return mainTitle ? mainTitle.value : null;
}
@computed({
serializeAs: 'main_abstract',
})
public get mainAbstract() {
// return `${this.firstName} ${this.lastName}`;
const mainTitle = this.descriptions?.find((desc) => desc.type === 'Abstract');
return mainTitle ? mainTitle.value : null;
}
@computed({
serializeAs: 'doi_identifier',
})
public get doiIdentifier() {
// return `${this.firstName} ${this.lastName}`;
const identifier: DatasetIdentifier = this.identifier;
return identifier ? identifier.value : null;
}
@manyToMany(() => Person, {
pivotForeignKey: 'document_id',
pivotRelatedForeignKey: 'person_id',
@ -203,7 +233,7 @@ export default class Dataset extends DatasetExtension {
pivotForeignKey: 'document_id',
pivotRelatedForeignKey: 'person_id',
pivotTable: 'link_documents_persons',
pivotColumns: ['role', 'sort_order', 'allow_email_contact'],
pivotColumns: ['role', 'sort_order', 'allow_email_contact', 'contributor_type'],
onQuery(query) {
query.wherePivot('role', 'contributor');
},
@ -214,4 +244,44 @@ export default class Dataset extends DatasetExtension {
foreignKey: 'document_id',
})
public xmlCache: HasOne<typeof DocumentXmlCache>;
/**
* Get the account that the dataset belongs to
*/
@belongsTo(() => User, {
foreignKey: 'editor_id',
})
public editor: BelongsTo<typeof User>;
@belongsTo(() => User, {
foreignKey: 'reviewer_id',
})
public reviewer: BelongsTo<typeof User>;
static async earliestPublicationDate(): Promise<Dataset | null> {
const serverState = 'published';
const model = await this.query().where('server_state', serverState).orderBy('server_date_published', 'asc').first();
return model || null;
}
static async getMax (column: string) {
let dataset = await this.query().max(column + ' as max_publish_id').firstOrFail();
return dataset.$extras.max_publish_id;
}
@computed({
serializeAs: 'remaining_time',
})
public get remainingTime() {
const dateFuture = this.server_date_modified.plus({ days: 14 });
if (this.server_state === 'approved') {
const now = DateTime.now();
let duration = dateFuture.diff(now, ['days', 'hours', 'months']).toObject();
return duration.days;
} else {
return 0;
}
}
}
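The new static helpers (getMax, earliestPublicationDate) suggest the publish workflow. A hedged sketch of assigning the next publish_id; the helper name and the "max + 1" policy are assumptions, not part of this diff.

```ts
import Dataset from '#models/dataset';

// Assign the next free publish_id before a dataset is published.
export async function assignNextPublishId(dataset: Dataset): Promise<void> {
  const max = await Dataset.getMax('publish_id'); // reads $extras.max_publish_id
  dataset.publish_id = Number(max ?? 0) + 1;
  await dataset.save();
}
```

Once a dataset is in the 'approved' state, its serialized form also exposes remaining_time, the number of days left of the 14-day window computed above.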

View file

@ -1,7 +1,8 @@
import { column, SnakeCaseNamingStrategy, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import { column, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class DatasetIdentifier extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -20,6 +21,9 @@ export default class DatasetIdentifier extends BaseModel {
@column({})
public type: string;
@column({})
public status: string;
@column({})
public value: string;
@ -38,4 +42,9 @@ export default class DatasetIdentifier extends BaseModel {
foreignKey: 'dataset_id',
})
public dataset: BelongsTo<typeof Dataset>;
// // Specify the relationships to touch when this model is updated
// public static get touches() {
// return ['dataset'];
// }
}

View file

@ -1,7 +1,8 @@
import { column, SnakeCaseNamingStrategy, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import { column, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class DatasetReference extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();

View file

@ -1,6 +1,7 @@
import { column, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import { column, belongsTo } from '@adonisjs/lucid/orm';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class Description extends BaseModel {
public static primaryKey = 'id';
@ -9,6 +10,11 @@ export default class Description extends BaseModel {
public static timestamps = false;
public static fillable: string[] = ['value', 'type', 'language'];
@column({
isPrimary: true,
})
public id: number;
@column({})
public document_id: number;

185
app/models/file.ts Normal file
View file

@ -0,0 +1,185 @@
import { DateTime } from 'luxon';
import { column, hasMany, belongsTo, SnakeCaseNamingStrategy, computed } from '@adonisjs/lucid/orm';
import HashValue from './hash_value.js';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import * as fs from 'fs';
import crypto from 'crypto';
// import Drive from '@ioc:Adonis/Core/Drive';
// import Drive from '@adonisjs/drive';
// import drive from '#services/drive';
import drive from '@adonisjs/drive/services/main';
import type { HasMany } from "@adonisjs/lucid/types/relations";
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
// import { TransactionClientContract } from "@adonisjs/lucid/database";
import { TransactionClientContract } from '@adonisjs/lucid/types/database';
export default class File extends BaseModel {
// private readonly _data: Uint8Array;
// private readonly _type: string;
// private readonly _size: number;
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'document_files';
public static selfAssignPrimaryKey = false;
@column({
isPrimary: true,
})
public id: number;
@column({})
public document_id: number;
@column({})
public pathName: string;
@column()
public label: string;
@column()
public comment: string;
@column()
public mimeType: string;
@column()
public language: string;
@column()
public fileSize: number;
@column()
public visibleInOai: boolean;
@column()
public visibleInFrontdoor: boolean;
@column()
public sortOrder: number;
@column.dateTime({ autoCreate: true })
public createdAt: DateTime;
@column.dateTime({ autoCreate: true, autoUpdate: true })
public updatedAt: DateTime;
// public function dataset()
// {
// return $this->belongsTo(Dataset::class, 'document_id', 'id');
// }
@belongsTo(() => Dataset, {
foreignKey: 'document_id',
})
public dataset: BelongsTo<typeof Dataset>;
@hasMany(() => HashValue, {
foreignKey: 'file_id',
})
public hashvalues: HasMany<typeof HashValue>;
@computed({
serializeAs: 'filePath',
})
public get filePath() {
// return `/storage/app/public/${this.pathName}`;
return `/storage/app/data/${this.pathName}`;
// const mainTitle = this.titles?.find((title) => title.type === 'Main');
// return mainTitle ? mainTitle.value : null;
}
@computed({
serializeAs: 'size',
})
public get size() {
return this.fileSize;
}
@computed({
serializeAs: 'type',
})
public get type() {
return this.mimeType;
}
@computed({
serializeAs: 'name',
})
get name(): string {
return this.label;
}
@computed({
serializeAs: 'lastModified',
})
get lastModified(): number {
return this.updatedAt.toUnixInteger(); //.toFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
}
readonly webkitRelativePath: string = '';
// @computed({
// serializeAs: 'fileData',
// })
// public get fileData(): string {
// try {
// const fileContent: Buffer = fs.readFileSync(this.filePath);
// // Create a Blob from the file content
// // const blob = new Blob([fileContent], { type: this.type }); // Adjust
// // let fileSrc = URL.createObjectURL(blob);
// // return fileSrc;
// // create a JSON string that contains the data in the property "blob"
// const json = JSON.stringify({ blob: fileContent.toString('base64') });
// return json;
// } catch (err) {
// // console.error(`Error reading file: ${err}`);
// return '';
// }
// }
public async createHashValues(trx?: TransactionClientContract) {
const hashtypes: string[] = ['md5', 'sha512'];
for (const type of hashtypes) {
const hash = new HashValue();
hash.type = type;
const hashString = await this._checksumFile(this.filePath, type); // stream the file and compute the hex digest for this hash type
hash.value = hashString;
// https://github.com/adonisjs/core/discussions/1872#discussioncomment-132289
const file: File = this;
if (trx) {
await file.useTransaction(trx).related('hashvalues').save(hash); // Save the hash value to the database
} else {
await file.related('hashvalues').save(hash); // Save the hash value to the database
}
}
}
public async delete() {
if (this.pathName) {
// Delete file from additional storage
await drive.use('local').delete(this.pathName);
}
// Call the original delete method of the BaseModel to remove the record from the database
await super.delete();
}
private async _checksumFile(path: string, hashName = 'md5'): Promise<string> {
return new Promise((resolve, reject) => {
const hash = crypto.createHash(hashName);
const stream = fs.createReadStream(path);
stream.on('error', (err) => reject(err));
stream.on('data', (chunk) => hash.update(chunk));
stream.on('end', () => resolve(hash.digest('hex')));
});
}
}
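A sketch of wiring createHashValues() into an upload flow so the file row and its md5/sha512 hashes are persisted atomically. It assumes the physical file has already been written under /storage/app/data/&lt;pathName&gt; (matching filePath above); the controller-side field values are illustrative.

```ts
import db from '@adonisjs/lucid/services/db';
import File from '#models/file';

export async function storeFileRecord(datasetId: number, pathName: string, size: number, mime: string) {
  return await db.transaction(async (trx) => {
    const file = new File();
    file.useTransaction(trx);
    file.document_id = datasetId;
    file.pathName = pathName;
    file.label = pathName;
    file.mimeType = mime;
    file.fileSize = size;
    file.visibleInOai = true;
    file.visibleInFrontdoor = true;
    file.sortOrder = 1;
    await file.save();

    // Streams the file, computes md5 and sha512, and saves the HashValue rows
    // through the same transaction.
    await file.createHashValues(trx);
    return file;
  });
}
```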

View file

@ -1,9 +1,10 @@
import { column, BaseModel, belongsTo, BelongsTo, SnakeCaseNamingStrategy } from '@ioc:Adonis/Lucid/Orm';
import File from './File';
import { column, BaseModel, belongsTo, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import File from './file.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class HashValue extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'file_id, type';
// public static primaryKey = 'file_id,type';
public static table = 'file_hashvalues';
// static get primaryKey () {
@ -20,10 +21,10 @@ export default class HashValue extends BaseModel {
// public id: number;
// Foreign key is still on the same model
@column({})
@column({ isPrimary: true })
public file_id: number;
@column({})
@column({ isPrimary: true })
public type: string;
@column()

View file

@ -1,5 +1,5 @@
import { column, SnakeCaseNamingStrategy } from '@ioc:Adonis/Lucid/Orm';
import BaseModel from './BaseModel';
import { column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
// import { DateTime } from 'luxon';
export default class Language extends BaseModel {

View file

@ -1,5 +1,5 @@
import { column, SnakeCaseNamingStrategy } from '@ioc:Adonis/Lucid/Orm';
import BaseModel from './BaseModel';
import { column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
export default class License extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();

45
app/models/mime_type.ts Normal file
View file

@ -0,0 +1,45 @@
import { column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
import { DateTime } from 'luxon';
export default class MimeType extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'mime_types';
public static fillable: string[] = ['name', 'file_extension', 'enabled'];
@column({
isPrimary: true,
})
public id: number;
@column({})
public name: string;
// 1 : n file_extensions are separated by '|' in the database
@column({})
public file_extension: string;
// 1 : n alternate_mimetype are separated by '|' in the database
@column({})
public alternate_mimetype: string;
@column({})
public enabled: boolean;
@column.dateTime({
autoCreate: true,
})
public created_at: DateTime;
@column.dateTime({
autoCreate: true,
autoUpdate: true,
})
public updated_at: DateTime;
// @hasMany(() => Collection, {
// foreignKey: 'role_id',
// })
// public collections: HasMany<typeof Collection>;
}
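Because file_extension packs several extensions into one pipe-separated string, consumers have to split it. A small sketch of deriving the allowed upload extensions from the enabled rows; the helper name is illustrative.

```ts
import MimeType from '#models/mime_type';

// Collect the distinct, enabled file extensions, e.g. ['csv', 'txt', 'zip'].
export async function allowedExtensions(): Promise<string[]> {
  const types = await MimeType.query().where('enabled', true);
  const extensions = types.flatMap((t) => (t.file_extension ?? '').split('|'));
  return [...new Set(extensions.map((e) => e.trim()).filter((e) => e.length > 0))];
}
```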

View file

@ -1,8 +1,9 @@
import { column, manyToMany, ManyToMany, SnakeCaseNamingStrategy, beforeUpdate, beforeCreate } from '@ioc:Adonis/Lucid/Orm';
import { column, manyToMany, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Role from 'App/Models/Role';
import BaseModel from './BaseModel';
import Role from '#models/role';
import BaseModel from './base_model.js';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
export default class Permission extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -44,12 +45,12 @@ export default class Permission extends BaseModel {
})
public updated_at: DateTime;
@beforeCreate()
@beforeUpdate()
public static async resetDate(role) {
role.created_at = this.formatDateTime(role.created_at);
role.updated_at = this.formatDateTime(role.updated_at);
}
// @beforeCreate()
// @beforeUpdate()
// public static async resetDate(role) {
// role.created_at = this.formatDateTime(role.created_at);
// role.updated_at = this.formatDateTime(role.updated_at);
// }
// public static boot() {
// super.boot()
@ -64,22 +65,22 @@ export default class Permission extends BaseModel {
// })
// }
private static formatDateTime(datetime) {
let value = new Date(datetime);
return datetime
? value.getFullYear() +
'-' +
(value.getMonth() + 1) +
'-' +
value.getDate() +
' ' +
value.getHours() +
':' +
value.getMinutes() +
':' +
value.getSeconds()
: datetime;
}
// private static formatDateTime(datetime) {
// let value = new Date(datetime);
// return datetime
// ? value.getFullYear() +
// '-' +
// (value.getMonth() + 1) +
// '-' +
// value.getDate() +
// ' ' +
// value.getHours() +
// ':' +
// value.getMinutes() +
// ':' +
// value.getSeconds()
// : datetime;
// }
// @belongsTo(() => Role)
// public role: BelongsTo<typeof Role>;

128
app/models/person.ts Normal file
View file

@ -0,0 +1,128 @@
import { column, SnakeCaseNamingStrategy, computed, manyToMany } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { ManyToMany } from '@adonisjs/lucid/types/relations';
export default class Person extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static primaryKey = 'id';
public static table = 'persons';
public static selfAssignPrimaryKey = false;
// only the academic_title, email, first_name, identifier_orcid, last_name and name_type attributes are allowed to be mass assigned.
public static fillable: string[] = ['academic_title', 'email', 'first_name', 'identifier_orcid', 'last_name', 'name_type'];
@column({
isPrimary: true,
})
public id: number;
@column({ columnName: 'academic_title' })
public academicTitle: string;
@column()
public email: string;
@column({})
public firstName: string;
@column({})
public lastName: string;
@column({ columnName: 'identifier_orcid' })
public identifierOrcid: string;
@column({})
public status: boolean;
@column({})
public nameType: string;
@column.dateTime({
serialize: (value: Date | null) => {
return value ? dayjs(value).format('MMMM D YYYY HH:mm a') : value;
},
autoCreate: true,
})
public createdAt: DateTime;
@computed({
serializeAs: 'name',
})
public get fullName() {
return [this.firstName, this.lastName].filter(Boolean).join(' ');
}
// @computed()
// public get progress(): number {
// return 50;
// }
// @computed()
// public get created_at() {
// return '2023-03-21 08:45:00';
// }
@computed({
serializeAs: 'dataset_count',
})
public get datasetCount() {
const stock = this.$extras.datasets_count; // populated by withCount('datasets') on the query
return Number(stock);
}
@computed()
public get pivot_contributor_type() {
const contributor_type = this.$extras.pivot_contributor_type; // pivot column from link_documents_persons
return contributor_type;
}
@computed({ serializeAs: 'allow_email_contact' })
public get allowEmailContact() {
// If the datasets relation is missing or empty, return false instead of null.
if (!this.datasets || this.datasets.length === 0) {
return false;
}
// Otherwise return the pivot attribute from the first related dataset.
return this.datasets[0].$extras?.pivot_allow_email_contact;
}
@manyToMany(() => Dataset, {
pivotForeignKey: 'person_id',
pivotRelatedForeignKey: 'document_id',
pivotTable: 'link_documents_persons',
pivotColumns: ['role', 'sort_order', 'allow_email_contact'],
})
public datasets: ManyToMany<typeof Dataset>;
// public toJSON() {
// const json = super.toJSON();
// // Check if this person is loaded through a pivot relationship with sensitive roles
// const pivotRole = this.$extras?.pivot_role;
// if (pivotRole === 'author' || pivotRole === 'contributor') {
// // Remove sensitive information for public-facing roles
// delete json.email;
// // delete json.identifierOrcid;
// }
// return json;
// }
// @afterFind()
// public static async afterFindHook(person: Person) {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// }
// @afterFetch()
// public static async afterFetchHook(persons: Person[]) {
// persons.forEach(person => {
// if (person.$extras?.pivot_role === 'author' || person.$extras?.pivot_role === 'contributor') {
// person.email = undefined as any;
// }
// });
// }
}
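The computed getters above only have data to read when the query supplies the matching aggregate and pivot columns. A minimal sketch of such a query, assuming Lucid's default withCount alias datasets_count.

```ts
import Person from '#models/person';

// datasetCount reads $extras.datasets_count; allowEmailContact reads the pivot
// column on the first preloaded dataset. Both must be requested explicitly.
export async function listPersonsWithCounts() {
  return await Person.query()
    .withCount('datasets')
    .preload('datasets', (q) => q.pivotColumns(['role', 'allow_email_contact']));
}
```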

View file

@ -1,6 +1,6 @@
import { column, SnakeCaseNamingStrategy } from '@ioc:Adonis/Lucid/Orm';
import { column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm';
import { DateTime } from 'luxon';
import BaseModel from './BaseModel';
import BaseModel from './base_model.js';
export default class Project extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();

View file

@ -1,10 +1,11 @@
import { column, SnakeCaseNamingStrategy, manyToMany, ManyToMany, beforeCreate, beforeUpdate } from '@ioc:Adonis/Lucid/Orm';
import BaseModel from './BaseModel';
import { column, SnakeCaseNamingStrategy, manyToMany, beforeCreate, beforeUpdate } from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
import { DateTime } from 'luxon';
// import moment from 'moment';
import dayjs from 'dayjs';
import User from './User';
import Permission from 'App/Models/Permission';
import User from './user.js';
import Permission from '#models/permission';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
export default class Role extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -46,7 +47,7 @@ export default class Role extends BaseModel {
@beforeCreate()
@beforeUpdate()
public static async resetDate(role) {
public static async resetDate(role: Role) {
role.created_at = this.formatDateTime(role.created_at);
role.updated_at = this.formatDateTime(role.updated_at);
}
@ -64,7 +65,7 @@ export default class Role extends BaseModel {
// });
// }
private static formatDateTime(datetime) {
private static formatDateTime(datetime: any) {
let value = new Date(datetime);
return datetime
? value.getFullYear() +

View file

@ -1,9 +1,10 @@
import { column, SnakeCaseNamingStrategy, manyToMany, ManyToMany, beforeCreate, beforeUpdate } from '@ioc:Adonis/Lucid/Orm';
import BaseModel from './BaseModel';
import { column, SnakeCaseNamingStrategy, manyToMany, computed} from '@adonisjs/lucid/orm';
import BaseModel from './base_model.js';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
import Dataset from './Dataset';
import Dataset from './dataset.js';
import type { ManyToMany } from "@adonisjs/lucid/types/relations";
export default class Subject extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
@ -44,28 +45,33 @@ export default class Subject extends BaseModel {
})
public updated_at: DateTime;
@beforeCreate()
@beforeUpdate()
public static async resetDate(role) {
role.created_at = this.formatDateTime(role.created_at);
role.updated_at = this.formatDateTime(role.updated_at);
}
// @beforeCreate()
// @beforeUpdate()
// public static async resetDate(role) {
// role.created_at = this.formatDateTime(role.created_at);
// role.updated_at = this.formatDateTime(role.updated_at);
// }
private static formatDateTime(datetime) {
let value = new Date(datetime);
return datetime
? value.getFullYear() +
'-' +
(value.getMonth() + 1) +
'-' +
value.getDate() +
' ' +
value.getHours() +
':' +
value.getMinutes() +
':' +
value.getSeconds()
: datetime;
// private static formatDateTime(datetime) {
// let value = new Date(datetime);
// return datetime
// ? value.getFullYear() +
// '-' +
// (value.getMonth() + 1) +
// '-' +
// value.getDate() +
// ' ' +
// value.getHours() +
// ':' +
// value.getMinutes() +
// ':' +
// value.getSeconds()
// : datetime;
// }
@computed()
public get dataset_count() : number{
const count = this.$extras.datasets_count; // populated by withCount('datasets') on the query
return count;
}
@manyToMany(() => Dataset, {

View file

@ -1,6 +1,8 @@
import { column, belongsTo, BelongsTo } from '@ioc:Adonis/Lucid/Orm';
import Dataset from './Dataset';
import BaseModel from './BaseModel';
import { column, belongsTo } from '@adonisjs/lucid/orm';
import Dataset from './dataset.js';
import BaseModel from './base_model.js';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
// import { DatasetRelatedBaseModel } from './BaseModel';
export default class Title extends BaseModel {
@ -10,6 +12,11 @@ export default class Title extends BaseModel {
public static timestamps = false;
public static fillable: string[] = ['value', 'type', 'language'];
@column({
isPrimary: true,
})
public id: number;
@column({})
public document_id: number;

65
app/models/totp_secret.ts Normal file
View file

@ -0,0 +1,65 @@
import { column, BaseModel, SnakeCaseNamingStrategy, belongsTo } from '@adonisjs/lucid/orm';
import User from './user.js';
import { DateTime } from 'luxon';
import dayjs from 'dayjs';
// import Encryption from '@ioc:Adonis/Core/Encryption';
import encryption from '@adonisjs/core/services/encryption';
import type { BelongsTo } from "@adonisjs/lucid/types/relations";
export default class TotpSecret extends BaseModel {
public static namingStrategy = new SnakeCaseNamingStrategy();
public static table = 'totp_secrets';
// public static fillable: string[] = ['value', 'label', 'type', 'relation'];
@column({
isPrimary: true,
})
public id: number;
@column({})
public user_id: number;
// @column()
// public twoFactorSecret: string;
@column({
serializeAs: null,
consume: (value: string) => (value ? JSON.parse(encryption.decrypt(value) ?? '{}') : null),
prepare: (value: string) => encryption.encrypt(JSON.stringify(value)),
})
public twoFactorSecret?: string | null;
// serializeAs: null removes the model properties from the serialized output.
@column({
serializeAs: null,
consume: (value: string) => (value ? JSON.parse(encryption.decrypt(value) ?? '[]') : []),
prepare: (value: string[]) => encryption.encrypt(JSON.stringify(value)),
})
public twoFactorRecoveryCodes?: string[] | null;
@column({})
public state: number;
@column.dateTime({
serialize: (value: Date | null) => {
// return value ? moment(value).format('MMMM Do YYYY, HH:mm:ss') : value;
return value ? dayjs(value).format('MMMM D YYYY HH:mm a') : value;
},
autoCreate: true,
})
public created_at: DateTime;
@column.dateTime({
serialize: (value: Date | null) => {
return value ? dayjs(value).format('MMMM D YYYY HH:mm a') : value;
},
autoCreate: true,
autoUpdate: true,
})
public updated_at: DateTime;
@belongsTo(() => User, {
foreignKey: 'user_id',
})
public user: BelongsTo<typeof User>;
}
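Because of the consume/prepare hooks, the TOTP secret is encrypted at rest, decrypted transparently on read, and excluded from serialized output (serializeAs: null). A minimal sketch of creating a row; the helper name and the meaning of state = 1 are assumptions.

```ts
import TotpSecret from '#models/totp_secret';
import User from '#models/user';

export async function enableTotp(user: User, secret: string): Promise<TotpSecret> {
  const row = new TotpSecret();
  row.user_id = user.id;
  row.twoFactorSecret = secret;     // prepare() encrypts before the INSERT
  row.twoFactorRecoveryCodes = [];  // stored as an encrypted JSON array
  row.state = 1;                    // assumed to mean "enabled"
  await row.save();
  return row;                       // reading the row back runs consume() and decrypts
}
```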

View file

@ -1,15 +1,15 @@
import Title from 'App/Models/Title';
import Description from 'App/Models/Description';
import License from 'App/Models/License';
import Person from 'App/Models/Person';
import DatasetReference from 'App/Models/DatasetReference';
import DatasetIdentifier from 'App/Models/DatasetIdentifier';
import Subject from 'App/Models/Subject';
import File from 'App/Models/File';
import Coverage from 'App/Models/Coverage';
import Collection from 'App/Models/Collection';
import { BaseModel as LucidBaseModel } from '@ioc:Adonis/Lucid/Orm';
import Field from 'App/Library/Field';
import Title from '#models/title';
import Description from '#models/description';
import License from '#models/license';
import Person from '#models/person';
import DatasetReference from '#models/dataset_reference';
import DatasetIdentifier from '#models/dataset_identifier';
import Subject from '#models/subject';
import File from '#models/file';
import Coverage from '#models/coverage';
import Collection from '#models/collection';
import { BaseModel as LucidBaseModel } from '@adonisjs/lucid/orm';
import Field from '#app/Library/Field';
import { DateTime } from 'luxon';
// @StaticImplements<LucidModel>()
@ -30,13 +30,13 @@ export type DatasetRelatedModel =
| typeof DatasetIdentifier
| typeof File;
export default abstract class DatasetExtension extends LucidBaseModel {
public abstract id;
public abstract id: number;
public externalFields: Record<string, any> = this.getExternalFields();
// which fields shouldn't be published
protected internalFields: Record<string, any> = {};
protected fields: Record<string, any> = {};
// [key: string]: any;
private getExternalFields(): Record<string, any> {
// External fields definition
@ -83,7 +83,7 @@ export default abstract class DatasetExtension extends LucidBaseModel {
sort_order: 'sort_order',
allow_email_contact: 'allow_email_contact',
},
relation: 'persons',
relation: 'contributors',
fetch: 'eager',
},
Reference: {
@ -160,7 +160,7 @@ export default abstract class DatasetExtension extends LucidBaseModel {
// // Initialize available date fields and set up date validator
// // if the particular field is present
let dateFields = new Array<string>('EmbargoDate', 'CreatedAt', 'ServerDatePublished', 'ServerDateDeleted');
let dateFields = new Array<string>('EmbargoDate', 'CreatedAt', 'ServerDateModified', 'ServerDatePublished', 'ServerDateDeleted');
dateFields.forEach((fieldname) => {
let dateField = this.getField(fieldname);
dateField instanceof Field && dateField.setValueModelClass(DateTime.now());
@ -323,7 +323,7 @@ export default abstract class DatasetExtension extends LucidBaseModel {
private convertColumnToFieldname(columnName: string): string {
return columnName
.split(/[-_]/)
.map((word) => (word.charAt(0).toUpperCase() + word.slice(1)))
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join('');
}
}
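For reference, convertColumnToFieldname() maps snake- or kebab-case column names to the PascalCase field names used throughout this trait. A standalone illustration of the same transformation:

```ts
// Illustration only: duplicates the private method above for clarity.
const toFieldname = (columnName: string): string =>
  columnName
    .split(/[-_]/)
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
    .join('');

console.log(toFieldname('server_date_modified')); // "ServerDateModified"
console.log(toFieldname('embargo_date'));         // "EmbargoDate"
```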

Some files were not shown because too many files have changed in this diff.