
Commit

[#38] updating with develop and making pydantic v2 specific code changes
pkdash committed May 29, 2024
2 parents 798a1b9 + 2f240fc commit 18d3608
Showing 112 changed files with 20,015 additions and 24,819 deletions.
20 changes: 10 additions & 10 deletions .github/workflows/ci.yaml
@@ -14,14 +14,14 @@ env:
DB_PROTOCOL: mongodb+srv
HYDROSHARE_META_READ_URL: https://www.hydroshare.org/hsapi2/resource/%s/json/
HYDROSHARE_FILE_READ_URL: https://www.hydroshare.org/hsapi/resource/%s/files/
VUE_APP_NAME: CZNet Discovery
VUE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VUE_APP_API_URL: api
VUE_APP_HEAP_ANALYTICS_APP_ID: ""
VUE_APP_GOOGLE_MAPS_API_KEY: ""
VUE_APP_SUPPORT_EMAIL: help@cuahsi.io
VUE_APP_REALM_APP_ID: data-axdrs
VUE_APP_SEARCH_RESOLVER: filtering_cznet
VITE_APP_NAME: I-GUIDE
VITE_APP_URL: https://iguide-dev.cuahsi.io
VITE_APP_API_URL: https://iguide-dev.cuahsi.io/api
VITE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VITE_APP_GOOGLE_MAPS_API_KEY: ""
VITE_APP_SUPPORT_EMAIL: help@example.com
VITE_APP_CLIENT_ID: APP-4ZA8C8BYAH3QHNE9
SEARCH_RELEVANCE_SCORE_THRESHOLD: 1.4


jobs:
@@ -37,7 +37,7 @@ jobs:
DB_USERNAME: ${{ secrets.DB_USERNAME }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
run: |
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VUE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL")
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VITE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL" "SEARCH_RELEVANCE_SCORE_THRESHOLD")
# Empty the .env file
> .env
@@ -49,7 +49,7 @@ jobs:
- name: Compile the frontend env file
run: |
variables=("VUE_APP_NAME" "VUE_APP_API_URL" "VUE_APP_SUPPORT_EMAIL" "VUE_APP_URL" "VUE_APP_LOGIN_URL" "VUE_APP_CLIENT_ID" "VUE_APP_GOOGLE_MAPS_API_KEY")
variables=("VITE_APP_NAME" "VITE_APP_API_URL" "VITE_APP_SUPPORT_EMAIL" "VITE_APP_URL" "VITE_APP_LOGIN_URL" "VITE_APP_CLIENT_ID" "VITE_APP_GOOGLE_MAPS_API_KEY")
# Empty the .env file
> frontend/.env
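The loop body that writes each variable into the `.env` file is collapsed in this view. Based on the array declaration, the `# Empty the .env file` comment, the `> .env` redirection, and the `done` visible in the deploy-dev diff below, the step presumably does something like the following sketch (the loop itself is an assumption, not the verbatim workflow contents):

```bash
# Sketch of the collapsed step: start from an empty .env file, then append
# NAME=value for every name in the array, using bash indirect expansion
# (${!var}) to read the value of the variable whose name is stored in $var.
> .env
for var in "${variables[@]}"; do
  echo "${var}=${!var}" >> .env
done
```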
11 changes: 11 additions & 0 deletions .github/workflows/dependabot.yml
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
- package-ecosystem: "npm" # See documentation for possible values
directory: "/frontend" # Location of package manifests
schedule:
interval: "weekly"
24 changes: 13 additions & 11 deletions .github/workflows/deploy-dev.yaml
@@ -11,18 +11,17 @@ env:
IP: iguide-dev
TESTING: false
OIDC_ISSUER: https://orcid.org
DATABASE_NAME: iguide_beta
DATABASE_NAME: iguide_dev
DB_PROTOCOL: mongodb+srv
HYDROSHARE_META_READ_URL: https://www.hydroshare.org/hsapi2/resource/%s/json/
HYDROSHARE_FILE_READ_URL: https://www.hydroshare.org/hsapi/resource/%s/files/
VUE_APP_NAME: CZNet Discovery
VUE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VUE_APP_API_URL: api
VUE_APP_HEAP_ANALYTICS_APP_ID: ""
VUE_APP_GOOGLE_MAPS_API_KEY: ""
VUE_APP_SUPPORT_EMAIL: [email protected]
VUE_APP_REALM_APP_ID: data-axdrs
VUE_APP_SEARCH_RESOLVER: filtering_cznet
VITE_APP_NAME: I-GUIDE
VITE_APP_URL: https://iguide-dev.cuahsi.io
VITE_APP_API_URL: https://iguide-dev.cuahsi.io/api
VITE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VITE_APP_SUPPORT_EMAIL: [email protected]
VITE_APP_CLIENT_ID: APP-4ZA8C8BYAH3QHNE9
SEARCH_RELEVANCE_SCORE_THRESHOLD: 1.4


jobs:
@@ -46,7 +45,7 @@ jobs:
DB_USERNAME: ${{ secrets.DB_USERNAME_BETA }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD_BETA }}
run: |
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VUE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL")
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VITE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL" "SEARCH_RELEVANCE_SCORE_THRESHOLD")
# Empty the .env file
> .env
@@ -57,8 +56,11 @@
done
- name: Compile the frontend env file
env:
VITE_APP_GOOGLE_MAPS_API_KEY: ${{ secrets.VITE_APP_GOOGLE_MAPS_API_KEY }}

run: |
variables=("VUE_APP_NAME" "VUE_APP_API_URL" "VUE_APP_SUPPORT_EMAIL" "VUE_APP_URL" "VUE_APP_LOGIN_URL" "VUE_APP_CLIENT_ID" "VUE_APP_GOOGLE_MAPS_API_KEY")
variables=("VITE_APP_NAME" "VITE_APP_API_URL" "VITE_APP_SUPPORT_EMAIL" "VITE_APP_URL" "VITE_APP_LOGIN_URL" "VITE_APP_CLIENT_ID" "VITE_APP_GOOGLE_MAPS_API_KEY")
# Empty the .env file
> frontend/.env
20 changes: 10 additions & 10 deletions .github/workflows/deploy.yaml
@@ -16,14 +16,14 @@ env:
DB_PROTOCOL: mongodb+srv
HYDROSHARE_META_READ_URL: https://www.hydroshare.org/hsapi2/resource/%s/json/
HYDROSHARE_FILE_READ_URL: https://www.hydroshare.org/hsapi/resource/%s/files/
VUE_APP_NAME: CZNet Discovery
VUE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VUE_APP_API_URL: api
VUE_APP_HEAP_ANALYTICS_APP_ID: ""
VUE_APP_GOOGLE_MAPS_API_KEY: ""
VUE_APP_SUPPORT_EMAIL: help@cuahsi.io
VUE_APP_REALM_APP_ID: data-axdrs
VUE_APP_SEARCH_RESOLVER: filtering_cznet
VITE_APP_NAME: I-GUIDE
VITE_APP_URL: https://iguide.cuahsi.io
VITE_APP_API_URL: https://iguide.cuahsi.io/api
VITE_APP_LOGIN_URL: https://orcid.org/oauth/authorize
VITE_APP_GOOGLE_MAPS_API_KEY: ""
VITE_APP_SUPPORT_EMAIL: help@example.com
VITE_APP_CLIENT_ID: APP-4ZA8C8BYAH3QHNE9
SEARCH_RELEVANCE_SCORE_THRESHOLD: 1.4


jobs:
@@ -47,7 +47,7 @@ jobs:
DB_USERNAME: ${{ secrets.DB_USERNAME }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
run: |
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VUE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL")
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VITE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL" "SEARCH_RELEVANCE_SCORE_THRESHOLD")
# Empty the .env file
> .env
@@ -59,7 +59,7 @@
- name: Compile the frontend env file
run: |
variables=("VUE_APP_NAME" "VUE_APP_API_URL" "VUE_APP_SUPPORT_EMAIL" "VUE_APP_URL" "VUE_APP_LOGIN_URL" "VUE_APP_CLIENT_ID" "VUE_APP_GOOGLE_MAPS_API_KEY")
variables=("VITE_APP_NAME" "VITE_APP_API_URL" "VITE_APP_SUPPORT_EMAIL" "VITE_APP_URL" "VITE_APP_LOGIN_URL" "VITE_APP_CLIENT_ID" "VITE_APP_GOOGLE_MAPS_API_KEY")
# Empty the .env file
> frontend/.env
3 changes: 3 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,3 @@
{
"editor.detectIndentation": true
}
10 changes: 5 additions & 5 deletions Makefile
@@ -18,10 +18,6 @@ down:
build:
docker-compose build

.PHONY: test
test:
docker-compose exec api pytest tests

.PHONY: format
format:
docker-compose run api $(isort)
@@ -31,6 +27,10 @@ format:
schema:
docker-compose run api python api/models/management/generate_schema.py

.PHONY: test
test:
docker-compose exec api pytest tests

.PHONY: pre-post
pre-post:
docker-compose run catalog-trigger python /app/triggers/management/change_streams_pre_and_post.py
docker-compose run catalog-trigger python /app/api/models/management/change_streams_pre_and_post.py
31 changes: 28 additions & 3 deletions README.md
@@ -1,5 +1,30 @@
# I-GUIDE catalog API
I-GUIDE Catalog API
# I-GUIDE Catalog

The I-GUIDE Catalog is part of the [I-GUIDE Cyberinfrastructure Platform](https://i-guide.io/platform/). The Platform supports collaborative research along with computation- and data-intensive geospatial problem solving. Within the context of the I-GUIDE Platform, the goal of the Catalog is to allow users to find, explore, and share data, models, code, software, hosted services, computational resources, and learning materials. A major goal of the Catalog is to make these resources "actionable" - e.g., once a user finds a resource, they should be able to interact with the content of the resource and/or launch it into an appropriate analysis or computational environment for execution and exploration.

## Deployment

The I-GUIDE Catalog is currently deployed at [https://iguide.cuahsi.io/](https://iguide.cuahsi.io/).

## Issue Tracker

Please report any bugs or ideas for enhancements to the I-GUIDE Catalog issue tracker:

[https://github.com/I-GUIDE/catalog/issues](https://github.com/I-GUIDE/catalog/issues)

## License

The I-GUIDE Catalog is released under the BSD 3-Clause License. This means that you can do what you want with the code [provided that you include the BSD copyright and license notice in it](https://www.tldrlegal.com/license/bsd-3-clause-license-revised).

©2024 I-GUIDE Developers.

## Sponsors and Credits

[![NSF-2118329](https://img.shields.io/badge/NSF-2118329-blue.svg)](https://nsf.gov/awardsearch/showAward?AWD_ID=2118329)

This material is based upon work supported by the National Science Foundation (NSF) under award [2118329](https://www.nsf.gov/awardsearch/showAward?AWD_ID=2118329). Any opinions, findings, conclusions, or recommendations expressed in this material are those of the authors and do not necessarily reflect the views of the NSF.

## Developer Information

### Getting Started
```console
@@ -43,7 +68,7 @@ TESTING=True
```

2. Login and submit a record to create all the collections
3. Run `triggers/management/change_streams_pre_and_post.py`
3. Run `api/models/management/change_streams_pre_and_post.py`
4. Create the catalog and typeahead indexes from `atlas/` (TODO detailed instructions)

### Triggers
1 change: 1 addition & 0 deletions api/adapters/__init__.py
@@ -1,2 +1,3 @@
# import all adapters here to get them registered
from api.adapters import hydroshare
from api.adapters import s3
3 changes: 2 additions & 1 deletion api/adapters/hydroshare.py
@@ -11,7 +11,7 @@
from api.exceptions import RepositoryException
from api.models import schema
from api.models.catalog import DatasetMetadataDOC
from api.models.user import Submission
from api.models.user import Submission, SubmissionType
from api.models.schema import HttpUrlStr


@@ -133,6 +133,7 @@ def to_dataset_media_object(self):
media_object.encodingFormat = self.content_type
media_object.contentSize = f"{self.size/1000.00} KB"
media_object.name = self.file_name
media_object.sha256 = self.checksum
return media_object


50 changes: 50 additions & 0 deletions api/adapters/s3.py
@@ -0,0 +1,50 @@
import boto3
import json
from botocore.client import Config
from botocore import UNSIGNED

from api.adapters.base import AbstractRepositoryMetadataAdapter, AbstractRepositoryRequestHandler
from api.adapters.utils import RepositoryType, register_adapter
from api.models.catalog import DatasetMetadataDOC
from api.models.user import Submission, SubmissionType


class _S3RequestHandler(AbstractRepositoryRequestHandler):
def get_metadata(self, record_id: str):
endpoint_url = record_id.split("+")[0]
bucket_name = record_id.split("+")[1]
file_key = record_id.split("+")[2]

s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED), endpoint_url=endpoint_url)

response = s3.get_object(Bucket=bucket_name, Key=file_key)
json_content = response['Body'].read().decode('utf-8')

# Parse the JSON content
data = json.loads(json_content)

return data


class S3MetadataAdapter(AbstractRepositoryMetadataAdapter):
repo_api_handler = _S3RequestHandler()

@staticmethod
def to_catalog_record(metadata: dict) -> DatasetMetadataDOC:
return DatasetMetadataDOC(**metadata)

@staticmethod
def to_repository_record(catalog_record: DatasetMetadataDOC):
"""Converts dataset catalog record to hydroshare resource metadata"""
raise NotImplementedError

@staticmethod
def update_submission(submission: Submission, repo_record_id: str) -> Submission:
"""Sets additional hydroshare specific metadata to submission record"""

submission.repository_identifier = repo_record_id
submission.repository = RepositoryType.S3
return submission


register_adapter(RepositoryType.S3, S3MetadataAdapter)
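Reading the new adapter end to end: `get_metadata()` treats the record identifier as `endpoint+bucket+key`, fetches the object anonymously, and parses it as JSON. A hypothetical usage sketch follows; the endpoint, bucket, and object key are invented for illustration, and only the `+`-separated record_id format comes from `_S3RequestHandler.get_metadata()` above:

```python
# Hypothetical usage sketch for the new S3 adapter (names below are made up).
from api.adapters.s3 import S3MetadataAdapter

endpoint_url = "https://s3.example.org"
bucket_name = "example-bucket"
file_key = "datasets/example/metadata.json"
record_id = f"{endpoint_url}+{bucket_name}+{file_key}"

metadata = S3MetadataAdapter.repo_api_handler.get_metadata(record_id)  # anonymous (unsigned) GET of the JSON object
catalog_record = S3MetadataAdapter.to_catalog_record(metadata)         # validate into DatasetMetadataDOC
```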
1 change: 1 addition & 0 deletions api/adapters/utils.py
@@ -6,6 +6,7 @@

class RepositoryType(str, Enum):
HYDROSHARE = 'HYDROSHARE'
S3 = 'S3'


_adapter_registry = {}
1 change: 1 addition & 0 deletions api/config/__init__.py
@@ -21,6 +21,7 @@ class Settings(BaseSettings):
oidc_issuer: str
hydroshare_meta_read_url: HttpUrlStr
hydroshare_file_read_url: HttpUrlStr
search_relevance_score_threshold: float = 1.4

def __init__(self, **data: Any) -> None:
super().__init__(**data)
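The new `search_relevance_score_threshold` setting (also added to the workflow env lists above) is presumably used to drop low-relevance hits from Atlas Search results. The aggregation stages below are only a sketch of how such a threshold is typically applied; the index name and projected fields are placeholders, and the actual search pipeline in the codebase may differ:

```python
# Sketch: filter Atlas $search results by relevance score using the
# configured threshold. "catalog" index and the projected fields are assumptions.
from api.config import get_settings

threshold = get_settings().search_relevance_score_threshold

pipeline = [
    {"$search": {"index": "catalog", "text": {"query": "streamflow", "path": {"wildcard": "*"}}}},
    {"$project": {"name": 1, "description": 1, "score": {"$meta": "searchScore"}}},
    {"$match": {"score": {"$gt": threshold}}},
]
```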
14 changes: 9 additions & 5 deletions api/models/catalog.py
@@ -3,12 +3,15 @@

from beanie import Document

from api.models.user import Submission

from .schema import CoreMetadata
from api.models.user import Submission, S3Path
from .schema import CoreMetadata, DatasetMetadata


class CoreMetadataDOC(Document, CoreMetadata):
# this field is not stored in the database, but is populated from the corresponding submission record
# using the type field in the submission record
submission_type: Optional[str] = None

class Settings:
# name is the collection name in database (iguide) where the Metadata Record documents will be stored
# for all metadata record types (e.g. dataset, geopackage, software etc.)
@@ -20,7 +23,7 @@ class Settings:
),
datetime.datetime: lambda dt: datetime.datetime(
year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, minute=dt.minute, second=dt.second
),
)
}

def as_submission(self) -> Submission:
@@ -33,5 +36,6 @@ def as_submission(self) -> Submission:
)


class DatasetMetadataDOC(CoreMetadataDOC):
class DatasetMetadataDOC(CoreMetadataDOC, DatasetMetadata):
repository_identifier: Optional[str] = None
s3_path: Optional[S3Path] = None
5 changes: 3 additions & 2 deletions api/models/management/change_streams_pre_and_post.py
@@ -1,10 +1,11 @@
import asyncio
from api.config import get_settings
from api.models.catalog import DatasetMetadataDOC

from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient

from api.config import get_settings
from api.models.catalog import DatasetMetadataDOC


async def main():
db = AsyncIOMotorClient(get_settings().db_connection_string)
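The body of `main()` is collapsed above. Judging by the script name and the `pre-post` Makefile target, it most likely enables MongoDB change stream pre- and post-images on the catalog collection; a minimal sketch under that assumption (the collection and settings attribute names here are guesses, not read from the repository):

```python
# Sketch only: assumes the script enables pre- and post-images for change
# streams on the collection backing the metadata documents ("catalog" is an
# assumed collection name). collMod with changeStreamPreAndPostImages
# requires MongoDB 6.0+.
import asyncio

from motor.motor_asyncio import AsyncIOMotorClient

from api.config import get_settings


async def main():
    client = AsyncIOMotorClient(get_settings().db_connection_string)
    db = client[get_settings().database_name]  # database_name attribute is an assumption
    await db.command({
        "collMod": "catalog",
        "changeStreamPreAndPostImages": {"enabled": True},
    })


if __name__ == "__main__":
    asyncio.run(main())
```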
4 changes: 2 additions & 2 deletions api/models/management/generate_schema.py
@@ -3,11 +3,11 @@

import typer

from api.models.schema import DatasetSchema
from api.models.schema import DatasetMetadata


def main(output_name: str = "api/models/schemas/schema.json"):
schema = DatasetSchema.model_json_schema()
schema = DatasetMetadata.model_json_schema()
json_schema = json.dumps(schema)
# Have to run it a few times for the definitions to get updated before inserted into another model
while "#/$defs/" in json_schema:
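`generate_schema.py` switches from pydantic v1's `.schema()` to v2's `model_json_schema()`, and the `while "#/$defs/" in json_schema` loop (whose body is not shown here) repeatedly substitutes definitions until no `$defs` references remain. A rough sketch of that inlining idea, working on the parsed schema instead of the dumped JSON string; this is an assumption about intent, not the script's actual code, and it does not handle self-referencing definitions:

```python
import json

from api.models.schema import DatasetMetadata


def inline_defs(schema: dict) -> dict:
    """Replace '#/$defs/<Name>' references with copies of the referenced definitions."""
    defs = schema.get("$defs", {})

    def resolve(node):
        if isinstance(node, dict):
            ref = node.get("$ref", "")
            if isinstance(ref, str) and ref.startswith("#/$defs/"):
                # Swap the reference node for a resolved copy of its definition.
                return resolve(dict(defs[ref.rsplit("/", 1)[-1]]))
            return {key: resolve(value) for key, value in node.items()}
        if isinstance(node, list):
            return [resolve(item) for item in node]
        return node

    return resolve({key: value for key, value in schema.items() if key != "$defs"})


if __name__ == "__main__":
    print(json.dumps(inline_defs(DatasetMetadata.model_json_schema()), indent=2))
```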
