Skip to content

Commit

Permalink
[#38] updating with develop
Browse files Browse the repository at this point in the history
  • Loading branch information
pkdash committed Aug 16, 2023
2 parents dca1631 + 1722ce6 commit 00be384
Show file tree
Hide file tree
Showing 41 changed files with 2,171 additions and 280 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/deploy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,10 @@ jobs:
DB_PROTOCOL: ${{ secrets.DB_PROTOCOL }}
TESTING: ${{ secrets.TESTING }}
VUE_APP_LOGIN_URL: ${{ secrets.VUE_APP_LOGIN_URL }}
HYDROSHARE_META_READ_URL: ${{ secrets.HYDROSHARE_META_READ_URL}}
HYDROSHARE_FILE_READ_URL: ${{ secrets.HYDROSHARE_FILE_READ_URL}}
run: |
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VUE_APP_LOGIN_URL")
variables=("OIDC_ISSUER" "DB_USERNAME" "DB_PASSWORD" "DB_HOST" "DATABASE_NAME" "DB_PROTOCOL" "TESTING" "VUE_APP_LOGIN_URL" "HYDROSHARE_META_READ_URL" "HYDROSHARE_FILE_READ_URL")
# Empty the .env file
> .env
Expand Down Expand Up @@ -68,7 +70,7 @@ jobs:
gcloud auth configure-docker us-central1-docker.pkg.dev
docker build -t us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/api:latest -f docker/api/Dockerfile .
docker push us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/api:latest
docker build -t us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/frontend:latest -f docker/frontend/Dockerfile frontend/
docker build -t us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/frontend:latest -f docker/frontend/Dockerfile .
docker push us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/frontend:latest
docker build -t us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/trigger:latest -f docker/triggers/Dockerfile .
docker push us-central1-docker.pkg.dev/$GOOGLE_PROJECT/iguide/trigger:latest
Expand Down
2 changes: 2 additions & 0 deletions api/adapters/hydroshare.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@ class _HydroshareResourceMetadata(BaseModel):
creators: List[Creator]
created: datetime
modified: datetime
published: Optional[datetime]
subjects: Optional[List[str]] = []
language: str
rights: Rights
Expand Down Expand Up @@ -298,6 +299,7 @@ def to_catalog_dataset(self):
dataset.creator = self.to_dataset_creators()
dataset.dateCreated = self.created
dataset.dateModified = self.modified
dataset.datePublished = self.published
dataset.keywords = self.to_dataset_keywords()
dataset.inLanguage = self.language
dataset.funding = self.to_dataset_funding()
Expand Down
2 changes: 1 addition & 1 deletion api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def handle_exit(self, sig: int, frame) -> None:
async def main():
"""Run FastAPI"""

server = Server(config=uvicorn.Config(app, workers=1, loop="asyncio", host="0.0.0.0", port=8000))
server = Server(config=uvicorn.Config(app, workers=1, loop="asyncio", host="0.0.0.0", port=8000, forwarded_allow_ips="*"))
api = asyncio.create_task(server.serve())

await asyncio.wait([api])
Expand Down
2 changes: 1 addition & 1 deletion api/models/catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ def delete_revision_id(self):


class DatasetMetadataDOC(CoreMetadataDOC):
    """Catalog metadata document for a dataset record."""

    # Identifier of the record in its source repository (e.g. a HydroShare
    # resource id); None for records created directly in the catalog —
    # TODO confirm against the Submission model.
    # NOTE(review): `str = None` relies on implicit-optional behavior;
    # consider `Optional[str] = None` for strict pydantic validation.
    repository_identifier: str = None
106 changes: 60 additions & 46 deletions api/models/schemas/ui-schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,8 @@
"type": "Control",
"scope": "#/properties/creator",
"options": {
"showSortButtons": true,
"elementLabelProp": "name",
"detail": {
"type": "Control",
"scope": "#",
Expand Down Expand Up @@ -240,7 +242,34 @@
},
{
"type": "Control",
"scope": "#/properties/organization"
"scope": "#/properties/affiliation",
"options": {
"detail": {
"type": "VerticalLayout",
"elements": [
{
"type": "Control",
"scope": "#/properties/@type",
"rule": {
"effect": "HIDE",
"condition": {}
}
},
{
"type": "Control",
"scope": "#/properties/name"
},
{
"type": "Control",
"scope": "#/properties/url"
},
{
"type": "Control",
"scope": "#/properties/address"
}
]
}
}
}
]
}
Expand Down Expand Up @@ -352,11 +381,11 @@
"elements": [
{
"type": "Control",
"scope": "#/properties/start"
"scope": "#/properties/startDate"
},
{
"type": "Control",
"scope": "#/properties/end"
"scope": "#/properties/endDate"
}
]
}
Expand Down Expand Up @@ -458,48 +487,6 @@
"type": "Group",
"label": "Related Resources",
"elements": [
{
"type": "Control",
"scope": "#/properties/includedInDataCatalog",
"options": {
"elementLabelProp": [
"name"
],
"detail": {
"type": "VerticalLayout",
"elements": [
{
"type": "Control",
"scope": "#/properties/@type",
"rule": {
"effect": "HIDE",
"condition": {}
}
},
{
"type": "HorizontalLayout",
"elements": [
{
"type": "Control",
"scope": "#/properties/name"
},
{
"type": "Control",
"scope": "#/properties/url"
}
]
},
{
"type": "Control",
"scope": "#/properties/description",
"options": {
"multi": true
}
}
]
}
}
},
{
"type": "Control",
"scope": "#/properties/subjectOf",
Expand Down Expand Up @@ -703,7 +690,34 @@
},
{
"type": "Control",
"scope": "#/properties/organization"
"scope": "#/properties/affiliation",
"options": {
"detail": {
"type": "VerticalLayout",
"elements": [
{
"type": "Control",
"scope": "#/properties/@type",
"rule": {
"effect": "HIDE",
"condition": {}
}
},
{
"type": "Control",
"scope": "#/properties/name"
},
{
"type": "Control",
"scope": "#/properties/url"
},
{
"type": "Control",
"scope": "#/properties/address"
}
]
}
}
}
]
}
Expand Down
26 changes: 19 additions & 7 deletions api/routes/catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,12 @@
router = APIRouter()


def inject_repository_identifier(submission: Submission, document: DatasetMetadataDOC):
    """Copy the submission's repository identifier onto the metadata document.

    Leaves the document untouched when the submission carries no repository
    identifier; always returns the (possibly mutated) document.
    """
    repo_id = submission.repository_identifier
    if repo_id:
        document.repository_identifier = repo_id
    return document


@router.post("/dataset/", response_model=DatasetMetadataDOC, status_code=status.HTTP_201_CREATED)
async def create_dataset(document: DatasetMetadataDOC, user: Annotated[User, Depends(get_current_user)]):
await document.insert()
Expand All @@ -25,22 +31,25 @@ async def create_dataset(document: DatasetMetadataDOC, user: Annotated[User, Dep
return document


@router.get("/dataset/{submission_id}", response_model=DatasetMetadataDOC, response_model_exclude_none=True)
async def get_dataset(submission_id: PydanticObjectId):
    """Return a single dataset metadata record looked up by its submission id.

    Raises HTTP 404 when either the submission or its metadata document is
    missing. NOTE(review): no current-user dependency here, so this endpoint
    is publicly readable — confirm that is intended.
    """
    submission: Submission = await Submission.find_one(Submission.identifier == submission_id)
    if submission is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Dataset metadata record was not found")

    document: DatasetMetadataDOC = await DatasetMetadataDOC.get(submission.identifier)
    if document is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Dataset metadata record was not found")

    # Strip the internal revision id and surface the repository identifier
    # stored on the submission before returning the document to the client.
    document.delete_revision_id()
    document = inject_repository_identifier(submission, document)
    return document


@router.get("/dataset/", response_model=List[DatasetMetadataDOC], response_model_exclude_none=True)
async def get_datasets(user: Annotated[User, Depends(get_current_user)]):
    """Return the metadata documents for every submission owned by the current user."""
    documents = []
    for submission in user.submissions:
        document = await DatasetMetadataDOC.get(submission.identifier)
        # NOTE(review): assumes every submission still has a stored document;
        # a deleted document would surface here as None — confirm invariant.
        documents.append(inject_repository_identifier(submission, document))
    for document in documents:
        # Internal revision ids are never exposed to API clients.
        document.delete_revision_id()
    return documents
Expand All @@ -65,8 +74,9 @@ async def update_dataset(
updated_submission = dataset.as_submission()
updated_submission.repository_identifier = submission.repository_identifier
updated_submission.repository = submission.repository
updated_submission.submitted = submission.submitted
await submission.set(updated_submission.model_dump(exclude_unset=True))
dataset.delete_revision_id()
dataset = inject_repository_identifier(submission, dataset)
return dataset


Expand Down Expand Up @@ -141,8 +151,10 @@ async def _save_to_db(identifier: str, user: User, submission: Submission = None
updated_dataset = await DatasetMetadataDOC.get(submission.identifier)
updated_submission = updated_dataset.as_submission()
updated_submission = adapter.update_submission(submission=updated_submission, repo_record_id=identifier)
updated_submission.submitted = submission.submitted
await submission.set(updated_submission.model_dump(exclude_unset=True))
dataset = updated_dataset

dataset.delete_revision_id()
dataset = inject_repository_identifier(submission, dataset)
return dataset
33 changes: 20 additions & 13 deletions api/routes/discovery.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,9 @@


class SearchQuery(BaseModel):
term: str
term: str = None
sortBy: str = None
reverseSort: bool = True
contentType: str = None
providerName: str = None
creatorName: str = None
Expand All @@ -27,7 +28,7 @@ class SearchQuery(BaseModel):

@field_validator('*')
def empty_str_to_none(cls, v, info: FieldValidationInfo):
if info.field_name == 'term':
if info.field_name == 'term' and v:
return v.strip()

if isinstance(v, str) and v.strip() == '':
Expand Down Expand Up @@ -82,25 +83,26 @@ def _filters(self):

if self.dataCoverageStart:
filters.append(
{'range': {'path': 'temporalCoverageStart', 'gte': datetime(self.dataCoverageStart, 1, 1)}}
{'range': {'path': 'temporalCoverage.startDate', 'gte': datetime(self.dataCoverageStart, 1, 1)}}
)
if self.dataCoverageEnd:
filters.append(
{'range': {'path': 'temporalCoverageEnd', 'lt': datetime(self.dataCoverageEnd + 1, 1, 1)}}
{'range': {'path': 'temporalCoverage.endDate', 'lt': datetime(self.dataCoverageEnd + 1, 1, 1)}}
)
return filters

@property
def _should(self):
auto_complete_paths = ['name', 'description', 'keywords', 'keywords.name']
search_paths = ['name', 'description', 'keywords', 'keywords.name']
should = [
{'autocomplete': {'query': self.term, 'path': key, 'fuzzy': {'maxEdits': 1}}} for key in auto_complete_paths
{'autocomplete': {'query': self.term, 'path': key, 'fuzzy': {'maxEdits': 1}}} for key in search_paths
]
return should

@property
def _must(self):
must = []
must.append({'term': {'path': '@type', 'query': "Dataset"}})
if self.contentType:
must.append({'term': {'path': '@type', 'query': self.contentType}})
if self.creatorName:
Expand All @@ -127,19 +129,24 @@ def _must(self):
def stages(self):
highlightPaths = ['name', 'description', 'keywords', 'keywords.name', 'creator.name']
stages = []
stages.append(
compound = {'filter': self._filters, 'must': self._must}
if self.term:
compound['should'] = self._should
search_stage = \
{
'$search': {
'index': 'fuzzy_search',
'compound': {'filter': self._filters, 'should': self._should, 'must': self._must},
'highlight': {'path': highlightPaths},
'compound': compound,
}
}
)
if self.term:
search_stage["$search"]['highlight'] = {'path': highlightPaths}

stages.append(search_stage)

# sorting needs to happen before pagination
if self.sortBy:
stages.append({'$sort': {self.sortBy: 1}})
stages.append({'$sort': {self.sortBy: -1 if self.reverseSort else 1}})
stages.append({'$skip': (self.pageNumber - 1) * self.pageSize})
stages.append({'$limit': self.pageSize})
#stages.append({'$unset': ['_id', '_class_id']})
Expand All @@ -160,8 +167,8 @@ async def search(request: Request, search_query: SearchQuery = Depends()):

@router.get("/typeahead")
async def typeahead(request: Request, term: str, pageSize: int = 30):
auto_complete_paths = ['name', 'description', 'keywords', 'keywords.name']
should = [{'autocomplete': {'query': term, 'path': key, 'fuzzy': {'maxEdits': 1}}} for key in auto_complete_paths]
search_paths = ['name', 'description', 'keywords', 'keywords.name']
should = [{'autocomplete': {'query': term, 'path': key, 'fuzzy': {'maxEdits': 1}}} for key in search_paths]

stages = [
{
Expand Down
28 changes: 14 additions & 14 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,26 +8,26 @@ services:
nginx:
image: nginx:1.17.10
volumes:
- ./nginx/nginx-local.conf:/etc/nginx/nginx.conf
#- ./nginx/nginx-local-frontend-dev.conf:/etc/nginx/nginx.conf
# - ./nginx/nginx-local.conf:/etc/nginx/nginx.conf
- ./nginx/nginx-local-frontend-dev.conf:/etc/nginx/nginx.conf
- ./nginx/config:/etc/ssl
ports:
- 443:443
depends_on:
- api

frontend:
image: iguide_frontend
build:
dockerfile: ./docker/frontend/Dockerfile
#volumes:
#- ./frontend:/app
#- ./frontend/nginx.conf:/etc/nginx/nginx.conf
ports:
- 5001:5001
restart: unless-stopped
depends_on:
- api
# frontend:
# image: iguide_frontend
# build:
# dockerfile: ./docker/frontend/Dockerfile
# #volumes:
# #- ./frontend:/app
# #- ./frontend/nginx.conf:/etc/nginx/nginx.conf
# ports:
# - 5001:5001
# restart: unless-stopped
# depends_on:
# - api

api:
image: iguide_api
Expand Down
Loading

0 comments on commit 00be384

Please sign in to comment.