Merging staging branch into prod branch
prathapsridharan committed Jun 20, 2024
2 parents 3b35405 + a7ea3fc commit bd32b39
Showing 14 changed files with 229 additions and 262 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-images-and-create-deployment.yml
@@ -45,7 +45,7 @@ jobs:
uses: avakar/create-deployment@v1
# To stop deployment to a specific DEPLOYMENT_STAGE, remove it from the condition below.
# The DEPLOYMENT_STAGE values that should be present are dev, stage, and prod.
if: env.DEPLOYMENT_STAGE == 'prod' || env.DEPLOYMENT_STAGE == 'stage'
if: env.DEPLOYMENT_STAGE == 'prod' || env.DEPLOYMENT_STAGE == 'stage' || env.DEPLOYMENT_STAGE == 'dev'
with:
auto_merge: false
environment: ${{ env.DEPLOYMENT_STAGE }}
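For reference, the updated condition allows a deployment record to be created for all three long-lived stages. A minimal Python sketch of the equivalent gating logic (the helper name and the ALLOWED_STAGES set are illustrative, not part of the workflow):

import os

# Hypothetical helper mirroring the workflow condition above; not part of the repo.
ALLOWED_STAGES = {"dev", "stage", "prod"}


def should_create_deployment(stage: str = "") -> bool:
    """Return True when a GitHub deployment should be created for the given stage."""
    stage = stage or os.environ.get("DEPLOYMENT_STAGE", "")
    return stage in ALLOWED_STAGES


print(should_create_deployment("dev"))    # True once dev is included
print(should_create_deployment("other"))  # False: any other stage is still excluded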
2 changes: 1 addition & 1 deletion .happy/terraform/modules/ecs-stack/main.tf
@@ -31,7 +31,7 @@ locals {
backend_wmg_workers = var.backend_wmg_workers
backend_cmd = ["gunicorn", "--worker-class", "gevent", "--workers", "${local.backend_workers}",
"--bind", "0.0.0.0:5000", "backend.api_server.app:app", "--max-requests", "10000", "--timeout", "180",
"--keep-alive", "61", "--log-level", "info"]
"--keep-alive", "100", "--log-level", "info"]
backend_de_cmd = ["gunicorn", "--worker-class", "gevent", "--workers", "${local.backend_de_workers}",
"--bind", "0.0.0.0:5000", "backend.de.server.app:app", "--max-requests", "10000", "--timeout", "540",
"--keep-alive", "61", "--log-level", "info"]
23 changes: 7 additions & 16 deletions .happy/terraform/modules/schema_migration/main.tf
@@ -180,7 +180,6 @@ resource aws_sfn_state_machine sfn_schema_migration {
"Next": "ApplyDefaults",
"ResultPath": "$.inputDefaults",
"Parameters": {
"auto_publish": "False",
"limit_migration": "0"
}
},
@@ -221,10 +220,6 @@ resource aws_sfn_state_machine sfn_schema_migration {
"Name": "EXECUTION_ID",
"Value.$": "$$.Execution.Name"
},
{
"Name": "AUTO_PUBLISH",
"Value.$": "$.auto_publish"
},
{
"Name": "LIMIT_MIGRATION",
"Value.$": "$.limit_migration"
@@ -286,10 +281,6 @@ resource aws_sfn_state_machine sfn_schema_migration {
"Name": "COLLECTION_VERSION_ID",
"Value.$": "$.collection_version_id"
},
{
"Name": "CAN_PUBLISH",
"Value.$": "$.can_publish"
},
{
"Name": "TASK_TOKEN",
"Value.$": "$$.Task.Token"
@@ -314,7 +305,7 @@ resource aws_sfn_state_machine sfn_schema_migration {
"States.ALL"
],
"ResultPath": null,
"Next": "CollectionPublish"
"Next": "CollectionCleanup"
}
]
},
@@ -324,17 +315,17 @@ resource aws_sfn_state_machine sfn_schema_migration {
{
"Variable": "$.key_name",
"IsPresent": false,
"Next": "CollectionPublish"
"Next": "CollectionCleanup"
}
],
"Default": "SpanDatasets"
},
"CollectionPublish": {
"CollectionCleanup": {
"Type": "Task",
"Resource": "arn:aws:states:::batch:submitJob.sync",
"Parameters": {
"JobDefinition": "${resource.aws_batch_job_definition.schema_migrations.arn}",
"JobName": "Collection_publish",
"JobName": "Collection_cleanup",
"JobQueue": "${var.job_queue_arn}",
"Timeout": {
"AttemptDurationSeconds": 600
@@ -343,7 +334,7 @@ resource aws_sfn_state_machine sfn_schema_migration {
"Environment": [
{
"Name": "STEP_NAME",
"Value": "collection_publish"
"Value": "collection_cleanup"
},
{
"Name": "MIGRATE",
@@ -497,14 +488,14 @@ resource aws_sfn_state_machine sfn_schema_migration {
"Key.$": "$.key_name"
}
},
"Next": "CollectionPublish",
"Next": "CollectionCleanup",
"MaxConcurrency": 10,
"Catch": [
{
"ErrorEquals": [
"States.ALL"
],
"Next": "CollectionPublish",
"Next": "CollectionCleanup",
"ResultPath": null
}
],
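For context, after this change the ApplyDefaults input carries only limit_migration (auto_publish and the related AUTO_PUBLISH/CAN_PUBLISH environment entries are removed), and the fan-in state formerly named CollectionPublish is now CollectionCleanup. A hedged boto3 sketch of starting an execution with the remaining default; the ARN is a placeholder and any input keys beyond limit_migration are assumptions:

import json

import boto3

sfn = boto3.client("stepfunctions")

# Placeholder ARN; the real one comes from the aws_sfn_state_machine resource above.
STATE_MACHINE_ARN = "arn:aws:states:us-west-2:000000000000:stateMachine:schema-migration"

response = sfn.start_execution(
    stateMachineArn=STATE_MACHINE_ARN,
    # auto_publish is no longer an input; "0" mirrors the limit_migration default above.
    input=json.dumps({"limit_migration": "0"}),
)
print(response["executionArn"])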
2 changes: 2 additions & 0 deletions backend/api_server/db.py
@@ -4,6 +4,8 @@

from backend.common.utils.db_session import db_session_manager

# TODO remove from code base


def dbconnect(func):
@wraps(func)
1 change: 1 addition & 0 deletions backend/common/utils/db_session.py
@@ -1,3 +1,4 @@
# TODO remove from code base
import logging
from contextlib import contextmanager

16 changes: 16 additions & 0 deletions backend/common/utils/timer.py
@@ -0,0 +1,16 @@
import logging
import time
from contextlib import contextmanager

logging.basicConfig(level=logging.INFO)


@contextmanager
def log_time_taken(description: str = "Code block"):
start_time = time.time()
try:
yield
finally:
end_time = time.time()
elapsed_time = end_time - start_time
logging.info(dict(type="METRIC", details=dict(description=description, time=elapsed_time, unit="seconds")))
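A short usage example for the new context manager; the timed body is a stand-in for real work:

import time

from backend.common.utils.timer import log_time_taken

# Emits a structured METRIC log entry with the elapsed seconds when the block
# exits, even if the body raises.
with log_time_taken("simulated query"):
    time.sleep(0.25)  # placeholder for the code being measured

If monotonicity matters, time.perf_counter() is generally preferred over time.time() for measuring elapsed intervals, since it is unaffected by system clock adjustments.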
4 changes: 3 additions & 1 deletion backend/curation/api/v1/curation/collections/actions.py
@@ -1,3 +1,5 @@
import logging

from flask import jsonify, make_response

import backend.common.doi as doi
@@ -37,7 +39,7 @@ def get(visibility: str, token_info: dict, curator: str = None):
else:
filters["curator_name"] = curator

print(filters)
logging.info(filters)

resp_collections = []
for collection_version in get_business_logic().get_collections(CollectionQueryFilter(**filters)):
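The change above replaces a stray print(filters) with logging.info(filters), so the applied filters go through the logging configuration instead of raw stdout. A further refinement, sketched here purely as an illustration and not part of this commit, is a module-level logger with lazy formatting:

import logging

logger = logging.getLogger(__name__)  # tags records with this module's name


def log_filters(filters: dict) -> None:
    # %s formatting defers string construction until the record is actually emitted.
    logger.info("collection query filters: %s", filters)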
16 changes: 13 additions & 3 deletions backend/curation/api/v1/curation/collections/common.py
@@ -195,11 +195,20 @@ def reshape_datasets_for_curation_api(
as_version: bool = False,
is_published: bool = False,
) -> List[dict]:
business_logic = get_business_logic()
active_datasets = []
dataset_version_ids = []
dataset_versions = []
for dv in datasets:
dataset_version = get_business_logic().get_dataset_version(dv) if isinstance(dv, DatasetVersionId) else dv
if isinstance(dv, DatasetVersion):
dataset_versions.append(dv)
else:
dataset_version_ids.append(dv)
if dataset_version_ids:
dataset_versions.extend(business_logic.database_provider.get_dataset_versions_by_id(dataset_version_ids))
for dv in dataset_versions:
reshaped_dataset = reshape_dataset_for_curation_api(
dataset_version, use_canonical_url, preview, as_canonical=not as_version, is_published=is_published
dv, use_canonical_url, preview, as_canonical=not as_version, is_published=is_published
)
active_datasets.append(reshaped_dataset)
return active_datasets
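The rewrite above replaces a per-item get_dataset_version lookup with a single batched call: inputs are partitioned into already-loaded DatasetVersion objects and bare DatasetVersionId values, and the missing versions are then fetched in one database_provider.get_dataset_versions_by_id call. A self-contained sketch of that partition-then-batch-fetch pattern, using generic stand-in types rather than the repo's:

from typing import Iterable, List, Union


class VersionId(str):
    """Stand-in for a bare dataset-version identifier."""


class Version:
    """Stand-in for a fully loaded dataset version."""

    def __init__(self, version_id: str):
        self.version_id = version_id


def fetch_versions_by_id(ids: List[VersionId]) -> List[Version]:
    # One round trip for all missing ids instead of one lookup per id.
    return [Version(i) for i in ids]


def resolve_versions(items: Iterable[Union[Version, VersionId]]) -> List[Version]:
    loaded: List[Version] = []
    missing: List[VersionId] = []
    for item in items:
        if isinstance(item, Version):
            loaded.append(item)
        else:
            missing.append(item)
    if missing:
        loaded.extend(fetch_versions_by_id(missing))
    return loaded


print(len(resolve_versions([Version("a"), VersionId("b"), VersionId("c")])))  # 3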
@@ -275,7 +284,8 @@ def reshape_dataset_for_curation_datasets_index_api(
"""
Create the response shape for the curation datasets index API response. Handles shape for both public and private
requests.
:param visibility: the requested visibility of the datasets to be included in the dataset index response; either PUBLIC or PRIVATE.
:param visibility: the requested visibility of the datasets to be included in the dataset index response; either
PUBLIC or PRIVATE.
:param collection_version: the collection version of the dataset to be included in the API response.
:param dataset_version: a dataset version to be included in the API response.
:return: A dictionary shaped for inclusion in the datasets index API response.
(The remaining 6 changed files are not shown.)
