From 36d3904d88b98771430d020406dc6384c39990ad Mon Sep 17 00:00:00 2001 From: Nayib Gloria <55710092+nayib-jose-gloria@users.noreply.github.com> Date: Tue, 2 Jul 2024 14:17:32 -0400 Subject: [PATCH 01/15] fix: update raw h5ad when editing dataset title (#7250) --- .../processing/dataset_metadata_update.py | 180 +++++++++++++----- .../test_dataset_metadata_update.py | 94 ++++++++- 2 files changed, 221 insertions(+), 53 deletions(-) diff --git a/backend/layers/processing/dataset_metadata_update.py b/backend/layers/processing/dataset_metadata_update.py index 803759d14db0a..129594f69a63e 100644 --- a/backend/layers/processing/dataset_metadata_update.py +++ b/backend/layers/processing/dataset_metadata_update.py @@ -20,6 +20,7 @@ DatasetConversionStatus, DatasetProcessingStatus, DatasetStatusKey, + DatasetUploadStatus, DatasetValidationStatus, DatasetVersion, DatasetVersionId, @@ -39,6 +40,7 @@ # maps artifact name for metadata field to DB field name, if different ARTIFACT_TO_DB_FIELD = {"title": "name"} +FIELDS_IN_RAW_H5AD = ["title"] class DatasetMetadataUpdaterWorker(ProcessDownload): @@ -56,6 +58,40 @@ def __init__(self, artifact_bucket: str, datasets_bucket: str) -> None: self.artifact_bucket = artifact_bucket self.datasets_bucket = datasets_bucket + def update_raw_h5ad( + self, + raw_h5ad_uri: str, + new_key_prefix: str, + new_dataset_version_id: DatasetVersionId, + metadata_update: DatasetArtifactMetadataUpdate, + ): + raw_h5ad_filename = self.download_from_source_uri( + source_uri=raw_h5ad_uri, + local_path=CorporaConstants.ORIGINAL_H5AD_ARTIFACT_FILENAME, + ) + try: + adata = scanpy.read_h5ad(raw_h5ad_filename) + for key, val in metadata_update.as_dict_without_none_values().items(): + if key in adata.uns: + adata.uns[key] = val + + adata.write(raw_h5ad_filename, compression="gzip") + + self.update_processing_status( + new_dataset_version_id, DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADING + ) + self.create_artifact( + raw_h5ad_filename, + DatasetArtifactType.RAW_H5AD, + new_key_prefix, + new_dataset_version_id, + self.artifact_bucket, + DatasetStatusKey.H5AD, + ) + self.update_processing_status(new_dataset_version_id, DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED) + finally: + os.remove(raw_h5ad_filename) + def update_h5ad( self, h5ad_uri: str, @@ -68,33 +104,36 @@ def update_h5ad( source_uri=h5ad_uri, local_path=CorporaConstants.LABELED_H5AD_ARTIFACT_FILENAME, ) - - adata = scanpy.read_h5ad(h5ad_filename) - metadata = current_dataset_version.metadata - # maps artifact name for metadata field to DB field name, if different - for key, val in metadata_update.as_dict_without_none_values().items(): - adata.uns[key] = val - - db_field = ARTIFACT_TO_DB_FIELD.get(key) if key in ARTIFACT_TO_DB_FIELD else key - setattr(metadata, db_field, val) - - adata.write(h5ad_filename, compression="gzip") - self.business_logic.set_dataset_metadata(new_dataset_version_id, metadata) - - self.create_artifact( - h5ad_filename, - DatasetArtifactType.H5AD, - new_key_prefix, - new_dataset_version_id, - self.artifact_bucket, - DatasetStatusKey.H5AD, - datasets_bucket=self.datasets_bucket, - ) - os.remove(h5ad_filename) - self.update_processing_status( - new_dataset_version_id, DatasetStatusKey.VALIDATION, DatasetValidationStatus.VALID - ) - self.update_processing_status(new_dataset_version_id, DatasetStatusKey.H5AD, DatasetConversionStatus.CONVERTED) + try: + adata = scanpy.read_h5ad(h5ad_filename) + metadata = current_dataset_version.metadata + # maps artifact name for metadata field to DB field 
name, if different + for key, val in metadata_update.as_dict_without_none_values().items(): + adata.uns[key] = val + + db_field = ARTIFACT_TO_DB_FIELD.get(key) if key in ARTIFACT_TO_DB_FIELD else key + setattr(metadata, db_field, val) + + adata.write(h5ad_filename, compression="gzip") + self.business_logic.set_dataset_metadata(new_dataset_version_id, metadata) + + self.create_artifact( + h5ad_filename, + DatasetArtifactType.H5AD, + new_key_prefix, + new_dataset_version_id, + self.artifact_bucket, + DatasetStatusKey.H5AD, + datasets_bucket=self.datasets_bucket, + ) + self.update_processing_status( + new_dataset_version_id, DatasetStatusKey.VALIDATION, DatasetValidationStatus.VALID + ) + self.update_processing_status( + new_dataset_version_id, DatasetStatusKey.H5AD, DatasetConversionStatus.CONVERTED + ) + finally: + os.remove(h5ad_filename) def update_rds( self, @@ -107,29 +146,36 @@ def update_rds( source_uri=rds_uri, local_path=CorporaConstants.LABELED_RDS_ARTIFACT_FILENAME, ) - self.update_processing_status(new_dataset_version_id, DatasetStatusKey.RDS, DatasetConversionStatus.CONVERTING) - rds_object = base.readRDS(seurat_filename) + try: + self.update_processing_status( + new_dataset_version_id, DatasetStatusKey.RDS, DatasetConversionStatus.CONVERTING + ) - for key, val in metadata_update.as_dict_without_none_values().items(): - seurat_metadata = seurat.Misc(object=rds_object) - if seurat_metadata.rx2[key]: - val = val if isinstance(val, list) else [val] - seurat_metadata[seurat_metadata.names.index(key)] = StrVector(val) + rds_object = base.readRDS(seurat_filename) - base.saveRDS(rds_object, file=seurat_filename) + for key, val in metadata_update.as_dict_without_none_values().items(): + seurat_metadata = seurat.Misc(object=rds_object) + if seurat_metadata.rx2[key]: + val = val if isinstance(val, list) else [val] + seurat_metadata[seurat_metadata.names.index(key)] = StrVector(val) - self.create_artifact( - seurat_filename, - DatasetArtifactType.RDS, - new_key_prefix, - new_dataset_version_id, - self.artifact_bucket, - DatasetStatusKey.RDS, - datasets_bucket=self.datasets_bucket, - ) - os.remove(seurat_filename) - self.update_processing_status(new_dataset_version_id, DatasetStatusKey.RDS, DatasetConversionStatus.CONVERTED) + base.saveRDS(rds_object, file=seurat_filename) + + self.create_artifact( + seurat_filename, + DatasetArtifactType.RDS, + new_key_prefix, + new_dataset_version_id, + self.artifact_bucket, + DatasetStatusKey.RDS, + datasets_bucket=self.datasets_bucket, + ) + self.update_processing_status( + new_dataset_version_id, DatasetStatusKey.RDS, DatasetConversionStatus.CONVERTED + ) + finally: + os.remove(seurat_filename) def update_cxg( self, @@ -161,6 +207,22 @@ def __init__( self.cellxgene_bucket = cellxgene_bucket self.datasets_bucket = datasets_bucket + @staticmethod + def update_raw_h5ad( + artifact_bucket: str, + datasets_bucket: str, + raw_h5ad_uri: str, + new_key_prefix: str, + new_dataset_version_id: DatasetVersionId, + metadata_update: DatasetArtifactMetadataUpdate, + ): + DatasetMetadataUpdaterWorker(artifact_bucket, datasets_bucket).update_raw_h5ad( + raw_h5ad_uri, + new_key_prefix, + new_dataset_version_id, + metadata_update, + ) + @staticmethod def update_h5ad( artifact_bucket: str, @@ -229,17 +291,33 @@ def update_metadata( ) return + artifact_jobs = [] + new_artifact_key_prefix = self.get_key_prefix(new_dataset_version_id.id) if DatasetArtifactType.RAW_H5AD in artifact_uris: raw_h5ad_uri = artifact_uris[DatasetArtifactType.RAW_H5AD] else: 
self.logger.error(f"Cannot find raw H5AD artifact uri for {current_dataset_version_id}.") raise ValueError - self.upload_raw_h5ad(new_dataset_version_id, raw_h5ad_uri, self.artifact_bucket) - - new_artifact_key_prefix = self.get_key_prefix(new_dataset_version_id.id) - - artifact_jobs = [] + # Only trigger raw H5AD update if any updated metadata is part of the raw H5AD artifact + if any(getattr(metadata_update, field, None) for field in FIELDS_IN_RAW_H5AD): + self.logger.info("Main: Starting thread for raw h5ad update") + raw_h5ad_job = Process( + target=DatasetMetadataUpdater.update_raw_h5ad, + args=( + self.artifact_bucket, + self.datasets_bucket, + raw_h5ad_uri, + new_artifact_key_prefix, + new_dataset_version_id, + metadata_update, + ), + ) + artifact_jobs.append(raw_h5ad_job) + raw_h5ad_job.start() + else: + self.logger.info("Main: No raw h5ad update required") + self.upload_raw_h5ad(new_dataset_version_id, raw_h5ad_uri, self.artifact_bucket) if DatasetArtifactType.H5AD in artifact_uris: self.logger.info("Main: Starting thread for h5ad update") diff --git a/tests/unit/processing/test_dataset_metadata_update.py b/tests/unit/processing/test_dataset_metadata_update.py index e2c29b5a1fb3d..24b29ad86d705 100644 --- a/tests/unit/processing/test_dataset_metadata_update.py +++ b/tests/unit/processing/test_dataset_metadata_update.py @@ -79,8 +79,12 @@ def test_update_metadata(self, mock_worker_factory, *args): ) mock_worker = mock_worker_factory.return_value self.updater.has_valid_artifact_statuses = Mock(return_value=True) - self.updater.update_metadata(current_dataset_version_id, new_dataset_version_id, None) + self.updater.update_metadata( + current_dataset_version_id, new_dataset_version_id, DatasetArtifactMetadataUpdate(citation="New Citation") + ) + # skip raw_h5ad update since no updated fields are expected fields in raw H5AD + mock_worker.update_raw_h5ad.assert_not_called() mock_worker.update_h5ad.assert_called_once() mock_worker.update_rds.assert_called_once() mock_worker.update_cxg.assert_called_once() @@ -125,8 +129,11 @@ def test_update_metadata__rds_skipped(self, mock_worker_factory, *args): ) mock_worker = mock_worker_factory.return_value self.updater.has_valid_artifact_statuses = Mock(return_value=True) - self.updater.update_metadata(current_dataset_version_id, new_dataset_version_id, None) + self.updater.update_metadata( + current_dataset_version_id, new_dataset_version_id, DatasetArtifactMetadataUpdate(citation="New Citation") + ) + mock_worker.update_raw_h5ad.assert_not_called() mock_worker.update_h5ad.assert_called_once() mock_worker.update_rds.assert_not_called() mock_worker.update_cxg.assert_called_once() @@ -143,6 +150,44 @@ def test_update_metadata__rds_skipped(self, mock_worker_factory, *args): assert self.updater.s3_provider.uri_exists(f"s3://artifact_bucket/{new_dataset_version_id}/raw.h5ad") + @patch("backend.common.utils.dl_sources.uri.downloader") + @patch("scanpy.read_h5ad") + @patch("backend.layers.processing.dataset_metadata_update.S3Provider", Mock(side_effect=MockS3Provider)) + @patch("backend.layers.processing.dataset_metadata_update.DatabaseProvider", Mock(side_effect=DatabaseProviderMock)) + @patch("backend.layers.processing.dataset_metadata_update.DatasetMetadataUpdaterWorker") + def test_update_metadata__raw_h5ad_updated(self, mock_worker_factory, *args): + current_dataset_version = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(status_key=DatasetStatusKey.PROCESSING, status=DatasetProcessingStatus.SUCCESS), + 
DatasetStatusUpdate(status_key=DatasetStatusKey.RDS, status=DatasetConversionStatus.CONVERTED), + ] + ) + collection_version_id = CollectionVersionId(current_dataset_version.collection_version_id) + current_dataset_version_id = DatasetVersionId(current_dataset_version.dataset_version_id) + new_dataset_version_id, _ = self.business_logic.ingest_dataset( + collection_version_id=collection_version_id, + url=None, + file_size=0, + current_dataset_version_id=current_dataset_version_id, + start_step_function=False, + ) + mock_worker = mock_worker_factory.return_value + self.updater.has_valid_artifact_statuses = Mock(return_value=True) + self.updater.update_metadata( + current_dataset_version_id, new_dataset_version_id, DatasetArtifactMetadataUpdate(title="New Dataset Title") + ) + + mock_worker.update_raw_h5ad.assert_called_once() + mock_worker.update_h5ad.assert_called_once() + mock_worker.update_rds.assert_called_once() + mock_worker.update_cxg.assert_called_once() + + # check that collection version maps to dataset version with updated metadata + collection_version = self.business_logic.get_collection_version(collection_version_id) + new_dataset_version = collection_version.datasets[0] + + assert new_dataset_version.status.processing_status == DatasetProcessingStatus.SUCCESS + def test_update_metadata__current_dataset_version_bad_processing_status(self, *args): current_dataset_version = self.generate_dataset( statuses=[ @@ -350,6 +395,51 @@ def mock_download(source_uri, local_path): self.updater.download_from_source_uri = Mock(side_effect=mock_download) + @patch("backend.common.utils.dl_sources.uri.downloader") + @patch("backend.layers.processing.dataset_metadata_update.os.remove") + @patch("scanpy.read_h5ad") + def test_update_raw_h5ad(self, mock_read_h5ad, *args): + collection_version = self.generate_unpublished_collection(add_datasets=1) + current_dataset_version = collection_version.datasets[0] + new_dataset_version_id, _ = self.business_logic.ingest_dataset( + collection_version_id=collection_version.version_id, + url=None, + file_size=0, + current_dataset_version_id=current_dataset_version.version_id, + start_step_function=False, + ) + key_prefix = new_dataset_version_id.id + metadata_update = DatasetArtifactMetadataUpdate( + citation="Publication DOI www.doi.org/567.8", title="New Dataset Title" + ) + + # Mock anndata object + mock_anndata = Mock(spec=scanpy.AnnData) + mock_anndata.uns = {"title": "Old Dataset Title", "other_metadata": "misc."} + mock_anndata.write = Mock() + mock_read_h5ad.return_value = mock_anndata + + self.updater.update_raw_h5ad(None, key_prefix, new_dataset_version_id, metadata_update) + + local_filename = CorporaConstants.ORIGINAL_H5AD_ARTIFACT_FILENAME + # check mock_anndata object + mock_read_h5ad.assert_called_with(local_filename) + assert "citation" not in mock_anndata.uns + assert mock_anndata.uns["title"] == "New Dataset Title" + assert mock_anndata.uns["other_metadata"] == "misc." 
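        # "citation" is expected to be absent from uns: update_raw_h5ad only overwrites keys that already exist in adata.uns (the `if key in adata.uns` guard), and the mocked raw H5AD seeded only "title" and "other_metadata".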
+ # check s3 uris exist + assert self.updater.s3_provider.uri_exists(f"s3://artifact_bucket/{new_dataset_version_id.id}/{local_filename}") + # check DB DatasetVersion + new_dataset_version = self.business_logic.get_dataset_version(new_dataset_version_id) + artifacts = [(artifact.uri, artifact.type) for artifact in new_dataset_version.artifacts] + assert ( + f"s3://artifact_bucket/{new_dataset_version_id.id}/{local_filename}", + DatasetArtifactType.RAW_H5AD, + ) in artifacts + # check processing status + assert new_dataset_version.status.upload_status == DatasetUploadStatus.UPLOADED + assert new_dataset_version.status.h5ad_status == DatasetConversionStatus.UPLOADED + @patch("backend.common.utils.dl_sources.uri.downloader") @patch("backend.layers.processing.dataset_metadata_update.os.remove") @patch("scanpy.read_h5ad") From f039d56015afbc4a47a6100fc3111974a61c0129 Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Tue, 2 Jul 2024 13:06:49 -0700 Subject: [PATCH 02/15] fix: api token expired error (#7251) --- .../curation/api/v1/curation/auth/token.py | 17 ++++++++++++----- .../backend/layers/api/test_curation_api.py | 19 +++++++++++++++++++ 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/backend/curation/api/v1/curation/auth/token.py b/backend/curation/api/v1/curation/auth/token.py index e970ea6318f83..28ea7d9c5101d 100644 --- a/backend/curation/api/v1/curation/auth/token.py +++ b/backend/curation/api/v1/curation/auth/token.py @@ -1,3 +1,4 @@ +import requests from flask import make_response, request from jose import JWTError @@ -14,9 +15,15 @@ def post(): token_info = api_key.verify(user_api_key, config.api_key_secret) except JWTError: raise UnauthorizedError(detail="The API key is invalid") from None - else: - identity = auth0_management_session.get_user_api_key_identity(token_info["sub"]) - if not identity: - raise NotFoundHTTPException(detail="The API key is no longer valid.") + identity = auth0_management_session.get_user_api_key_identity(token_info["sub"]) + if not identity: + raise NotFoundHTTPException(detail="The API key is no longer valid.") + try: token = auth0_management_session.generate_access_token(identity["profileData"]["email"], user_api_key) - return make_response(token, 201) + except requests.exceptions.HTTPError as error: + if error.response.status_code == 403: + raise UnauthorizedError( + detail="This API key is old. use the latest API key or generate a new one." 
+ ) from error + raise error + return make_response(token, 201) diff --git a/tests/unit/backend/layers/api/test_curation_api.py b/tests/unit/backend/layers/api/test_curation_api.py index aa3929ddb575b..44e219d4812ef 100644 --- a/tests/unit/backend/layers/api/test_curation_api.py +++ b/tests/unit/backend/layers/api/test_curation_api.py @@ -6,6 +6,8 @@ from dataclasses import asdict from unittest.mock import Mock, patch +from requests import HTTPError, Response + from backend.common.providers.crossref_provider import CrossrefDOINotFoundException from backend.common.utils.api_key import generate from backend.curation.api.v1.curation.collections.common import EntityColumns @@ -2814,3 +2816,20 @@ def test__post_token__404(self, auth0_management_session, CorporaAuthConfig): user_api_key = generate(test_user_id, test_secret) response = self.app.post("/curation/v1/auth/token", headers={"x-api-key": user_api_key}) self.assertEqual(404, response.status_code) + + @patch("backend.curation.api.v1.curation.auth.token.CorporaAuthConfig") + @patch("backend.curation.api.v1.curation.auth.token.auth0_management_session") + def test__post_token__401_old_token(self, auth0_management_session, CorporaAuthConfig): + """The old token fails to authenticate""" + test_secret = "password1234" + test_email = "user@email.com" + test_user_id = "test_user_id" + test_response = Response() + test_response.status_code = 403 + test_error = HTTPError(response=test_response) + CorporaAuthConfig().api_key_secret = test_secret + auth0_management_session.get_user_api_key_identity = Mock(return_value={"profileData": {"email": test_email}}) + auth0_management_session.generate_access_token = Mock(side_effect=test_error) + user_api_key = generate(test_user_id, test_secret) + response = self.app.post("/curation/v1/auth/token", headers={"x-api-key": user_api_key}) + self.assertEqual(401, response.status_code) From 410370c21b22a2a674103bd1dedda44ec1b91381 Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Wed, 3 Jul 2024 07:40:11 -0700 Subject: [PATCH 03/15] fix: account for tombstoned datasets when getting all versions of a collection (#7252) --- backend/layers/persistence/persistence.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/backend/layers/persistence/persistence.py b/backend/layers/persistence/persistence.py index 02ef4ea03d41a..7a81c01b30032 100644 --- a/backend/layers/persistence/persistence.py +++ b/backend/layers/persistence/persistence.py @@ -370,16 +370,22 @@ def get_all_versions_for_collection( Retrieves all versions for a specific collections, without filtering """ with self._manage_session() as session: - version_rows = session.query(CollectionVersionTable).filter_by(collection_id=collection_id.id).all() + collection_version_rows = ( + session.query(CollectionVersionTable).filter_by(collection_id=collection_id.id).all() + ) canonical_collection = self.get_canonical_collection(collection_id) versions = [] - dataset_version_ids = [DatasetVersionId(str(_id)) for vr in version_rows for _id in vr.datasets] + dataset_version_ids = [ + DatasetVersionId(str(_id)) for cvr in collection_version_rows for _id in cvr.datasets + ] datasets = { str(dv.version_id): dv for dv in self.get_dataset_versions_by_id(dataset_version_ids, get_tombstoned=get_tombstoned) } - for row in version_rows: - ds = [datasets[str(id)] for id in row.datasets] + for row in collection_version_rows: + # filter out datasets that were not returned by get_dataset_versions_by_id + 
actual_datasets = filter(lambda dv_id: str(dv_id) in datasets, row.datasets) + ds = [datasets[str(dv_id)] for dv_id in actual_datasets] version = self._row_to_collection_version_with_datasets(row, canonical_collection, ds) versions.append(version) return versions From fb52bf2c53b7c656ff1c15d1b2869dc737372af5 Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Wed, 3 Jul 2024 09:42:56 -0700 Subject: [PATCH 04/15] chore: cleanup backend dependencies (#7003) --- .github/dependabot.yml | 13 +++++++++++++ python_dependencies/backend/requirements.txt | 3 --- python_dependencies/common/requirements-dev.txt | 2 ++ python_dependencies/wmg_processing/requirements.txt | 3 --- 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fe2d03e3dbdb1..c0dd41a9e4386 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -15,6 +15,19 @@ updates: interval: "daily" assignees: - "Bento007" + groups: + database: + patterns: + - "alembic" + - "sqlalchemy" + - "psycopg2" + server: + patterns: + - "flask" + - "gunicorn" + - "setproctitle" + - "connexion" + - package-ecosystem: pip directory: "/python_dependencies/wmg/" open-pull-requests-limit: 3 diff --git a/python_dependencies/backend/requirements.txt b/python_dependencies/backend/requirements.txt index 070c7ed38a086..f4ad7ee4d8e85 100644 --- a/python_dependencies/backend/requirements.txt +++ b/python_dependencies/backend/requirements.txt @@ -13,11 +13,8 @@ ddtrace>=2.1.4, <3 Flask>=2.2.3, <3 Flask-Cors>=3.0.6 flask-server-timing>=0.1.2 -furl>=2.1.2, <3 gunicorn[gevent]==22.0.0 -jsonschema>=3.2.0, <4 matplotlib>=3.6.3, <3.7 # 3.7.0 isn't compatible with scanpy: https://github.com/scverse/scanpy/issues/2411 -moto>=5.0.0 numba==0.59.1 # required for where's my gene numpy==1.23.5 # required for where's my gene pandas==1.5.3 # required for where's my gene diff --git a/python_dependencies/common/requirements-dev.txt b/python_dependencies/common/requirements-dev.txt index 1ffdd9abc3533..7c96d38d2d484 100644 --- a/python_dependencies/common/requirements-dev.txt +++ b/python_dependencies/common/requirements-dev.txt @@ -1,6 +1,8 @@ allure-pytest<3 click coverage +furl>=2.1.2, <3 +moto>=5.0.0 parameterized pytest pytest-mock diff --git a/python_dependencies/wmg_processing/requirements.txt b/python_dependencies/wmg_processing/requirements.txt index 0c2a432ce2938..59c7e6decb176 100644 --- a/python_dependencies/wmg_processing/requirements.txt +++ b/python_dependencies/wmg_processing/requirements.txt @@ -6,9 +6,6 @@ cellxgene-census>=1.10.0 # WMG pipeline always reads the latest version of Censu cellxgene-ontology-guide==0.8.0 dataclasses-json==0.5.7 ddtrace==2.1.4 -furl==2.1.3 -jsonschema==4.18.4 -moto>=5.0.0 numba>=0.58.0 numpy==1.23.5 openai==0.27.7 From aa6815eca8b464f2cc414f904c630d0ed45dbb86 Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Wed, 3 Jul 2024 16:29:40 -0700 Subject: [PATCH 05/15] fix(testing): avoid port number collisions (#7232) --- docker-compose.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 82899da2cad8e..486ed046a4ae8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -362,7 +362,7 @@ services: depends_on: - localstack ports: - - "5000:5000" + - "5001:5000" environment: - PYTHONUNBUFFERED=1 - CORPORA_LOCAL_DEV=true @@ -378,10 +378,11 @@ services: volumes: # Mount dirs in a layout suitable for running tests. 
- .:/single-cell-data-portal + - ./oauth/pkcs12:/tmp/pkcs12:ro networks: corporanet: aliases: - - backend.corporanet.local + - backend-de.corporanet.local backend-wmg: image: "${DOCKER_REPO}corpora-backend-wmg" @@ -405,7 +406,7 @@ services: depends_on: - localstack ports: - - "5000:5000" + - "5001:5000" environment: - PYTHONUNBUFFERED=1 - CORPORA_LOCAL_DEV=true @@ -421,10 +422,11 @@ services: volumes: # Mount dirs in a layout suitable for running tests. - .:/single-cell-data-portal + - ./oauth/pkcs12:/tmp/pkcs12:ro networks: corporanet: aliases: - - backend.corporanet.local + - backend-wmg.corporanet.local oidc: image: soluto/oidc-server-mock:0.3.0 From e688b68088dc03e0fb5058d386dc39956f1f8afa Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Mon, 8 Jul 2024 09:37:13 -0700 Subject: [PATCH 06/15] feat(cxg_admin): add command to generate a schema migration report from execution_id (#7164) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Reason for Change - Generate a report similar to the one posted to Slack using the cxg_admin CLI tool. This will aid in getting early feedback about an in-progress schema migration. ## Changes - add a dry_run option to SchemaMigrate.report to generate a report without reporting to Slack or deleting files in S3. - add a schema-migrate group in the cxg_admin script to group together commands that are related to schema migration - add a generate-report command under schema-migrate in the cxg_admin CLI tool. This will generate a report and save it to a file given an Sfn Execution ID. - make SchemaMigrate.schema_version a property so it is lazily loaded; this way the cxg_admin CLI tool does not need the cellxgene_schema_cli tool to work. ## Testing steps - updated unit tests for SchemaMigrate.report - tested the command on an active migration in dev ## Notes for Reviewer --- backend/layers/processing/schema_migration.py | 46 ++++++++++++------- scripts/cxg_admin.py | 29 +++++++++++- scripts/cxg_admin_scripts/schema_migration.py | 11 +++++ .../test_collection_migrate.py | 10 ++-- .../schema_migration/test_report.py | 11 ++--- 5 files changed, 76 insertions(+), 31 deletions(-) diff --git a/backend/layers/processing/schema_migration.py b/backend/layers/processing/schema_migration.py index edfddc8dcb89d..e0544638a2cd8 100644 --- a/backend/layers/processing/schema_migration.py +++ b/backend/layers/processing/schema_migration.py @@ -31,12 +31,18 @@ def __init__(self, business_logic: BusinessLogic, schema_validator: SchemaValida self.schema_validator = schema_validator self.business_logic = business_logic self.s3_provider = business_logic.s3_provider # For compatiblity with ProcessingLogic - self.artifact_bucket = os.environ.get("ARTIFACT_BUCKET", "test-bucket") + self.artifact_bucket = os.environ.get("ARTIFACT_BUCKET", "artifact-bucket") self.execution_id = os.environ.get("EXECUTION_ID", "test-execution-arn") self.logger = logging.getLogger("processing") self.local_path: str = "."
# Used for testing self.limit_migration = os.environ.get("LIMIT_MIGRATION", 0) # Run a small migration for testing - self.schema_version = schema_validator.get_current_schema_version() + self._schema_version = None + + @property + def schema_version(self): + if not self._schema_version: + self._schema_version = self.schema_validator.get_current_schema_version() + return self._schema_version def fetch_collections(self) -> Iterable[CollectionVersion]: published_collections = [*self.business_logic.get_collections(CollectionQueryFilter(is_published=True))] @@ -309,37 +315,45 @@ def wrapper(*args, **kwargs): return wrapper - def report(self) -> str: + def report(self, artifact_bucket=None, execution_id=None, dry_run=True) -> dict: + """ + Generate a report of the schema migration process. This function will download all the error and migration + :param artifact_bucket: The bucket where the schema migration artifacts are stored. + :param execution_id: the execution id of the AWS SFN schema migration in progress. + :param dry_run: If dry_run is True, then a report will be returned without deleting any s3 assets or report to + slack. + :return: a json report of the schema migration process + """ + artifact_bucket = artifact_bucket or self.artifact_bucket + execution_id = execution_id or self.execution_id + try: report = dict(errors=[], migrate_changes=[]) def retrieve_report_files_from_s3(message_type: str): s3_keys = list( self.s3_provider.list_directory( - self.artifact_bucket, - self.get_key_prefix(f"schema_migration/{self.execution_id}/report/{message_type}"), + artifact_bucket, + self.get_key_prefix(f"schema_migration/{execution_id}/report/{message_type}"), ) ) self.logger.info("Subdirectory Count", extra={"message_type": message_type, "count": len(s3_keys)}) for s3_key in s3_keys: local_file = os.path.join(self.local_path, "data.json") - self.s3_provider.download_file(self.artifact_bucket, s3_key, local_file) + self.s3_provider.download_file(artifact_bucket, s3_key, local_file) with open(local_file, "r") as f: jn = json.load(f) report[message_type].append(jn) - # Cleanup S3 files - self.s3_provider.delete_files(self.artifact_bucket, s3_keys) retrieve_report_files_from_s3("errors") retrieve_report_files_from_s3("migrate_changes") - self.logger.info("Report", extra=report) - report_str = json.dumps(report, indent=4, sort_keys=True, cls=CustomJSONEncoder) - report_message = f"Schema migration results ({os.environ['DEPLOYMENT_STAGE']} env)" - self._upload_to_slack("schema_migration_report.json", report_str, report_message) - # Cleanup leftover schema migration files - self.s3_provider.delete_prefix( - self.artifact_bucket, self.get_key_prefix(f"schema_migration/{self.execution_id}") - ) + if not dry_run: + self.logger.info("Report", extra=report) + report_str = json.dumps(report, indent=4, sort_keys=True, cls=CustomJSONEncoder) + report_message = f"Schema migration results ({os.environ['DEPLOYMENT_STAGE']} env)" + self._upload_to_slack("schema_migration_report.json", report_str, report_message) + # Cleanup leftover schema migration files + self.s3_provider.delete_prefix(artifact_bucket, self.get_key_prefix(f"schema_migration/{execution_id}")) return report except Exception as e: diff --git a/scripts/cxg_admin.py b/scripts/cxg_admin.py index c1e11f6b12aa1..ea0a545539e5e 100755 --- a/scripts/cxg_admin.py +++ b/scripts/cxg_admin.py @@ -287,18 +287,42 @@ def get_public_datasets(ctx): print(json.dumps(published_datasets, indent=2)) -@cli.command() +@cli.group("schema-migration") +@click.pass_context 
+def schema_migration_cli(ctx): + """ + Commands for schema migration + """ + deployment = ctx.obj["deployment"] + happy_env = "stage" if deployment == "staging" else ctx.obj["deployment"] + happy_config = json.loads(AwsSecret(f"happy/env-{happy_env}-config").value) + os.environ["ARTIFACT_BUCKET"] = happy_config["s3_buckets"]["artifact"]["name"] + + +@schema_migration_cli.command() @click.pass_context @click.argument("report_path", type=click.Path(exists=True)) def rollback_datasets(ctx, report_path: str): """ Used to rollback a datasets to a previous version. - ./scripts/cxg_admin.py --deployment dev rollback-dataset report.json + ./scripts/cxg_admin.py schema-migration --deployment dev rollback-dataset report.json """ schema_migration.rollback_dataset(ctx, report_path) + +@schema_migration_cli.command() +@click.pass_context +@click.argument("execution_id") +@click.argument("output_path", type=click.Path(writable=True), default=".") +def generate_report(ctx, execution_id: str, output_path: str): + """ + Generates a report for the schema migration process. + ./scripts/cxg_admin.py --deployment dev schema-migration generate-report execution_id + """ + schema_migration.generate_report(ctx, execution_id, output_path, os.environ["ARTIFACT_BUCKET"]) + @cli.command() @click.pass_context @click.argument("request_id") @@ -312,5 +336,6 @@ def get_request_logs(ctx, request_id: str, hours: int): print(json.dumps(request_logs.get(request_id, hours, ctx.obj["deployment"], ctx.obj["stackname"]), indent=4)) + if __name__ == "__main__": cli(obj={}) diff --git a/scripts/cxg_admin_scripts/schema_migration.py b/scripts/cxg_admin_scripts/schema_migration.py index ca5120d3ae266..89c92cfb578c5 100644 --- a/scripts/cxg_admin_scripts/schema_migration.py +++ b/scripts/cxg_admin_scripts/schema_migration.py @@ -1,7 +1,9 @@ import json from pathlib import Path +from backend.common.utils.json import CustomJSONEncoder from backend.layers.common.entities import CollectionVersionId, DatasetId +from backend.layers.processing.schema_migration import SchemaMigrate def rollback_dataset(ctx, report_path: Path): @@ -14,3 +16,12 @@ def rollback_dataset(ctx, report_path: Path): ctx.obj["business_logic"].restore_previous_dataset_version( CollectionVersionId(collection_version_id), DatasetId(dataset_id) ) + + +def generate_report(ctx, execution_id: str, report_path: str, artifact_bucket: str): + schema_migration = SchemaMigrate(ctx.obj["business_logic"], None) + report = schema_migration.report(execution_id=execution_id, artifact_bucket=artifact_bucket, dry_run=True) + report_file = Path(report_path).joinpath(f"{ctx.obj['deployment']}-{execution_id}.json") + with open(report_file, "w") as f: + json.dump(report, f, indent=4, sort_keys=True, cls=CustomJSONEncoder) + print(f"Report saved to {report_file}") diff --git a/tests/unit/processing/schema_migration/test_collection_migrate.py b/tests/unit/processing/schema_migration/test_collection_migrate.py index ed45ad3e5da08..c0106121637d3 100644 --- a/tests/unit/processing/schema_migration/test_collection_migrate.py +++ b/tests/unit/processing/schema_migration/test_collection_migrate.py @@ -7,7 +7,7 @@ class TestCollectionMigrate: def test_migrate_published_collection(self, schema_migrate_and_collections): schema_migrate, collections = schema_migrate_and_collections schema_migrate._store_sfn_response = Mock(wraps=schema_migrate._store_sfn_response) - schema_migrate.schema_version = "0.0.0" + schema_migrate._schema_version = "0.0.0" published = collections["published"][0] 
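        # The _schema_version assignment above seeds the new lazily loaded property directly; schema_version is now a read-only property whose getter would otherwise invoke the schema validator.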
collection_version_id = CollectionVersionId() schema_migrate.business_logic.create_collection_version.return_value = Mock(version_id=collection_version_id) @@ -44,7 +44,7 @@ def test_migrate_published_collection(self, schema_migrate_and_collections): def test_migrate_private_collection(self, schema_migrate_and_collections): schema_migrate, collections = schema_migrate_and_collections schema_migrate._store_sfn_response = Mock(wraps=schema_migrate._store_sfn_response) - schema_migrate.schema_version = "0.0.0" + schema_migrate._schema_version = "0.0.0" private = collections["private"][0] datasets = [ { @@ -145,7 +145,7 @@ def test_no_datasets(self, schema_migrate_and_collections): def test_create_migration_revision__private(self, schema_migrate_and_collections): schema_migrate, collections = schema_migrate_and_collections schema_migrate._store_sfn_response = Mock(wraps=schema_migrate._store_sfn_response) - schema_migrate.schema_version = "0.0.0" + schema_migrate._schema_version = "0.0.0" private = collections["private"][0] schema_migrate.business_logic.create_collection_version = Mock( return_value=Mock(version_id=CollectionVersionId()) @@ -158,7 +158,7 @@ def test_create_migration_revision__private(self, schema_migrate_and_collections def test_create_migration_revision__published_with_revision(self, schema_migrate_and_collections): schema_migrate, collections = schema_migrate_and_collections schema_migrate._store_sfn_response = Mock(wraps=schema_migrate._store_sfn_response) - schema_migrate.schema_version = "0.0.0" + schema_migrate._schema_version = "0.0.0" published, revision = collections["revision"] schema_migrate.business_logic.create_collection_version = Mock( return_value=Mock(version_id=CollectionVersionId()) @@ -173,7 +173,7 @@ def test_create_migration_revision__published_with_revision(self, schema_migrate def test_create_migration_revision__published_no_revision(self, schema_migrate_and_collections): schema_migrate, collections = schema_migrate_and_collections schema_migrate._store_sfn_response = Mock(wraps=schema_migrate._store_sfn_response) - schema_migrate.schema_version = "0.0.0" + schema_migrate._schema_version = "0.0.0" published = collections["published"][0] schema_migrate.business_logic.create_collection_version = Mock( diff --git a/tests/unit/processing/schema_migration/test_report.py b/tests/unit/processing/schema_migration/test_report.py index 24d2bf7d6fd26..d044af20ee2ef 100644 --- a/tests/unit/processing/schema_migration/test_report.py +++ b/tests/unit/processing/schema_migration/test_report.py @@ -1,5 +1,4 @@ import json -from unittest.mock import call def mock_download_file(bucket: str, key: str, local_path: str): @@ -22,14 +21,10 @@ def test_report(schema_migrate_and_collections, tmpdir): schema_migrate.business_logic.s3_provider.list_directory = mock_list_directory schema_migrate._upload_to_slack = lambda *args: None schema_migrate.local_path = str(tmpdir) - assert schema_migrate.report() == { + assert schema_migrate.report(dry_run=False) == { "errors": ["files_0.json", "files_1.json", "files_2.json"], "migrate_changes": ["dataset_0_changes.json", "dataset_1_changes.json"], } - assert schema_migrate.s3_provider.delete_files.call_count == 2 - schema_migrate.s3_provider.delete_files.assert_has_calls( - [ - call(schema_migrate.artifact_bucket, ["files_0.json", "files_1.json", "files_2.json"]), - call(schema_migrate.artifact_bucket, ["dataset_0_changes.json", "dataset_1_changes.json"]), - ] + schema_migrate.s3_provider.delete_prefix.assert_called_once_with( + 
schema_migrate.artifact_bucket, f"schema_migration/{schema_migrate.execution_id}" ) From a4003aac64825af3bfe57c2e23805fcf9a76db52 Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Mon, 8 Jul 2024 11:52:10 -0700 Subject: [PATCH 07/15] fix: lint (#7262) --- scripts/cxg_admin.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/cxg_admin.py b/scripts/cxg_admin.py index ea0a545539e5e..4b118ffb3bb91 100755 --- a/scripts/cxg_admin.py +++ b/scripts/cxg_admin.py @@ -310,7 +310,7 @@ def rollback_datasets(ctx, report_path: str): """ schema_migration.rollback_dataset(ctx, report_path) - + @schema_migration_cli.command() @click.pass_context @click.argument("execution_id") @@ -322,7 +322,7 @@ def generate_report(ctx, execution_id: str, output_path: str): """ schema_migration.generate_report(ctx, execution_id, output_path, os.environ["ARTIFACT_BUCKET"]) - + @cli.command() @click.pass_context @click.argument("request_id") @@ -336,6 +336,5 @@ def get_request_logs(ctx, request_id: str, hours: int): print(json.dumps(request_logs.get(request_id, hours, ctx.obj["deployment"], ctx.obj["stackname"]), indent=4)) - if __name__ == "__main__": cli(obj={}) From 1399eeb9a6df0639def81a8110f07eeaaf64b653 Mon Sep 17 00:00:00 2001 From: Mim Hastie Date: Mon, 8 Jul 2024 12:10:02 -0700 Subject: [PATCH 08/15] feat: update dataset title Curation endpoint (#7233) Co-authored-by: Trent Smith <1429913+Bento007@users.noreply.github.com> --- backend/curation/api/curation-api.yml | 37 +++++ .../datasets/dataset_id/actions.py | 34 ++++ .../backend/layers/api/test_curation_api.py | 151 ++++++++++++++++++ 3 files changed, 222 insertions(+) diff --git a/backend/curation/api/curation-api.yml b/backend/curation/api/curation-api.yml index ad618ee536285..19a9547022a7f 100644 --- a/backend/curation/api/curation-api.yml +++ b/backend/curation/api/curation-api.yml @@ -472,6 +472,43 @@ paths: $ref: "#/components/responses/404" "413": $ref: "#/components/responses/413" + patch: + tags: + - Collection + - Dataset + summary: Update a Dataset's metadata. + security: + - curatorAccess: [] + description: >- + Update a Dataset's title and title of corresponding dataset artifacts. + operationId: backend.curation.api.v1.curation.collections.collection_id.datasets.dataset_id.actions.patch + parameters: + - $ref: "#/components/parameters/path_collection_id" + - $ref: "#/components/parameters/path_dataset_id" + requestBody: + content: + application/json: + schema: + type: object + properties: + title: + type: string + description: The title of the Dataset. 
+ responses: + "202": + $ref: "#/components/responses/202" + "400": + $ref: "#/components/responses/400" + "401": + $ref: "#/components/responses/401" + "403": + $ref: "#/components/responses/403" + "404": + $ref: "#/components/responses/404" + "405": + $ref: "#/components/responses/405" + "410": + $ref: "#/components/responses/410" /v1/collections/{collection_id}/s3-upload-credentials: get: diff --git a/backend/curation/api/v1/curation/collections/collection_id/datasets/dataset_id/actions.py b/backend/curation/api/v1/curation/collections/collection_id/datasets/dataset_id/actions.py index 565421bdc32f2..7af6951d489af 100644 --- a/backend/curation/api/v1/curation/collections/collection_id/datasets/dataset_id/actions.py +++ b/backend/curation/api/v1/curation/collections/collection_id/datasets/dataset_id/actions.py @@ -25,10 +25,12 @@ DatasetIsPrivateException, DatasetIsTombstonedException, DatasetNotFoundException, + InvalidMetadataException, InvalidURIException, ) from backend.layers.common.entities import ( CollectionVersionWithDatasets, + DatasetArtifactMetadataUpdate, DatasetId, DatasetVersion, ) @@ -141,3 +143,35 @@ def put(collection_id: str, dataset_id: str, body: dict, token_info: dict): "the submission has finished processing." ) from None # End of duplicate block + + +def patch(collection_id: str, dataset_id: str, body: dict, token_info: dict): + """ + Update a dataset's metadata. + """ + + # Find collection and dataset. + collection_version, dataset_version = _get_collection_and_dataset(collection_id, dataset_id) + + # Confirm user has permission to update dataset. + if not UserInfo(token_info).is_user_owner_or_allowed(collection_version.owner): + raise ForbiddenHTTPException() + + # Create payload and attempt update. + payload = DatasetArtifactMetadataUpdate(body.get("title")) + try: + get_business_logic().update_dataset_artifact_metadata( + collection_version.version_id, dataset_version.version_id, payload + ) + except InvalidMetadataException as ex: + raise InvalidParametersHTTPException(ext=dict(invalid_parameters=ex.errors)) from None + except CollectionNotFoundException: + raise NotFoundHTTPException() from None + except CollectionIsPublishedException: + raise ForbiddenHTTPException() from None + except DatasetInWrongStatusException: + raise MethodNotAllowedException( + detail="Dataset cannot be updated if processing status is not SUCCESS." + ) from None + + return Response(status=202) diff --git a/tests/unit/backend/layers/api/test_curation_api.py b/tests/unit/backend/layers/api/test_curation_api.py index 44e219d4812ef..a26f16eecfa0c 100644 --- a/tests/unit/backend/layers/api/test_curation_api.py +++ b/tests/unit/backend/layers/api/test_curation_api.py @@ -2303,6 +2303,157 @@ def test_get_datasets_by_schema_200(self): self.assertEqual(400, response.status_code) +class TestPatchDataset(BaseAPIPortalTest): + def test_patch_revision_dataset_owner_202(self): + # Generate revision of public collection. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + publish=True, + ) + revision = self.generate_revision(CollectionId(dataset.collection_id)) + + # Attempt to update dataset title. + response = self.app.patch( + f"/curation/v1/collections/{revision.version_id.id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_owner_header(), + ) + + # Confirm update was successful. 
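        # A 202 Accepted (rather than 200) is expected because the dataset artifact updates are applied asynchronously after the request is accepted.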
+ self.assertEqual(202, response.status_code) + + def test_patch_private_collection_dataset_owner_202(self): + # Generate private collection and dataset. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + ) + + # Attempt to update dataset title. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_owner_header(), + ) + + # Confirm update was successful. + self.assertEqual(202, response.status_code) + + def test_patch_private_collection_dataset_super_202(self): + # Generate private collection and dataset. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + ) + + # Attempt to update dataset title as super curator. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_super_curator_header(), + ) + + # Confirm update was successful. + self.assertEqual(202, response.status_code) + + def test_patch_private_collection_dataset_no_auth_401(self): + # Generate private collection and dataset. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + ) + + # Attempt to update dataset title without auth. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + ) + + # Confirm forbidden response. + self.assertEqual(401, response.status_code) + + def test_patch_private_collection_dataset_not_authorized_403(self): + # Generate private collection and dataset. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + ) + + # Attempt to update dataset title as a non-owner. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_not_owner_header(), + ) + + # Confirm forbidden response. + self.assertEqual(403, response.status_code) + + def test_patch_public_collection_dataset_not_authorized_403(self): + # Generate public collection and dataset. + dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + publish=True, + ) + + # Attempt to update dataset title. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_owner_header(), + ) + + # Confirm forbidden response. + self.assertEqual(403, response.status_code) + + def test_patch_private_collection_dataset_invalid_collection_404(self): + # Generate private collection and dataset. 
+ dataset = self.generate_dataset( + statuses=[ + DatasetStatusUpdate(DatasetStatusKey.PROCESSING, DatasetProcessingStatus.SUCCESS), + DatasetStatusUpdate(DatasetStatusKey.UPLOAD, DatasetUploadStatus.UPLOADED), + ], + ) + + # Attempt to update dataset title with an invalid collection ID. + response = self.app.patch( + f"/curation/v1/collections/{str(uuid.uuid4())}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_owner_header(), + ) + + # Confirm not found response. + self.assertEqual(404, response.status_code) + + def test_patch_private_collection_dataset_invalid_status_405(self): + # Generate private collection and a dataset with a non-SUCCESS processing status. + dataset = self.generate_dataset() + + # Attempt to update dataset title. + response = self.app.patch( + f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_id}", + data=json.dumps({"title": "new title"}), + headers=self.make_owner_header(), + ) + + # Confirm not allowed response. + self.assertEqual(405, response.status_code) + + class TestGetDatasetVersion(BaseAPIPortalTest): def test_get_dataset_version_ok(self): collection = self.generate_published_collection() From 4e9215fa659e167a438f2b629695409b9903d256 Mon Sep 17 00:00:00 2001 From: atarashansky Date: Mon, 8 Jul 2024 12:25:59 -0700 Subject: [PATCH 09/15] chore(de+cellguide): add share url feature to DE and add link button to cellguide (#7259) --- backend/de/api/v1.py | 16 +- .../components/Description/index.tsx | 40 ++++- .../components/Description/style.ts | 6 + .../components/CellGuideCard/index.tsx | 2 + .../components/Main/connect.ts | 154 ++++++++++++++++++ .../differentialExpression.test.ts | 38 +++++ 6 files changed, 248 insertions(+), 8 deletions(-) diff --git a/backend/de/api/v1.py b/backend/de/api/v1.py index 21a6e2eda459e..eab8fc70d586f 100644 --- a/backend/de/api/v1.py +++ b/backend/de/api/v1.py @@ -13,7 +13,7 @@ from backend.common.census_cube.data.query import CensusCubeQuery from backend.common.census_cube.data.schemas.cube_schema_diffexp import cell_counts_logical_dims_exclude_dataset_id from backend.common.census_cube.data.snapshot import CensusCubeSnapshot, load_snapshot -from backend.common.census_cube.utils import descendants +from backend.common.census_cube.utils import ancestors, descendants from backend.common.marker_genes.marker_gene_files.blacklist import marker_gene_blacklist from backend.de.api.config import ( CENSUS_CUBE_API_FORCE_LOAD_SNAPSHOT_ID, @@ -36,6 +36,12 @@ def filters(): with ServerTiming.time("calculate filters and build response"): q = CensusCubeQuery(snapshot, cube_query_params=None) + + if criteria.cell_type_ontology_term_ids: + criteria.cell_type_ontology_term_ids = list( + set(sum([descendants(i) for i in criteria.cell_type_ontology_term_ids], [])) + ) + response_filter_dims_values = build_filter_dims_values(criteria, snapshot, q) n_cells = _get_cell_counts_for_query(q, criteria) @@ -137,7 +143,9 @@ def build_filter_dims_values(criteria: BaseQueryCriteria, snapshot: CensusCubeSn dims["self_reported_ethnicity_ontology_term_id"] ), publication_citations=dims["publication_citation"], - cell_type_terms=build_ontology_term_id_label_mapping(dims["cell_type_ontology_term_id"]), + cell_type_terms=build_ontology_term_id_label_mapping( + set(sum([ancestors(i) for i in dims["cell_type_ontology_term_id"]], [])) + ), tissue_terms=build_ontology_term_id_label_mapping(dims["tissue_ontology_term_id"]), 
organism_terms=build_ontology_term_id_label_mapping(dims["organism_ontology_term_id"]), ) @@ -311,10 +319,6 @@ def run_differential_expression( def _get_cell_counts_for_query(q: CensusCubeQuery, criteria: BaseQueryCriteria) -> pd.DataFrame: - if criteria.cell_type_ontology_term_ids: - criteria.cell_type_ontology_term_ids = list( - set(sum([descendants(i) for i in criteria.cell_type_ontology_term_ids], [])) - ) cell_counts = q.cell_counts_diffexp_df(criteria) return int(cell_counts["n_total_cells"].sum()) diff --git a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx index 1b82b58a97597..2d82f3d295eac 100644 --- a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx +++ b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx @@ -1,5 +1,11 @@ -import React, { Dispatch, SetStateAction, useEffect, useState } from "react"; -import { TooltipProps } from "@czi-sds/components"; +import React, { + Dispatch, + SetStateAction, + useEffect, + useMemo, + useState, +} from "react"; +import { Icon, TooltipProps } from "@czi-sds/components"; import { CellGuideCardDescription, ChatGptTooltipSubtext, @@ -18,6 +24,7 @@ import { StyledTag, ReferencesWrapper, ValidatedInlineWrapper, + StyledLinkLabel, } from "./style"; import { Label } from "src/components/Synonyms/style"; @@ -25,6 +32,7 @@ import { useGptDescription, useCellTypeMetadata, useValidatedDescription, + ORGANISM_NAME_TO_TAXON_ID_MAPPING, } from "src/common/queries/cellGuide"; import ValidatedIcon from "src/common/images/validated.svg"; import Link from "../common/Link"; @@ -42,6 +50,7 @@ import { } from "src/views/CellGuide/components/CellGuideCard/components/Description/constants"; import { useIsComponentPastBreakpointHeight } from "../common/hooks/useIsComponentPastBreakpoint"; import { StyledQuestionMarkIcon } from "src/common/style"; +import { ROUTES } from "src/common/constants/routes"; // TODO(SVGR) ADD BACK HOVER BRIGHTNESS CHANGE @@ -66,8 +75,12 @@ interface DescriptionProps { >; inSideBar?: boolean; synonyms?: string[]; + selectedOrganism?: string; + selectedOrganId?: string; } export default function Description({ + selectedOrganism, + selectedOrganId, cellTypeId, cellTypeName, skinnyMode, @@ -90,6 +103,18 @@ export default function Description({ DESCRIPTION_BREAKPOINT_HEIGHT_PX ); + const shareUrlForDE = useMemo(() => { + if (!selectedOrganism) { + return ""; + } + const organism = ORGANISM_NAME_TO_TAXON_ID_MAPPING[ + selectedOrganism as keyof typeof ORGANISM_NAME_TO_TAXON_ID_MAPPING + ].replace("_", ":"); + const tissueSuffix = + selectedOrganId == "" ? 
"" : `&tissues=${selectedOrganId}`; + return `${ROUTES.DE}?organism=${organism}&celltypes=${cellTypeId}${tissueSuffix}`; + }, [selectedOrganId, selectedOrganism, cellTypeId]); + useEffect(() => { if (isPastBreakpoint) { setDescriptionMaxHeight(DESCRIPTION_BREAKPOINT_HEIGHT_PX); @@ -322,6 +347,17 @@ export default function Description({ })} )} + {shareUrlForDE !== "" && ( + + Open in Differential Expression + + + } + /> + )} ); const validatedDescriptionComponent = ( diff --git a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/style.ts b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/style.ts index 078bf4d4babc9..24b5f0a1dfa89 100644 --- a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/style.ts +++ b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/style.ts @@ -157,3 +157,9 @@ export const ReferencesWrapper = styled.div` height: 36px; align-items: center; `; + +export const StyledLinkLabel = styled.div` + display: flex; + align-items: center; + gap: 4px; +`; diff --git a/frontend/src/views/CellGuide/components/CellGuideCard/index.tsx b/frontend/src/views/CellGuide/components/CellGuideCard/index.tsx index 387d858c441a7..374a04c336131 100644 --- a/frontend/src/views/CellGuide/components/CellGuideCard/index.tsx +++ b/frontend/src/views/CellGuide/components/CellGuideCard/index.tsx @@ -359,6 +359,8 @@ export default function CellGuideCard({ NotFoundError: Failed to execute 'insertBefore' on 'Node' */} { + // These flags control a state machine that ensures proper initialization of the DE state + // from the URL query parameters. First organism is initialized, then the query group is initialized. + const [organismInitializedFromShareURL, setOrganismInitializedFromShareURL] = + useState(false); + const [ + queryGroupInitializedFromShareURL, + setQueryGroupInitializedFromShareURL, + ] = useState(false); + const [isLoading, setIsLoading] = useState(false); const [isLoadingGetDeQuery, setIsLoadingGetDeQuery] = useState(false); @@ -58,6 +76,142 @@ export const useConnect = () => { const { n_cells: nCellsGroup2, isLoading: isLoadingGroup2 } = useProcessedQueryGroupFilterDimensions(queryGroup2); + const router = useRouter(); + const { data: availableOrganisms, isLoading: isLoadingOrganisms } = + useAvailableOrganisms(); + + const { data: allFilterOptions, isLoading: isLoadingFilterOptions } = + useQueryGroupFilterDimensions(EMPTY_FILTERS); + + // We have a state machine that ensures the below effects are only triggered on page load. + // First select the organism and set organismInitializedFromShareURL to true. + useEffect(() => { + if (!dispatch || isLoadingOrganisms || organismInitializedFromShareURL) + return; + const { search } = window.location; + const params = new URLSearchParams(search); + const organism: string | null = params.get("organism"); + if (!organism) return; + + const isOrganismValid = availableOrganisms.some( + (org) => org.id === organism + ); + if (isOrganismValid) { + dispatch(selectOrganism(organism)); + } + setOrganismInitializedFromShareURL(true); + }, [ + dispatch, + isLoadingOrganisms, + availableOrganisms, + organismInitializedFromShareURL, + ]); + + // Now, we parse the query groups from the URL and set queryGroupInitializedFromShareURL to true. + // We only want to do this if the organism has been initialized from the URL since the valid filter options + // will change based on the organism. 
+ useEffect(() => { + if ( + !dispatch || + !organismInitializedFromShareURL || + isLoadingFilterOptions || + queryGroupInitializedFromShareURL + ) + return; + + const { search } = window.location; + const params = new URLSearchParams(search); + + const paramsToRemove: string[] = []; + + const getParamsAndRemove = (paramName: string) => { + const paramValues = params.get(paramName)?.split(","); + if (paramValues) { + paramsToRemove.push(paramName); + } + return paramValues; + }; + + const filterOptions = ( + paramValues: string[], + filterTerms: { id: string; name: string }[] + ) => { + return filterTerms.filter((term) => paramValues.includes(term.id)); + }; + + const cellTypes = getParamsAndRemove("celltypes"); + const diseases = getParamsAndRemove("diseases"); + const ethnicities = getParamsAndRemove("ethnicities"); + const publications = getParamsAndRemove("publications"); + const sexes = getParamsAndRemove("sexes"); + const tissues = getParamsAndRemove("tissues"); + const organism: string | null = params.get("organism"); + + if (organism) { + paramsToRemove.push("organism"); + } + + if (tissues) { + const tissuesFiltered = filterOptions( + tissues, + allFilterOptions.tissue_terms + ); + dispatch(selectQueryGroup1Filters("tissues", tissuesFiltered)); + } + + if (cellTypes) { + const cellTypesFiltered = filterOptions( + cellTypes, + allFilterOptions.cell_type_terms + ); + dispatch(selectQueryGroup1Filters("cellTypes", cellTypesFiltered)); + } + if (diseases) { + const diseasesFiltered = filterOptions( + diseases, + allFilterOptions.disease_terms + ); + dispatch(selectQueryGroup1Filters("diseases", diseasesFiltered)); + } + if (ethnicities) { + const ethnicitiesFiltered = filterOptions( + ethnicities, + allFilterOptions.self_reported_ethnicity_terms + ); + dispatch(selectQueryGroup1Filters("ethnicities", ethnicitiesFiltered)); + } + + if (publications) { + const publicationsFiltered = allFilterOptions.publication_citations + .filter((publication) => publications.includes(publication)) + .map((publication) => ({ + id: publication, + name: publication, + })); + dispatch( + selectQueryGroup1Filters("publicationCitations", publicationsFiltered) + ); + } + if (sexes) { + const sexesFiltered = filterOptions(sexes, allFilterOptions.sex_terms); + dispatch(selectQueryGroup1Filters("sexes", sexesFiltered)); + } + removeParams({ + params: paramsToRemove, + router: router, + }); + setQueryGroupInitializedFromShareURL(true); + }, [ + router, + allFilterOptions, + isLoadingFilterOptions, + dispatch, + organismInitializedFromShareURL, + setOrganismInitializedFromShareURL, + queryGroupInitializedFromShareURL, + setQueryGroupInitializedFromShareURL, + ]); + return { isLoading, setIsLoading, diff --git a/frontend/tests/features/differentialExpression/differentialExpression.test.ts b/frontend/tests/features/differentialExpression/differentialExpression.test.ts index 14f1e41a2e0d3..c02ed30a3e41d 100644 --- a/frontend/tests/features/differentialExpression/differentialExpression.test.ts +++ b/frontend/tests/features/differentialExpression/differentialExpression.test.ts @@ -472,6 +472,44 @@ describe("Differential Expression", () => { // Ensure "Find Genes" button is disabled again await expect(findGenesButton).toBeDisabled(); }); + + test("Differential Expression With Query Parameters", async ({ page }) => { + await goToPage( + `${TEST_URL}${ROUTES.DE}?organism=NCBITaxon:10090&celltypes=CL:0000622&tissues=UBERON:0001264`, + page + ); + await waitForFiltersEndpoint(page); + + // Ensure the organism dropdown is 
set to "Mus musculus" + const organismDropdown = page.getByTestId( + DIFFERENTIAL_EXPRESSION_ORGANISM_DROPDOWN + ); + await expect(organismDropdown).toHaveText("Mus musculus"); + + const cellTypeFilterAutocompleteGroup1 = page + .getByTestId(DIFFERENTIAL_EXPRESSION_CELL_GROUP_1_FILTER) + .getByTestId( + `${DIFFERENTIAL_EXPRESSION_FILTER_AUTOCOMPLETE_PREFIX}Cell Type` + ); + + const tissueFilterAutocompleteGroup1 = page + .getByTestId(DIFFERENTIAL_EXPRESSION_CELL_GROUP_1_FILTER) + .getByTestId( + `${DIFFERENTIAL_EXPRESSION_FILTER_AUTOCOMPLETE_PREFIX}Tissue` + ); + + // Ensure the filters are applied correctly + await expect( + cellTypeFilterAutocompleteGroup1.getByTestId( + DIFFERENTIAL_EXPRESSION_FILTER_TAG_PRIMARY + ) + ).toHaveText("acinar cell"); + await expect( + tissueFilterAutocompleteGroup1.getByTestId( + DIFFERENTIAL_EXPRESSION_FILTER_TAG_PRIMARY + ) + ).toHaveText("pancreas"); + }); }); describe("Results", () => { From 5954bec5c48ccc3ed74be84fff91fdbde7fcd081 Mon Sep 17 00:00:00 2001 From: atarashansky Date: Mon, 8 Jul 2024 12:26:18 -0700 Subject: [PATCH 10/15] chore(de): remove unavailable tags from query group boxes (#7257) --- .../common/store/actions.ts | 22 ++++++++- .../common/store/reducer.ts | 40 +++++++++++++++ .../components/QueryGroupTags/index.tsx | 40 +++++++-------- .../DifferentialExpressionResults/connect.ts | 8 ++- .../DifferentialExpressionResults/index.tsx | 13 ++--- .../Main/components/DeResults/utils.ts | 1 + .../Filters/components/CopyButton/connect.ts | 2 +- .../components/FilterDropdown/connect.ts | 31 +++++++++++- .../components/FilterDropdown/index.tsx | 9 ++-- .../components/FilterDropdown/style.ts | 2 +- .../components/FilterDropdown/types.ts | 7 ++- .../components/FilterDropdown/utils.ts | 2 +- .../Main/components/Filters/connect.ts | 3 +- .../Main/components/Filters/index.tsx | 2 + .../Main/components/Filters/types.ts | 6 --- .../common/query_group_filter_dimensions.ts | 6 ++- .../differentialExpression.test.ts | 49 +++++++++++++++++-- 17 files changed, 187 insertions(+), 56 deletions(-) diff --git a/frontend/src/views/DifferentialExpression/common/store/actions.ts b/frontend/src/views/DifferentialExpression/common/store/actions.ts index 3b3660283eaa4..0847b89651b7e 100644 --- a/frontend/src/views/DifferentialExpression/common/store/actions.ts +++ b/frontend/src/views/DifferentialExpression/common/store/actions.ts @@ -1,4 +1,4 @@ -import { QueryGroup, REDUCERS, State } from "./reducer"; +import { FilterOption, QueryGroup, REDUCERS, State } from "./reducer"; export function selectOrganism( organismId: State["organismId"] @@ -9,6 +9,26 @@ export function selectOrganism( }; } +export function setSelectedOptionsGroup1( + key: keyof QueryGroup, + options: FilterOption[] +): GetActionTypeOfReducer<(typeof REDUCERS)["setSelectedOptionsGroup1"]> { + return { + payload: { key, options }, + type: "setSelectedOptionsGroup1", + }; +} + +export function setSelectedOptionsGroup2( + key: keyof QueryGroup, + options: FilterOption[] +): GetActionTypeOfReducer<(typeof REDUCERS)["setSelectedOptionsGroup2"]> { + return { + payload: { key, options }, + type: "setSelectedOptionsGroup2", + }; +} + export function selectQueryGroup1Filters( key: keyof QueryGroup, options: { id: string; name: string }[] diff --git a/frontend/src/views/DifferentialExpression/common/store/reducer.ts b/frontend/src/views/DifferentialExpression/common/store/reducer.ts index 060188cae95aa..23966177eb19f 100644 --- a/frontend/src/views/DifferentialExpression/common/store/reducer.ts +++ 
b/frontend/src/views/DifferentialExpression/common/store/reducer.ts @@ -5,6 +5,12 @@ export interface PayloadAction { payload: Payload; } +export interface FilterOption { + name: string; + id: string; + unavailable?: boolean; +} + export interface QueryGroup { developmentStages: string[]; diseases: string[]; @@ -29,6 +35,12 @@ export interface State { submittedQueryGroupsWithNames: QueryGroupsWithNames | null; snapshotId: string | null; excludeOverlappingCells: ExcludeOverlappingCells; + selectedOptionsGroup1: { + [key in keyof QueryGroup]: FilterOption[]; + }; + selectedOptionsGroup2: { + [key in keyof QueryGroup]: FilterOption[]; + }; } export const EMPTY_FILTERS = { @@ -52,6 +64,8 @@ export const INITIAL_STATE: State = { }, submittedQueryGroups: null, submittedQueryGroupsWithNames: null, + selectedOptionsGroup1: EMPTY_FILTERS, + selectedOptionsGroup2: EMPTY_FILTERS, }; export const REDUCERS = { @@ -64,6 +78,8 @@ export const REDUCERS = { submitQueryGroups, clearSubmittedQueryGroups, setExcludeOverlappingCells, + setSelectedOptionsGroup1, + setSelectedOptionsGroup2, }; function setSnapshotId( @@ -102,6 +118,30 @@ function selectOrganism( }; } +function setSelectedOptionsGroup1( + state: State, + action: PayloadAction<{ key: keyof QueryGroup; options: FilterOption[] }> +): State { + const { key, options } = action.payload; + + return { + ...state, + selectedOptionsGroup1: { ...state.selectedOptionsGroup1, [key]: options }, + }; +} + +function setSelectedOptionsGroup2( + state: State, + action: PayloadAction<{ key: keyof QueryGroup; options: FilterOption[] }> +): State { + const { key, options } = action.payload; + + return { + ...state, + selectedOptionsGroup2: { ...state.selectedOptionsGroup2, [key]: options }, + }; +} + function selectQueryGroup1Filters( state: State, action: PayloadAction<{ diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/components/QueryGroupTags/index.tsx b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/components/QueryGroupTags/index.tsx index d5b8c422dce47..849311ba6e0e4 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/components/QueryGroupTags/index.tsx +++ b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/components/QueryGroupTags/index.tsx @@ -1,7 +1,7 @@ import { useMemo } from "react"; import { QueryGroup, - QueryGroups, + State, } from "src/views/DifferentialExpression/common/store/reducer"; import { Tooltip } from "@czi-sds/components"; import { StyledTag } from "./style"; @@ -11,47 +11,41 @@ import { NO_ORGAN_ID } from "src/views/CellGuide/components/CellGuideCard/compon import Link from "src/views/CellGuide/components/CellGuideCard/components/common/Link"; const QueryGroupTags = ({ - queryGroups, - queryGroupsWithNames, - isQueryGroup1, + selectedOptions, }: { - queryGroups: QueryGroups; - queryGroupsWithNames: QueryGroups; - isQueryGroup1?: boolean; + selectedOptions: State["selectedOptionsGroup1"]; }) => { - const queryGroup = isQueryGroup1 - ? queryGroupsWithNames.queryGroup1 - : queryGroupsWithNames.queryGroup2; - const queryGroupIds = isQueryGroup1 - ? 
queryGroups.queryGroup1 - : queryGroups.queryGroup2; const nonEmptyQueryGroupKeys = useMemo(() => { - return Object.keys(queryGroup).filter( - (key) => queryGroup[key as keyof QueryGroup].length > 0 + return Object.keys(selectedOptions).filter( + (key) => selectedOptions[key as keyof QueryGroup].length > 0 ); - }, [queryGroup]); + }, [selectedOptions]); return ( <> {nonEmptyQueryGroupKeys.map((key) => { const queryGroupKey = key as keyof QueryGroup; - const selected = queryGroup[queryGroupKey]; - const selectedId = queryGroupIds[queryGroupKey]; + const selected = selectedOptions[queryGroupKey].filter( + (option) => !option.unavailable + ); + const suffix = QUERY_GROUP_KEY_TO_TAG_SUFFIX_MAP[queryGroupKey]; const label = - selected.length > 1 ? `${selected.length} ${suffix}` : selected[0]; + selected.length > 1 + ? `${selected.length} ${suffix}` + : selected[0].name; const getValue = (index: number) => { return key === "cellTypes" ? ( ) : ( - selected[index] + selected[index].name ); }; const clickToViewText = "Click to view in CellGuide"; @@ -81,7 +75,7 @@ const QueryGroupTags = ({ { - const { excludeOverlappingCells } = useContext(StateContext); + const { + excludeOverlappingCells, + selectedOptionsGroup1, + selectedOptionsGroup2, + } = useContext(StateContext); const [page, setPage] = useState(1); const { n_cells: nCellsGroup1 } = useProcessedQueryGroupFilterDimensions( @@ -119,5 +123,7 @@ export const useConnect = ({ datasets2.length !== 1 ? "s" : "" }`, showOverlappingCellsCallout: excludeOverlappingCells === "retainBoth", + selectedOptionsGroup1, + selectedOptionsGroup2, }; }; diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/index.tsx b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/index.tsx index a354312f5f942..cfb16d093f809 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/index.tsx +++ b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/components/DifferentialExpressionResults/index.tsx @@ -68,6 +68,8 @@ const DifferentialExpressionResults = ({ numDatasetsText1, numDatasetsText2, showOverlappingCellsCallout, + selectedOptionsGroup1, + selectedOptionsGroup2, } = useConnect({ queryGroups, queryGroupsWithNames, @@ -214,11 +216,7 @@ const DifferentialExpressionResults = ({ - + @@ -258,10 +256,7 @@ const DifferentialExpressionResults = ({ - + {!!errorMessage && ( diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/utils.ts b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/utils.ts index ab660fef43553..d79378a5e1f6a 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/utils.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/utils.ts @@ -1,6 +1,7 @@ const parseExpressions = (expression: string) => { const expressions = expression .split(",") + .map((expr) => expr.replace(/\s+/g, "")) .map((expr) => { // This regex matches an expression with an optional comparison operator // (<, <=, >, >=) followed by an optional absolute value indicator (|), diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/CopyButton/connect.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/CopyButton/connect.ts index 
2c696ae9ba248..ae3531ec98b8e 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/CopyButton/connect.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/CopyButton/connect.ts @@ -8,7 +8,7 @@ import { } from "src/views/DifferentialExpression/common/store"; import { selectQueryGroup2Filters } from "src/views/DifferentialExpression/common/store/actions"; import { QUERY_GROUP_KEY_TO_FILTER_DIMENSION_MAP } from "../../../common/constants"; -import { FilterOption } from "../../types"; +import { FilterOption } from "src/views/DifferentialExpression/common/store/reducer"; import { track } from "src/common/analytics"; import { EVENTS } from "src/common/analytics/events"; import { Props } from "./types"; diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/connect.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/connect.ts index a8b44d8f3a7e6..3e52bc34f07c1 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/connect.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/connect.ts @@ -1,16 +1,27 @@ -import { useMemo, useState } from "react"; +import { DispatchContext } from "src/views/DifferentialExpression/common/store"; +import { useContext, useMemo, useState } from "react"; import { Props } from "./types"; +import { + setSelectedOptionsGroup1, + setSelectedOptionsGroup2, +} from "src/views/DifferentialExpression/common/store/actions"; export const useConnect = ({ options, allAvailableOptions, selectedOptionIds, + queryGroupKey, + isQueryGroup1, }: { options: Props["options"]; selectedOptionIds: Props["selectedOptionIds"]; allAvailableOptions: Props["allAvailableOptions"]; + queryGroupKey: Props["queryGroupKey"]; + isQueryGroup1: Props["isQueryGroup1"]; }) => { + const dispatch = useContext(DispatchContext); + const [previousSelectedOptions, setPreviousSelectedOptions] = useState< Props["options"] >([]); @@ -33,8 +44,24 @@ export const useConnect = ({ (a, b) => selectedOptionIds.indexOf(a.id) - selectedOptionIds.indexOf(b.id) ); + if (dispatch) { + if (isQueryGroup1) { + dispatch(setSelectedOptionsGroup1(queryGroupKey, newOptions)); + } else { + dispatch(setSelectedOptionsGroup2(queryGroupKey, newOptions)); + } + } + return newOptions; - }, [options, allAvailableOptions, selectedOptionIds]); + }, [ + options, + allAvailableOptions, + selectedOptionIds, + queryGroupKey, + isQueryGroup1, + dispatch, + ]); + return { selectedOptions, previousSelectedOptions, diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/index.tsx b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/index.tsx index 95c08721d065f..1e79e3bcc76fb 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/index.tsx +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/index.tsx @@ -1,6 +1,5 @@ import React from "react"; - -import { FilterOption } from "../../types"; +import { Icon } from "@czi-sds/components"; import { CloseIcon, PrimaryTag, @@ -14,10 +13,10 @@ import { DIFFERENTIAL_EXPRESSION_FILTER_TAG_GRAY, DIFFERENTIAL_EXPRESSION_FILTER_TAG_PRIMARY, } from 
"src/views/DifferentialExpression/common/constants"; +import { FilterOption } from "src/views/DifferentialExpression/common/store/reducer"; import { sortOptions } from "./utils"; import { useConnect } from "./connect"; import { Props } from "./types"; -import { Icon } from "@czi-sds/components"; function FilterDropdown({ options, @@ -25,6 +24,8 @@ function FilterDropdown({ allAvailableOptions, selectedOptionIds, handleChange, + isQueryGroup1, + queryGroupKey, }: Props): JSX.Element { const { selectedOptions, @@ -34,6 +35,8 @@ function FilterDropdown({ options, allAvailableOptions, selectedOptionIds, + queryGroupKey, + isQueryGroup1, }); return ( diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/style.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/style.ts index 39ce173241f4a..973a756646be3 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/style.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/style.ts @@ -8,7 +8,7 @@ import { gray500, primary400, } from "src/common/theme"; -import { FilterOption } from "../../types"; +import { FilterOption } from "src/views/DifferentialExpression/common/store/reducer"; import { formControlClasses } from "@mui/material/FormControl"; import { inputBaseClasses } from "@mui/material/InputBase"; import { formLabelClasses } from "@mui/material/FormLabel"; diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/types.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/types.ts index e499cd08352db..2e207ff504425 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/types.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/types.ts @@ -1,4 +1,7 @@ -import { FilterOption } from "../../types"; +import { + FilterOption, + QueryGroup, +} from "src/views/DifferentialExpression/common/store/reducer"; export interface Props { label: string; @@ -6,4 +9,6 @@ export interface Props { allAvailableOptions: FilterOption[]; selectedOptionIds: string[]; handleChange: (options: FilterOption[]) => void; + isQueryGroup1: boolean; + queryGroupKey: keyof QueryGroup; } diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/utils.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/utils.ts index c27aa711e00e1..c4bcc07be4681 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/utils.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/components/FilterDropdown/utils.ts @@ -1,4 +1,4 @@ -import { FilterOption } from "../../types"; +import { FilterOption } from "src/views/DifferentialExpression/common/store/reducer"; import { FilterOptionsState } from "@mui/material"; export function sortOptions( diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/connect.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/connect.ts index 54811822b484d..cf3da83367ace 100644 --- 
a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/connect.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/connect.ts @@ -5,13 +5,14 @@ import { DispatchContext } from "src/views/DifferentialExpression/common/store"; import { EMPTY_FILTERS, QueryGroup, + FilterOption, } from "src/views/DifferentialExpression/common/store/reducer"; import { selectQueryGroup1Filters, selectQueryGroup2Filters, } from "src/views/DifferentialExpression/common/store/actions"; -import { FilterOption, Props } from "./types"; +import { Props } from "./types"; import useProcessedQueryGroupFilterDimensions from "../common/query_group_filter_dimensions"; import { QUERY_GROUP_KEYS_TO_FILTER_EVENT_MAP } from "./constants"; diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/index.tsx b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/index.tsx index cfd41e2f18de2..933c653a7b1d0 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/index.tsx +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/index.tsx @@ -27,11 +27,13 @@ export default memo(function Filters({ filterDropdownComponent: ( ), copyButtonComponent: isQueryGroup1 ? ( diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/types.ts b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/types.ts index b33d5a5d6e629..8ef01cacc2ac7 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/Filters/types.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/Filters/types.ts @@ -1,11 +1,5 @@ import { QueryGroup } from "src/views/DifferentialExpression/common/store/reducer"; -export interface FilterOption { - name: string; - id: string; - unavailable?: boolean; -} - export interface Props { queryGroup: QueryGroup; isQueryGroup1: boolean; diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/common/query_group_filter_dimensions.ts b/frontend/src/views/DifferentialExpression/components/Main/components/common/query_group_filter_dimensions.ts index 12fda13072b9e..6749ae5ae75d6 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/common/query_group_filter_dimensions.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/common/query_group_filter_dimensions.ts @@ -1,8 +1,10 @@ import { useEffect, useState } from "react"; import isEqual from "lodash/isEqual"; import { useQueryGroupFilterDimensions } from "src/common/queries/differentialExpression"; -import { QueryGroup } from "src/views/DifferentialExpression/common/store/reducer"; -import { FilterOption } from "../Filters/types"; +import { + QueryGroup, + FilterOption, +} from "src/views/DifferentialExpression/common/store/reducer"; const EMPTY_FILTERS = { disease_terms: [], diff --git a/frontend/tests/features/differentialExpression/differentialExpression.test.ts b/frontend/tests/features/differentialExpression/differentialExpression.test.ts index c02ed30a3e41d..8ea5fb40ff63c 100644 --- a/frontend/tests/features/differentialExpression/differentialExpression.test.ts +++ b/frontend/tests/features/differentialExpression/differentialExpression.test.ts @@ -514,7 +514,7 @@ describe("Differential Expression", () => { describe("Results", () => { test("All tests", async ({ page }) => { - await runDEQuery(page, false); + await runDEQuery({ page, mode: 
"default" }); await test.step("Cell Group 1 and 2 contain the correct number of cells and filter tags", async () => { // Check number of cells @@ -779,7 +779,7 @@ describe("Differential Expression", () => { expect(dataRow3.length).toBe(columnNames.length); }); - await runDEQuery(page, true); + await runDEQuery({ page, mode: "test_include_overlapping_cells" }); await test.step("Overlapping cells info callout is visible when overlapping cells are not filtered", async () => { // Ensure the callout is not visible at first await expect( @@ -793,6 +793,15 @@ describe("Differential Expression", () => { .click(); await isElementVisible(page, DIFFERENTIAL_EXPRESSION_RESULTS_CALLOUT); }); + + await runDEQuery({ page, mode: "test_exclude_unavailable_tags" }); + await test.step("Unavailable tags are excluded from results query group boxes", async () => { + const cellGroup1Info = page.getByTestId( + DIFFERENTIAL_EXPRESSION_CELL_GROUP_1_INFO + ); + await expect(cellGroup1Info).toHaveText("plasma cell"); + await expect(cellGroup1Info).not.toHaveText("2 cell types"); + }); }); }); }); @@ -839,7 +848,16 @@ const clickOnAutocompleteDropdownItem = async ( await waitForFiltersEndpoint(autocomplete.page()); }; -const runDEQuery = async (page: Page, includeOverlappingCells = false) => { +const runDEQuery = async ({ + page, + mode = "default", +}: { + page: Page; + mode?: + | "default" + | "test_include_overlapping_cells" + | "test_exclude_unavailable_tags"; +}) => { // Type "lung" in tissue filter for group 1 await page.reload(); @@ -864,7 +882,7 @@ const runDEQuery = async (page: Page, includeOverlappingCells = false) => { cellTypeFilterAutocompleteGroup1, "plasma cell" ); - if (!includeOverlappingCells) { + if (mode !== "test_include_overlapping_cells") { // Type "acinar cell" in cell type filter for group 2 const cellTypeFilterAutocompleteGroup2 = page .getByTestId(DIFFERENTIAL_EXPRESSION_CELL_GROUP_2_FILTER) @@ -876,6 +894,29 @@ const runDEQuery = async (page: Page, includeOverlappingCells = false) => { "acinar cell" ); } + if (mode === "test_exclude_unavailable_tags") { + // Type "acinar cell" in cell type filter for group 1 + const cellTypeFilterAutocompleteGroup1 = page + .getByTestId(DIFFERENTIAL_EXPRESSION_CELL_GROUP_1_FILTER) + .getByTestId( + `${DIFFERENTIAL_EXPRESSION_FILTER_AUTOCOMPLETE_PREFIX}Cell Type` + ); + await clickOnAutocompleteDropdownItem( + cellTypeFilterAutocompleteGroup1, + "acinar cell" + ); + + // "chronic obstructive pulmonary disease" in disease filter for group 1 + const diseaseFilterAutocompleteGroup1 = page + .getByTestId(DIFFERENTIAL_EXPRESSION_CELL_GROUP_1_FILTER) + .getByTestId( + `${DIFFERENTIAL_EXPRESSION_FILTER_AUTOCOMPLETE_PREFIX}Disease` + ); + await clickOnAutocompleteDropdownItem( + diseaseFilterAutocompleteGroup1, + "chronic obstructive pulmonary disease" + ); + } // Hit the "Find Genes" button const findGenesButton = page.getByTestId( DIFFERENTIAL_EXPRESSION_FIND_GENES_BUTTON From 43ea109c5e575698860901902ec802592573c0e9 Mon Sep 17 00:00:00 2001 From: Ronen Date: Mon, 8 Jul 2024 18:01:44 -0400 Subject: [PATCH 11/15] chore: remove mvp related fields from cxg conversion (#7265) --- backend/layers/processing/utils/spatial.py | 1 - tests/unit/processing/test_spatial_assets_utils.py | 5 ----- 2 files changed, 6 deletions(-) diff --git a/backend/layers/processing/utils/spatial.py b/backend/layers/processing/utils/spatial.py index 6e29c73db9aaa..354690ff7df38 100644 --- a/backend/layers/processing/utils/spatial.py +++ b/backend/layers/processing/utils/spatial.py @@ -174,7 
+174,6 @@ def filter_spatial_data(self, content, library_id): "width": width, "height": height, }, - "images": {"hires": content["images"]["hires"], "fullres": []}, "scalefactors": { "spot_diameter_fullres": content["scalefactors"]["spot_diameter_fullres"], "tissue_hires_scalef": content["scalefactors"]["tissue_hires_scalef"], diff --git a/tests/unit/processing/test_spatial_assets_utils.py b/tests/unit/processing/test_spatial_assets_utils.py index cc40bb43dface..0345c78af0a04 100644 --- a/tests/unit/processing/test_spatial_assets_utils.py +++ b/tests/unit/processing/test_spatial_assets_utils.py @@ -92,7 +92,6 @@ def test__valid_input_metadata_copy(spatial_processor, valid_spatial_data, libra "width": 20, "height": 20, }, - "images": {"hires": valid_spatial_data["images"]["hires"], "fullres": []}, "scalefactors": { "spot_diameter_fullres": valid_spatial_data["scalefactors"]["spot_diameter_fullres"], "tissue_hires_scalef": valid_spatial_data["scalefactors"]["tissue_hires_scalef"], @@ -321,10 +320,6 @@ def test__convert_uns_to_cxg_group( assert "spatial" in mock_metadata_array.meta spatial_data = pickle.loads(mock_metadata_array.meta["spatial"]) assert "library_id_1" in spatial_data - assert np.array_equal( - spatial_data["library_id_1"]["images"]["hires"], - valid_uns["spatial"]["library_id_1"]["images"]["hires"], - ) assert ( spatial_data["library_id_1"]["scalefactors"]["spot_diameter_fullres"] == valid_uns["spatial"]["library_id_1"]["scalefactors"]["spot_diameter_fullres"] From e66e3dfa1d91335fd500ec0e14a4c533f089537b Mon Sep 17 00:00:00 2001 From: atarashansky Date: Mon, 8 Jul 2024 15:21:21 -0700 Subject: [PATCH 12/15] chore(wmg): add link to DE (#7263) --- frontend/src/common/analytics/events.ts | 2 ++ frontend/src/common/constants/routes.ts | 2 +- .../src/components/GeneInfoSideBar/style.ts | 6 ++++ .../{de.tsx => differential-expression.tsx} | 0 .../components/Description/index.tsx | 6 ++++ .../Main/components/DeResults/connect.ts | 7 +++- .../components/Main/connect.ts | 10 ++++-- .../components/Main/utils.ts | 5 ++- .../components/CellInfoSideBar/connect.ts | 23 +++++++++++-- .../components/CellInfoSideBar/constants.ts | 2 ++ .../components/CellInfoSideBar/index.tsx | 24 +++++++++++--- .../components/ShareButton/utils.tsx | 32 +++++++++++++++++++ 12 files changed, 107 insertions(+), 12 deletions(-) rename frontend/src/pages/{de.tsx => differential-expression.tsx} (100%) diff --git a/frontend/src/common/analytics/events.ts b/frontend/src/common/analytics/events.ts index 52ab5966f1ae7..fd6eb02b8ba37 100644 --- a/frontend/src/common/analytics/events.ts +++ b/frontend/src/common/analytics/events.ts @@ -35,6 +35,7 @@ export enum EVENTS { WMG_CITATION_CLICKED = "WMG_CITATION_CLICKED", WMG_SHARE_CLICKED = "WMG_SHARE_CLICKED", WMG_SHARE_LOADED = "WMG_SHARE_LOADED", + WMG_OPEN_IN_DE_CLICKED = "WMG_OPEN_IN_DE_CLICKED", WMG_DOWNLOAD_CLICKED = "WMG_DOWNLOAD_CLICKED", WMG_DOWNLOAD_COMPLETE = "WMG_DOWNLOAD_COMPLETE", WMG_FMG_INFO_CLICKED = "WMG_FMG_INFO_CLICKED", @@ -111,6 +112,7 @@ export enum EVENTS { CG_CHAT_GPT_HOVER = "CG_CHAT_GPT_HOVER", SUGGEST_CHANGE_CLICKED = "SUGGEST_CHANGE_CLICKED", CG_CANONICAL_TAB_CLICKED = "CG_CANONICAL_TAB_CLICKED", + CG_OPEN_IN_DE_CLICKED = "CG_OPEN_IN_DE_CLICKED", CG_COMPUTATIONAL_TAB_CLICKED = "CG_COMPUTATIONAL_TAB_CLICKED", CG_TREE_NODE_HOVER = "CG_TREE_NODE_HOVER", CG_DESCRIPTION_READ_MORE_CLICKED = "CG_DESCRIPTION_READ_MORE_CLICKED", diff --git a/frontend/src/common/constants/routes.ts b/frontend/src/common/constants/routes.ts index 
b325c58a13f62..44812440aadcb 100644 --- a/frontend/src/common/constants/routes.ts +++ b/frontend/src/common/constants/routes.ts @@ -15,7 +15,7 @@ export enum ROUTES { WMG_DOCS_ORDERING = "/docs/04__Analyze%20Public%20Data/4_2__Gene%20Expression%20Documentation/4_2_2__Cell%20Type%20and%20Gene%20Ordering", WMG_DOCS_DATA_PROCESSING = "/docs/04__Analyze%20Public%20Data/4_2__Gene%20Expression%20Documentation/4_2_3__Gene%20Expression%20Data%20Processing", SITEMAP = "/sitemap", - DE = "/de", + DE = "/differential-expression", CELL_GUIDE = "/cellguide", CELL_GUIDE_CELL_TYPE = "/cellguide/:cellTypeId", CELL_GUIDE_TISSUE = "/cellguide/tissues/:tissueId", diff --git a/frontend/src/components/GeneInfoSideBar/style.ts b/frontend/src/components/GeneInfoSideBar/style.ts index eca27180cf789..985586404bd9b 100644 --- a/frontend/src/components/GeneInfoSideBar/style.ts +++ b/frontend/src/components/GeneInfoSideBar/style.ts @@ -29,6 +29,12 @@ export const Link = styled.a` color: ${primary400}; `; +export const StyledLink = styled(Link)` + display: flex; + align-items: center; + gap: 4px; +`; + export const GeneName = styled.div` ${fontBodyS} font-weight: 500; diff --git a/frontend/src/pages/de.tsx b/frontend/src/pages/differential-expression.tsx similarity index 100% rename from frontend/src/pages/de.tsx rename to frontend/src/pages/differential-expression.tsx diff --git a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx index 2d82f3d295eac..52ebd39746855 100644 --- a/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx +++ b/frontend/src/views/CellGuide/components/CellGuideCard/components/Description/index.tsx @@ -356,6 +356,12 @@ export default function Description({ } + onClick={() => { + track(EVENTS.CG_OPEN_IN_DE_CLICKED, { + cell_type: cellTypeId, + tissue: selectedOrganId, + }); + }} /> )} diff --git a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/connect.ts b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/connect.ts index 9e394f6d7c253..87fdcc08022d7 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/connect.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/components/DeResults/connect.ts @@ -32,6 +32,7 @@ export const useConnect = ({ setIsLoading }: Props) => { organismId, submittedQueryGroups: queryGroups, submittedQueryGroupsWithNames: queryGroupsWithNames, + excludeOverlappingCells, } = useContext(StateContext); useEffect(() => { @@ -136,12 +137,16 @@ export const useConnect = ({ setIsLoading }: Props) => { link.click(); document.body.removeChild(link); - track(EVENTS.DE_DOWNLOAD_CLICKED, craftPayloadWithQueryGroups(queryGroups)); + track( + EVENTS.DE_DOWNLOAD_CLICKED, + craftPayloadWithQueryGroups(queryGroups, excludeOverlappingCells) + ); }, [ sortedAndFilteredResults, queryGroups, queryGroupsWithNames, isLoadingRaw, + excludeOverlappingCells, ]); return { diff --git a/frontend/src/views/DifferentialExpression/components/Main/connect.ts b/frontend/src/views/DifferentialExpression/components/Main/connect.ts index 80c40ee7c16ca..658c173e05c33 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/connect.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/connect.ts @@ -42,7 +42,7 @@ export const useConnect = () => { setIsLoadingGetDeQuery(isLoadingGetDeQuery); }, [isLoadingGetDeQuery]); 
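// For context, a hedged sketch of the analytics payload that
// craftPayloadWithQueryGroups produces once the utils.ts change later in this
// patch lands; the queryGroup keys and term IDs below are illustrative only:
//
//   craftPayloadWithQueryGroups(
//     { queryGroup1: { tissues: ["UBERON:0002048"] }, queryGroup2: {} },
//     "retainBoth" // an ExcludeOverlappingCells value used elsewhere in this series
//   );
//   // => roughly { tissues: ["UBERON:0002048"], overlap: "retainBoth" }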
const dispatch = useContext(DispatchContext); - const { queryGroups } = useContext(StateContext); + const { queryGroups, excludeOverlappingCells } = useContext(StateContext); const { queryGroup1, queryGroup2 } = queryGroups; // check if any values in queryGroup1 are not empty @@ -61,7 +61,7 @@ export const useConnect = () => { track( EVENTS.DE_FIND_GENES_CLICKED, - craftPayloadWithQueryGroups(queryGroups) + craftPayloadWithQueryGroups(queryGroups, excludeOverlappingCells) ); }; @@ -91,7 +91,11 @@ export const useConnect = () => { const { search } = window.location; const params = new URLSearchParams(search); const organism: string | null = params.get("organism"); - if (!organism) return; + + if (!organism) { + setOrganismInitializedFromShareURL(true); // homo sapiens is default + return; + } const isOrganismValid = availableOrganisms.some( (org) => org.id === organism diff --git a/frontend/src/views/DifferentialExpression/components/Main/utils.ts b/frontend/src/views/DifferentialExpression/components/Main/utils.ts index a58656f8a10a4..e6ebd45a31d78 100644 --- a/frontend/src/views/DifferentialExpression/components/Main/utils.ts +++ b/frontend/src/views/DifferentialExpression/components/Main/utils.ts @@ -1,7 +1,9 @@ import { QueryGroups, QueryGroup } from "../../common/store/reducer"; +import { ExcludeOverlappingCells } from "../../common/types"; export const craftPayloadWithQueryGroups = ( - queryGroups: QueryGroups + queryGroups: QueryGroups, + excludeOverlappingCells: ExcludeOverlappingCells ): Record => { const payload: Record = {}; @@ -13,5 +15,6 @@ export const craftPayloadWithQueryGroups = ( } }); + payload["overlap"] = excludeOverlappingCells; return payload; }; diff --git a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/connect.ts b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/connect.ts index 6c9036b6a8353..c8c024f394fd0 100644 --- a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/connect.ts +++ b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/connect.ts @@ -1,8 +1,8 @@ -import { useCallback, useContext, useState } from "react"; +import { useCallback, useContext, useMemo, useState } from "react"; import { track } from "src/common/analytics"; import { EVENTS } from "src/common/analytics/events"; import { useMarkerGenes } from "src/common/queries/wheresMyGene"; -import { DispatchContext, State } from "../../common/store"; +import { DispatchContext, State, StateContext } from "../../common/store"; import { addSelectedGenes } from "../../common/store/actions"; import { HOVER_START_TIME_MS } from "../../common/constants"; import { @@ -10,12 +10,14 @@ import { MARKER_SCORE_LABEL, SPECIFICITY_LABEL, } from "src/common/constants/markerGenes"; +import { generateDifferentialExpressionUrl } from "../GeneSearchBar/components/ShareButton/utils"; export const useConnect = ({ cellInfoCellType, }: { cellInfoCellType: Exclude; }) => { + const { selectedFilters, selectedOrganismId } = useContext(StateContext); const dispatch = useContext(DispatchContext); const [hoverStartTime, setHoverStartTime] = useState(0); @@ -66,6 +68,22 @@ export const useConnect = ({ { label: SPECIFICITY_LABEL } ); + const differentialExpressionUrl = useMemo( + () => + generateDifferentialExpressionUrl({ + filters: selectedFilters, + organism: selectedOrganismId, + tissue: cellInfoCellType.tissueID, + cellType: cellInfoCellType.cellType.id, + }), + [ + selectedFilters, + selectedOrganismId, + cellInfoCellType.tissueID, + cellInfoCellType.cellType.id, + ] + ); + return 
{ handleCopyGenes, isLoading, @@ -75,5 +93,6 @@ export const useConnect = ({ handleMarkerScoreHoverEnd, handleSpecificityHoverEnd, setHoverStartTime, + differentialExpressionUrl, }; }; diff --git a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/constants.ts b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/constants.ts index bef4332020f9b..15d4a0d6bbc31 100644 --- a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/constants.ts +++ b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/constants.ts @@ -3,6 +3,8 @@ export const SPECIFICITY_TOOLTIP_CONTENT_FIRST_HALF = export const SPECIFICITY_TOOLTIP_CONTENT_SECOND_HALF = "with a lower Effect Size for this gene. Higher Specificity values indicate markers which are more specific for this cell type."; export const MARKER_SCORE_CELLGUIDE_LINK_TEXT = "Open in CellGuide"; +export const DIFFERENTIAL_EXPRESSION_LINK_TEXT = + "Open in Differential Expression"; export const MARKER_SCORE_DOTPLOT_BUTTON_TEXT = "Add to Dot Plot"; export const NO_MARKER_GENES_HEADER = "No Marker Genes"; export const NO_MARKER_GENES_DESCRIPTION = diff --git a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/index.tsx b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/index.tsx index 867408e389a28..692a905636c65 100644 --- a/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/index.tsx +++ b/frontend/src/views/WheresMyGeneV2/components/CellInfoSideBar/index.tsx @@ -19,7 +19,7 @@ import { TooltipLink, AddToDotplotButton, } from "./style"; -import { Link } from "src/components/GeneInfoSideBar/style"; +import { StyledLink } from "src/components/GeneInfoSideBar/style"; import InfoSVG from "src/common/images/info-sign-icon.svg"; import { InfoButtonWrapper } from "src/components/common/Filter/common/style"; import { CellInfoBarProps } from "./types"; @@ -48,6 +48,7 @@ import { TABLE_HEADER_SPECIFICITY, SPECIFICITY_TOOLTIP_CONTENT_FIRST_HALF, SPECIFICITY_TOOLTIP_CONTENT_SECOND_HALF, + DIFFERENTIAL_EXPRESSION_LINK_TEXT, } from "./constants"; import { useConnect } from "./connect"; import { @@ -74,6 +75,7 @@ function CellInfoSideBar({ handleMarkerScoreHoverEnd, handleSpecificityHoverEnd, setHoverStartTime, + differentialExpressionUrl, } = useConnect({ cellInfoCellType, }); @@ -97,7 +99,7 @@ function CellInfoSideBar({ skinnyMode={true} inSideBar /> - track(EVENTS.WMG_OPEN_IN_CG_CLICKED, { @@ -108,8 +110,22 @@ function CellInfoSideBar({ rel="noreferrer noopener" > {MARKER_SCORE_CELLGUIDE_LINK_TEXT} - - + + + + track(EVENTS.WMG_OPEN_IN_DE_CLICKED, { + cell_type: cellInfoCellType.cellType.id, + tissue: cellInfoCellType.tissueID, + }) + } + target="_blank" + rel="noreferrer noopener" + > + {DIFFERENTIAL_EXPRESSION_LINK_TEXT} + + {MARKER_GENE_LABEL} diff --git a/frontend/src/views/WheresMyGeneV2/components/GeneSearchBar/components/ShareButton/utils.tsx b/frontend/src/views/WheresMyGeneV2/components/GeneSearchBar/components/ShareButton/utils.tsx index dfe0066a5940e..a4133b1b7c4ce 100644 --- a/frontend/src/views/WheresMyGeneV2/components/GeneSearchBar/components/ShareButton/utils.tsx +++ b/frontend/src/views/WheresMyGeneV2/components/GeneSearchBar/components/ShareButton/utils.tsx @@ -1,5 +1,6 @@ import { NextRouter } from "next/router"; import { Dispatch } from "react"; +import { ROUTES } from "src/common/constants/routes"; import { TissueMetadataQueryResponse } from "src/common/queries/cellGuide"; import { isSSR } from "src/common/utils/isSSR"; import { removeParams } from "src/common/utils/removeParams"; @@ 
-64,6 +65,37 @@ export const generateAndCopyShareUrl = ({ return urlString; }; +export const generateDifferentialExpressionUrl = ({ + filters, + organism, + tissue, + cellType, +}: { + filters: State["selectedFilters"]; + organism: State["selectedOrganismId"]; + tissue: string; + cellType: string; +}) => { + // Create a URL that contains the selected filters, cell type, and tissue as params in the URL + // This URL can be shared with others to reproduce the same view + const url = new URL(ROUTES.DE, window.location.origin); + + // human is empty default + if (organism && organism !== HUMAN_ORGANISM_ID) { + url.searchParams.set("organism", organism); + } + + Object.entries(stripEmptyFilters(filters)).forEach(([key, value]) => { + if (["diseases", "ethnicities", "publications", "sexes"].includes(key)) { + url.searchParams.set(key, value.join(",")); + } + }); + + url.searchParams.set("celltypes", cellType); + url.searchParams.set("tissues", tissue); + return String(url); +}; + const stripEmptyFilters = ( filters: State["selectedFilters"] ): Partial => { From cb1df2ecf14b3234332f4f79f048ee55678d07a0 Mon Sep 17 00:00:00 2001 From: Emanuele Bezzi Date: Mon, 8 Jul 2024 15:55:22 -0700 Subject: [PATCH 13/15] fix: update models page for the new LTS version (#7266) --- .../src/common/queries/censusDirectory.ts | 24 +++++++++++-------- .../Project/ProjectButtons/index.tsx | 4 +++- frontend/src/views/CensusDirectory/style.ts | 5 +++- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/frontend/src/common/queries/censusDirectory.ts b/frontend/src/common/queries/censusDirectory.ts index 95b0d22707c0d..57238a9e06b89 100644 --- a/frontend/src/common/queries/censusDirectory.ts +++ b/frontend/src/common/queries/censusDirectory.ts @@ -79,17 +79,21 @@ async function fetchProjects(): Promise { } ); - const response = await fetch(url); - const result = await response.json(); - let publication_info; - if (!response.ok) { - console.error(result); - } else { - publication_info = parseCrossRefResponse(result); + // If CrossRef fails (e.g. 
due to Too Many Requests), we still want to show the project + try { + const response = await fetch(url); + const result = await response.json(); + let publication_info; + if (!response.ok) { + console.error(result); + } else { + publication_info = parseCrossRefResponse(result); + data[id].publication_info = publication_info; + data[id].publication_link = result.message.URL; + } + } catch (error) { + console.log(error); + } } ) ); diff --git a/frontend/src/views/CensusDirectory/components/Project/ProjectButtons/index.tsx b/frontend/src/views/CensusDirectory/components/Project/ProjectButtons/index.tsx index 0ce5f2f37b2fe..0bda67f5c6828 100644 --- a/frontend/src/views/CensusDirectory/components/Project/ProjectButtons/index.tsx +++ b/frontend/src/views/CensusDirectory/components/Project/ProjectButtons/index.tsx @@ -12,10 +12,12 @@ const IGNORE_DIFFERENT_METADATA_KEYS = [ "id", "relative_uri", "indexes", + "submission_date", ]; const ATTRIBUTE_TO_LABEL: Record = { experiment_name: "Organism", - n_cells: "Cells", + census_version: "Census Version", + n_embeddings: "Cells", data_type: "Embedding", }; diff --git a/frontend/src/views/CensusDirectory/style.ts b/frontend/src/views/CensusDirectory/style.ts index f1ba5c01e74c4..6741b287d90fb 100644 --- a/frontend/src/views/CensusDirectory/style.ts +++ b/frontend/src/views/CensusDirectory/style.ts @@ -25,7 +25,7 @@ export const Content = styled.div` display: flex; flex-direction: column; margin: 80px auto; - max-width: 1200px; + max-width: 1400px; `; export const Header = styled.h1` @@ -75,6 +75,7 @@ export const ProjectSubmitter = styled.h4` export const ProjectDescription = styled(Paragraph)` max-width: 85ch; + padding-right: 10px; `; export const ProjectContainer = styled.div` @@ -100,6 +101,7 @@ export const ItemContainer = styled.div` display: flex; flex-direction: column; gap: ${spacesXxs}px; + min-width: 56px; `; export const ItemLabel = styled.div` @@ -109,6 +111,7 @@ export const ItemLabel = styled.div` "clig" off, "liga" off; color: ${gray400}; + white-space: nowrap; `; export const StyledRadioGroup = styled(RadioGroup)` From 1d73ab38a97c015e903d30b854708c8b522b13ca Mon Sep 17 00:00:00 2001 From: Trent Smith <1429913+Bento007@users.noreply.github.com> Date: Tue, 9 Jul 2024 06:34:25 -0700 Subject: [PATCH 14/15] fix(RDS CA): Install the updated AWS root CA in docker images (#7267) --- Dockerfile.backend | 3 +++ Dockerfile.backend_de | 3 +++ Dockerfile.backend_wmg | 3 +++ Dockerfile.cellguide_pipeline | 4 ++++ Dockerfile.dataset_submissions | 3 +++ Dockerfile.processing | 3 +++ Dockerfile.upload_failures | 3 +++ Dockerfile.upload_success | 3 +++ Dockerfile.wmg_pipeline | 4 ++++ 9 files changed, 29 insertions(+) diff --git a/Dockerfile.backend b/Dockerfile.backend index 29bcf2830066b..1563bf75e66c8 100644 --- a/Dockerfile.backend +++ b/Dockerfile.backend @@ -5,6 +5,9 @@ ENV EXPORT_ENV_VARS_TO_LAMBDA="APP_NAME DEPLOYMENT_STAGE" ENV LC_ALL=C.UTF-8 ENV DEBIAN_FRONTEND=noninteractive +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + RUN apt-get update && \ apt-get install -y python3 libhdf5-dev python3-h5py gettext moreutils build-essential libxml2-dev python3-dev python3-pip zlib1g-dev python3-requests python3-aiohttp llvm jq && \ rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile.backend_de b/Dockerfile.backend_de index 02d2eb4004159..5e6062502d881 100644 --- 
a/Dockerfile.backend_de +++ b/Dockerfile.backend_de @@ -5,6 +5,9 @@ ENV EXPORT_ENV_VARS_TO_LAMBDA="APP_NAME DEPLOYMENT_STAGE" ENV LC_ALL=C.UTF-8 ENV DEBIAN_FRONTEND=noninteractive +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + RUN apt-get update && \ apt-get install -y python3 libhdf5-dev python3-h5py gettext moreutils build-essential libxml2-dev python3-dev python3-pip zlib1g-dev python3-requests python3-aiohttp llvm jq && \ rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile.backend_wmg b/Dockerfile.backend_wmg index 6244f776972d0..f5b20bf8b00f5 100644 --- a/Dockerfile.backend_wmg +++ b/Dockerfile.backend_wmg @@ -5,6 +5,9 @@ ENV EXPORT_ENV_VARS_TO_LAMBDA="APP_NAME DEPLOYMENT_STAGE" ENV LC_ALL=C.UTF-8 ENV DEBIAN_FRONTEND=noninteractive +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + RUN apt-get update && \ apt-get install -y python3 libhdf5-dev python3-h5py gettext moreutils build-essential libxml2-dev python3-dev python3-pip zlib1g-dev python3-requests python3-aiohttp llvm jq && \ rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile.cellguide_pipeline b/Dockerfile.cellguide_pipeline index 77eb47eb7483e..365327ec354e8 100644 --- a/Dockerfile.cellguide_pipeline +++ b/Dockerfile.cellguide_pipeline @@ -1,6 +1,10 @@ ARG BASE_TAG=branch-main FROM python:3.10 + +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + RUN /usr/local/bin/python -m pip install --upgrade pip && \ apt update && apt -y install graphviz graphviz-dev && \ rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile.dataset_submissions b/Dockerfile.dataset_submissions index aa93bf98b8cf1..75bc557f31681 100644 --- a/Dockerfile.dataset_submissions +++ b/Dockerfile.dataset_submissions @@ -1,5 +1,8 @@ FROM public.ecr.aws/lambda/python:3.9 +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + COPY backend/layers/processing/submissions . COPY backend/layers ./backend/layers diff --git a/Dockerfile.processing b/Dockerfile.processing index 56dfc9549e348..8b6fa048f0ccf 100644 --- a/Dockerfile.processing +++ b/Dockerfile.processing @@ -2,6 +2,9 @@ ARG BASE_TAG=branch-main FROM ghcr.io/chanzuckerberg/corpora-upload-base:$BASE_TAG +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + # Install cellxgene so we get the remote server that has the converter in it # The cellxgene install script expects executables named python and pip, not python3 and pip3 RUN apt-get update && \ diff --git a/Dockerfile.upload_failures b/Dockerfile.upload_failures index 357ee64a7fbe7..7c8c70496df7a 100644 --- a/Dockerfile.upload_failures +++ b/Dockerfile.upload_failures @@ -1,5 +1,8 @@ FROM public.ecr.aws/lambda/python:3.8 +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + COPY backend/layers/processing/upload_failures . 
COPY backend/layers ./backend/layers COPY backend/portal ./backend/portal diff --git a/Dockerfile.upload_success b/Dockerfile.upload_success index a4d48307f65b9..a0da68b1991d8 100644 --- a/Dockerfile.upload_success +++ b/Dockerfile.upload_success @@ -1,5 +1,8 @@ FROM public.ecr.aws/lambda/python:3.8 +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + COPY backend/layers/processing/upload_failures backend/layers/processing/upload_failures COPY backend/layers/processing/upload_success . COPY backend/layers ./backend/layers diff --git a/Dockerfile.wmg_pipeline b/Dockerfile.wmg_pipeline index e33c87fc65315..592a04baffeb8 100644 --- a/Dockerfile.wmg_pipeline +++ b/Dockerfile.wmg_pipeline @@ -1,6 +1,10 @@ ARG BASE_TAG=branch-main FROM python:3.10 + +# Update AWS root Certificates +ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /etc/ssl/certs/rds-global-bundle.pem + RUN /usr/local/bin/python -m pip install --upgrade pip && \ apt update && apt -y install graphviz graphviz-dev && \ rm -rf /var/lib/apt/lists/* && \ From 5ef3ce0678eaa1539c547a6c6d567222422e3d23 Mon Sep 17 00:00:00 2001 From: atarashansky Date: Tue, 9 Jul 2024 09:55:16 -0700 Subject: [PATCH 15/15] chore: comment out dependabot (#7268) --- .github/dependabot.yml | 226 ++++++++++++++++++++--------------------- 1 file changed, 113 insertions(+), 113 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c0dd41a9e4386..8ccd6face3989 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,114 +1,114 @@ -version: 2 -updates: - - package-ecosystem: "github-actions" - directory: "/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "seve" - - package-ecosystem: pip - directory: "/python_dependencies/backend/" - open-pull-requests-limit: 3 - schedule: - interval: "daily" - assignees: - - "Bento007" - groups: - database: - patterns: - - "alembic" - - "sqlalchemy" - - "psycopg2" - server: - patterns: - - "flask" - - "gunicorn" - - "setproctitle" - - "connexion" +# version: 2 +# updates: +# - package-ecosystem: "github-actions" +# directory: "/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "seve" +# - package-ecosystem: pip +# directory: "/python_dependencies/backend/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "daily" +# assignees: +# - "Bento007" +# groups: +# database: +# patterns: +# - "alembic" +# - "sqlalchemy" +# - "psycopg2" +# server: +# patterns: +# - "flask" +# - "gunicorn" +# - "setproctitle" +# - "connexion" - - package-ecosystem: pip - directory: "/python_dependencies/wmg/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "prathapsridharan" - - package-ecosystem: pip - directory: "/python_dependencies/upload_handler/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "ebezzi" - - package-ecosystem: pip - directory: "/python_dependencies/submissions/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "nayib-jose-gloria" - - package-ecosystem: pip - directory: "/python_dependencies/cellguide_pipeline/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "atarashansky" - - package-ecosystem: pip - directory: "/python_dependencies/processing/" - open-pull-requests-limit: 3 - schedule: - 
interval: "weekly" - day: "sunday" - assignees: - - "Bento007" - groups: - database: - patterns: - - "sqlalchemy*" - - "psycopg2*" - non-major-updates: - patterns: - - "*" - applies-to: version-updates - update-types: - - patch - - minor - exclude-patterns: - # The following packages do not have major versions. So a minor version updates may contain breaking changes. - - "anndata" - - "tiledb" - - "numba" - - "s3fs" - - package-ecosystem: pip - directory: "/python_dependencies/common/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "nayib-jose-gloria" - groups: - formatters: - patterns: - - "black" - - "ruff" - test-runners: - patterns: - - "pytest" - - "coverage" - - "allure" - - package-ecosystem: npm - directory: "/frontend/" - open-pull-requests-limit: 3 - schedule: - interval: "weekly" - day: "sunday" - assignees: - - "tihuan" +# - package-ecosystem: pip +# directory: "/python_dependencies/wmg/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "prathapsridharan" +# - package-ecosystem: pip +# directory: "/python_dependencies/upload_handler/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "ebezzi" +# - package-ecosystem: pip +# directory: "/python_dependencies/submissions/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "nayib-jose-gloria" +# - package-ecosystem: pip +# directory: "/python_dependencies/cellguide_pipeline/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "atarashansky" +# - package-ecosystem: pip +# directory: "/python_dependencies/processing/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "Bento007" +# groups: +# database: +# patterns: +# - "sqlalchemy*" +# - "psycopg2*" +# non-major-updates: +# patterns: +# - "*" +# applies-to: version-updates +# update-types: +# - patch +# - minor +# exclude-patterns: +# # The following packages do not have major versions. So a minor version updates may contain breaking changes. +# - "anndata" +# - "tiledb" +# - "numba" +# - "s3fs" +# - package-ecosystem: pip +# directory: "/python_dependencies/common/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "nayib-jose-gloria" +# groups: +# formatters: +# patterns: +# - "black" +# - "ruff" +# test-runners: +# patterns: +# - "pytest" +# - "coverage" +# - "allure" +# - package-ecosystem: npm +# directory: "/frontend/" +# open-pull-requests-limit: 3 +# schedule: +# interval: "weekly" +# day: "sunday" +# assignees: +# - "tihuan"