From a6c5ddb08444c81ae3cf2da291d36944a4d849ca Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Fri, 2 Jun 2023 12:28:52 +0200 Subject: [PATCH 01/35] Fix logging optional as int - Closes #1495 --- .../history/indexers/ethereum_indexer.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/safe_transaction_service/history/indexers/ethereum_indexer.py b/safe_transaction_service/history/indexers/ethereum_indexer.py index e98ab7d4..30ab550e 100644 --- a/safe_transaction_service/history/indexers/ethereum_indexer.py +++ b/safe_transaction_service/history/indexers/ethereum_indexer.py @@ -448,10 +448,10 @@ def start(self) -> Tuple[int, int]: ) number_processed_elements = len(processed_elements) logger.debug( - "%s: Processed %d elements for almost updated addresses. From-block-number=%d to-block-number=%d", + "%s: Processed %d elements for almost updated addresses. From-block-number=%s to-block-number=%d", self.__class__.__name__, number_processed_elements, - from_block_number, + from_block_number, # Can be None to_block_number, ) total_number_processed_elements += number_processed_elements @@ -504,10 +504,10 @@ def start(self) -> Tuple[int, int]: number_processed_elements = len(processed_elements) logger.debug( - "%s: Processed %d elements for not updated addresses. From-block-number=%d to-block-number=%d", + "%s: Processed %d elements for not updated addresses. From-block-number=%s to-block-number=%d", self.__class__.__name__, number_processed_elements, - from_block_number, + from_block_number, # Can be None to_block_number, ) total_number_processed_elements += number_processed_elements From 1171235cc0cadcb13be4514cfbf2956101fab2d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mois=C3=A9s=20Fern=C3=A1ndez?= <7888669+moisses89@users.noreply.github.com> Date: Mon, 12 Jun 2023 11:55:02 +0200 Subject: [PATCH 02/35] Remove deprecated delegate endpoints (#1394) --- .../history/serializers.py | 37 --- .../history/tests/test_views.py | 217 ------------------ safe_transaction_service/history/urls.py | 5 - safe_transaction_service/history/views.py | 105 --------- 4 files changed, 364 deletions(-) diff --git a/safe_transaction_service/history/serializers.py b/safe_transaction_service/history/serializers.py index 4f47516e..98cc4a86 100644 --- a/safe_transaction_service/history/serializers.py +++ b/safe_transaction_service/history/serializers.py @@ -981,40 +981,3 @@ def validate(self, attrs): attrs["delegator"] = delegator return attrs - - -class SafeDelegateSerializer(SafeDelegateDeleteSerializer): - """ - Deprecated in favour of DelegateSerializer - """ - - label = serializers.CharField(max_length=50) - - def get_valid_delegators( - self, - ethereum_client: EthereumClient, - safe_address: ChecksumAddress, - delegate: ChecksumAddress, - ) -> List[ChecksumAddress]: - """ - :param ethereum_client: - :param safe_address: - :param delegate: - :return: Valid delegators for a Safe. 
A delegate shouldn't be able to add itself - """ - return get_safe_owners(safe_address) - - def save(self, **kwargs): - safe_address = self.validated_data["safe"] - delegate = self.validated_data["delegate"] - delegator = self.validated_data["delegator"] - label = self.validated_data["label"] - obj, _ = SafeContractDelegate.objects.update_or_create( - safe_contract_id=safe_address, - delegate=delegate, - defaults={ - "label": label, - "delegator": delegator, - }, - ) - return obj diff --git a/safe_transaction_service/history/tests/test_views.py b/safe_transaction_service/history/tests/test_views.py index fbc35371..b2ee3b17 100644 --- a/safe_transaction_service/history/tests/test_views.py +++ b/safe_transaction_service/history/tests/test_views.py @@ -1698,223 +1698,6 @@ def test_safe_balances_usd_view( ], ) - def test_get_safe_delegate_list(self): - safe_address = Account.create().address - response = self.client.get( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["count"], 0) - - safe_contract_delegate = SafeContractDelegateFactory() - safe_address = safe_contract_delegate.safe_contract_id - response = self.client.get( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.data["count"], 1) - result = response.data["results"][0] - self.assertEqual(result["delegate"], safe_contract_delegate.delegate) - self.assertEqual(result["delegator"], safe_contract_delegate.delegator) - self.assertEqual(result["label"], safe_contract_delegate.label) - - safe_contract_delegate = SafeContractDelegateFactory( - safe_contract=safe_contract_delegate.safe_contract - ) - response = self.client.get( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["count"], 2) - - # A different non related Safe should not increase the number - SafeContractDelegateFactory() - response = self.client.get( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["count"], 2) - - def test_delete_safe_delegate_list(self): - endpoint = "v1:history:safe-delegates" - - owner_account = Account.create() - safe_address = self.deploy_test_safe(owners=[owner_account.address]).address - safe_contract = SafeContractFactory(address=safe_address) - response = self.client.delete( - reverse(endpoint, args=(safe_address,)), format="json" - ) - self.assertEqual( - response.status_code, status.HTTP_400_BAD_REQUEST - ) # Data is missing - - data = { - "signature": "0x" + "1" * 130, - } - not_existing_safe = Account.create().address - response = self.client.delete( - reverse(endpoint, args=(not_existing_safe,)), format="json", data=data - ) - self.assertIn( - f"Safe={not_existing_safe} does not exist", - response.data["non_field_errors"][0], - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - hash_to_sign = DelegateSignatureHelper.calculate_hash( - safe_address, eth_sign=True - ) - data["signature"] = owner_account.signHash(hash_to_sign)["signature"].hex() - SafeContractDelegateFactory(safe_contract=safe_contract) - SafeContractDelegateFactory(safe_contract=safe_contract) - SafeContractDelegateFactory(safe_contract=SafeContractFactory()) - 
self.assertEqual(SafeContractDelegate.objects.count(), 3) - response = self.client.delete( - reverse(endpoint, args=(safe_address,)), format="json", data=data - ) - self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(SafeContractDelegate.objects.count(), 1) - - # Sign random address instead of the Safe address - hash_to_sign = DelegateSignatureHelper.calculate_hash( - Account.create().address, eth_sign=True - ) - data["signature"] = owner_account.signHash(hash_to_sign)["signature"].hex() - response = self.client.delete( - reverse(endpoint, args=(safe_address,)), format="json", data=data - ) - self.assertIn( - "Signing owner is not an owner of the Safe", - response.data["non_field_errors"][0], - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_post_safe_delegate(self): - safe_address = Account.create().address - delegate_address = Account.create().address - label = "Saul Goodman" - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertEqual( - response.status_code, status.HTTP_400_BAD_REQUEST - ) # Data is missing - - data = { - "delegate": delegate_address, - "label": label, - "signature": "0x" + "1" * 130, - } - - owner_account = Account.create() - safe_address = self.deploy_test_safe(owners=[owner_account.address]).address - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertIn( - f"Safe={safe_address} does not exist", response.data["non_field_errors"][0] - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - safe_contract = SafeContractFactory(address=safe_address) - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertIn( - "Signing owner is not an owner of the Safe", - response.data["non_field_errors"][0], - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - self.assertEqual(SafeContractDelegate.objects.count(), 0) - hash_to_sign = DelegateSignatureHelper.calculate_hash(delegate_address) - data["signature"] = owner_account.signHash(hash_to_sign)["signature"].hex() - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(SafeContractDelegate.objects.count(), 1) - safe_contract_delegate = SafeContractDelegate.objects.first() - self.assertEqual(safe_contract_delegate.delegate, delegate_address) - self.assertEqual(safe_contract_delegate.delegator, owner_account.address) - self.assertEqual(safe_contract_delegate.label, label) - - label = "Jimmy McGill" - data["label"] = label - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(SafeContractDelegate.objects.count(), 1) - safe_contract_delegate.refresh_from_db() - self.assertEqual(safe_contract_delegate.label, label) - - another_label = "Kim Wexler" - another_delegate_address = Account.create().address - data = { - "delegate": another_delegate_address, - "label": another_label, - "signature": owner_account.signHash( - DelegateSignatureHelper.calculate_hash( - another_delegate_address, eth_sign=True - ) - )["signature"].hex(), - } - response = self.client.post( - 
reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - - # Test not internal server error on contract signature - signature = signature_to_bytes( - 0, int(owner_account.address, 16), 65 - ) + HexBytes("0" * 65) - data["signature"] = signature.hex() - response = self.client.post( - reverse("v1:history:safe-delegates", args=(safe_address,)), - format="json", - data=data, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - response = self.client.get( - reverse("v1:history:safe-delegates", args=(safe_address,)), format="json" - ) - self.assertCountEqual( - response.data["results"], - [ - { - "delegate": delegate_address, - "delegator": owner_account.address, - "label": label, - "safe": safe_address, - }, - { - "delegate": another_delegate_address, - "delegator": owner_account.address, - "label": another_label, - "safe": safe_address, - }, - ], - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(SafeContractDelegate.objects.count(), 2) - self.assertCountEqual( - SafeContractDelegate.objects.get_delegates_for_safe(safe_address), - [delegate_address, another_delegate_address], - ) - def test_delegates_post(self): url = reverse("v1:history:delegates") safe_address = Account.create().address diff --git a/safe_transaction_service/history/urls.py b/safe_transaction_service/history/urls.py index a0f6bfd5..b5627dd9 100644 --- a/safe_transaction_service/history/urls.py +++ b/safe_transaction_service/history/urls.py @@ -92,11 +92,6 @@ views.SafeBalanceUsdView.as_view(), name="safe-balances-usd", ), - path( - "safes//delegates/", - views.SafeDelegateListView.as_view(), - name="safe-delegates", - ), path( "safes//delegates//", views.SafeDelegateDestroyView.as_view(), diff --git a/safe_transaction_service/history/views.py b/safe_transaction_service/history/views.py index 19bc55f4..0b480447 100644 --- a/safe_transaction_service/history/views.py +++ b/safe_transaction_service/history/views.py @@ -626,111 +626,6 @@ def get(self, *args, **kwargs): return super().get(*args, **kwargs) -class SafeDelegateListView(ListCreateAPIView): - pagination_class = pagination.DefaultPagination - - def get_queryset(self): - return SafeContractDelegate.objects.filter( - safe_contract_id=self.kwargs["address"] - ) - - def get_serializer_class(self): - if self.request.method == "GET": - return serializers.SafeDelegateResponseSerializer - elif self.request.method == "POST": - return serializers.SafeDelegateSerializer - elif self.request.method == "DELETE": - return serializers.SafeDelegateDeleteSerializer - - @swagger_auto_schema( - deprecated=True, - operation_description="Use /delegates endpoint", - responses={400: "Invalid data", 422: "Invalid Ethereum address"}, - ) - def get(self, request, address, **kwargs): - """ - Get the list of delegates for a Safe address - """ - if not fast_is_checksum_address(address): - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Checksum address validation failed", - "arguments": [address], - }, - ) - - return super().get(request, address, **kwargs) - - @swagger_auto_schema( - deprecated=True, - operation_description="Use /delegates endpoint", - responses={ - 202: "Accepted", - 400: "Malformed data", - 422: "Invalid Ethereum address/Error processing data", - }, - ) - def post(self, request, address, **kwargs): - """ - Create a delegate for a Safe address with a custom label. 
Calls with same delegate but different label or - signer will update the label or delegator if different. - For the signature we are using TOTP with `T0=0` and `Tx=3600`. TOTP is calculated by taking the - Unix UTC epoch time (no milliseconds) and dividing by 3600 (natural division, no decimals) - For signature this hash need to be signed: keccak(checksummed address + str(int(current_epoch // 3600))) - For example: - - We want to add the delegate `0x132512f995866CcE1b0092384A6118EDaF4508Ff` and `epoch=1586779140`. - - `TOTP = epoch // 3600 = 1586779140 // 3600 = 440771` - - The hash to sign by a Safe owner would be `keccak("0x132512f995866CcE1b0092384A6118EDaF4508Ff440771")` - """ - if not fast_is_checksum_address(address): - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Checksum address validation failed", - "arguments": [address], - }, - ) - - request.data["safe"] = address - return super().post(request, address, **kwargs) - - @swagger_auto_schema( - operation_id="safes_delegates_delete_all", - deprecated=True, - operation_description="Use /delegates endpoint", - responses={ - 204: "Deleted", - 400: "Malformed data", - 422: "Invalid Ethereum address/Error processing data", - }, - ) - def delete(self, request, address, *args, **kwargs): - """ - Delete all delegates for a Safe. Signature is built the same way that for adding a delegate using the Safe - address as the delegate. - - Check `POST /delegates/` - """ - if not fast_is_checksum_address(address): - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Checksum address validation failed", - "arguments": [address], - }, - ) - request.data["safe"] = address - request.data["delegate"] = address - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - SafeContractDelegate.objects.filter(safe_contract_id=address).delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - class SafeDelegateDestroyView(DestroyAPIView): serializer_class = serializers.SafeDelegateDeleteSerializer From ce26961013d14991e2b0811f0a5e5dcab2959025 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 06:59:04 +0000 Subject: [PATCH 03/35] Bump faker from 18.9.0 to 18.10.1 Bumps [faker](https://github.com/joke2k/faker) from 18.9.0 to 18.10.1. - [Release notes](https://github.com/joke2k/faker/releases) - [Changelog](https://github.com/joke2k/faker/blob/master/CHANGELOG.md) - [Commits](https://github.com/joke2k/faker/compare/v18.9.0...v18.10.1) --- updated-dependencies: - dependency-name: faker dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 4893b902..5a3266fc 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -3,7 +3,7 @@ coverage==7.2.5 django-stubs==4.2.0 django-test-migrations==1.3.0 factory-boy==3.2.1 -faker==18.9.0 +faker==18.10.1 mypy==1.0.1 pytest==7.3.1 pytest-celery==0.0.0 From 5b29a619392de94407816cf8b5b0a7e928aceadf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 06:59:14 +0000 Subject: [PATCH 04/35] Bump coverage from 7.2.5 to 7.2.7 Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.5 to 7.2.7. 
- [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.5...7.2.7) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 5a3266fc..c7c73654 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,5 +1,5 @@ -r requirements.txt -coverage==7.2.5 +coverage==7.2.7 django-stubs==4.2.0 django-test-migrations==1.3.0 factory-boy==3.2.1 From ba3d1583b04c4376e43656a4d494f6fec8708f50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 07:00:06 +0000 Subject: [PATCH 05/35] Bump boto3 from 1.26.142 to 1.26.151 Bumps [boto3](https://github.com/boto/boto3) from 1.26.142 to 1.26.151. - [Release notes](https://github.com/boto/boto3/releases) - [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/boto3/compare/1.26.142...1.26.151) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0add0ede..93e2e0d4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -boto3==1.26.142 +boto3==1.26.151 cachetools==5.3.1 celery==5.2.7 django==4.2.1 From 730ef29b175cf843e3804a8aae1a6506ba341b2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mois=C3=A9s=20Fern=C3=A1ndez?= <7888669+moisses89@users.noreply.github.com> Date: Wed, 14 Jun 2023 12:07:30 +0200 Subject: [PATCH 06/35] Send events to queue (#1465) --- .github/workflows/python.yml | 10 + config/settings/base.py | 10 + config/settings/test.py | 2 + docker-compose.yml | 2 + requirements.txt | 1 + run_tests.sh | 6 +- safe_transaction_service/events/__init__.py | 0 safe_transaction_service/events/apps.py | 6 + .../events/services/__init__.py | 0 .../events/services/queue_service.py | 283 ++++++++++++++++++ safe_transaction_service/events/tasks.py | 14 + .../events/tests/__init__.py | 0 .../events/tests/test_queue_service.py | 55 ++++ .../events/tests/test_tasks.py | 38 +++ safe_transaction_service/history/signals.py | 2 + .../history/tests/test_signals.py | 37 ++- 16 files changed, 448 insertions(+), 18 deletions(-) create mode 100644 safe_transaction_service/events/__init__.py create mode 100644 safe_transaction_service/events/apps.py create mode 100644 safe_transaction_service/events/services/__init__.py create mode 100644 safe_transaction_service/events/services/queue_service.py create mode 100644 safe_transaction_service/events/tasks.py create mode 100644 safe_transaction_service/events/tests/__init__.py create mode 100644 safe_transaction_service/events/tests/test_queue_service.py create mode 100644 safe_transaction_service/events/tests/test_tasks.py diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 9b9d1a82..cc065d48 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -53,6 +53,15 @@ jobs: --health-retries 5 ports: - 5432:5432 + rabbitmq: + image: rabbitmq:alpine + options: >- + --health-cmd "rabbitmqctl await_startup" + --health-interval 
10s + --health-timeout 5s + --health-retries 5 + ports: + - "5672:5672" steps: - name: Setup and run ganache run: | @@ -86,6 +95,7 @@ jobs: ETHEREUM_TRACING_NODE_URL: http://localhost:8545 ETH_HASH_BACKEND: pysha3 REDIS_URL: redis://localhost:6379/0 + EVENTS_QUEUE_URL: amqp://guest:guest@localhost:5672/ - name: Send results to coveralls continue-on-error: true # Ignore coveralls problems run: coveralls --service=github diff --git a/config/settings/base.py b/config/settings/base.py index ce370c5f..9b3ee80d 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -99,6 +99,7 @@ "safe_transaction_service.notifications.apps.NotificationsConfig", "safe_transaction_service.safe_messages.apps.SafeMessagesConfig", "safe_transaction_service.tokens.apps.TokensConfig", + "safe_transaction_service.events.apps.EventsConfig", ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS @@ -242,6 +243,10 @@ "safe_transaction_service.history.tasks.send_webhook_task", {"queue": "webhooks", "delivery_mode": "transient"}, ), + ( + "safe_transaction_service.events.tasks.send_event_to_queue_task", + {"queue": "webhooks", "delivery_mode": "transient"}, + ), ( "safe_transaction_service.history.tasks.reindex_mastercopies_last_hours_task", {"queue": "indexing"}, @@ -489,6 +494,11 @@ "ALERT_OUT_OF_SYNC_EVENTS_THRESHOLD", default=0.1 ) # Percentage of Safes allowed to be out of sync without alerting. By default 10% +# Events +# ------------------------------------------------------------------------------ +EVENTS_QUEUE_URL = env("EVENTS_QUEUE_URL", default=None) +EVENTS_QUEUE_ASYNC_CONNECTION = env("EVENTS_QUEUE_ASYNC_CONNECTION", default=False) +EVENTS_QUEUE_EXCHANGE_NAME = env("EVENTS_QUEUE_EXCHANGE_NAME", default="amq.fanout") # AWS S3 https://github.com/etianen/django-s3-storage # ------------------------------------------------------------------------------ diff --git a/config/settings/test.py b/config/settings/test.py index 0c339eb0..e3926085 100644 --- a/config/settings/test.py +++ b/config/settings/test.py @@ -47,3 +47,5 @@ "level": "DEBUG", } } + +EVENTS_QUEUE_ASYNC_CONNECTION = False diff --git a/docker-compose.yml b/docker-compose.yml index 4ebe3a43..7015f496 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,6 +24,8 @@ services: rabbitmq: image: rabbitmq:alpine + ports: + - "5672:5672" db: image: postgres:13-alpine diff --git a/requirements.txt b/requirements.txt index 93e2e0d4..77a40fe5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,6 +26,7 @@ gunicorn[gevent]==20.1.0 hexbytes==0.2.3 hiredis==2.2.3 packaging>=21.0 +pika==1.3.2 pillow==9.5.0 psycogreen==1.0.2 psycopg2==2.9.6 diff --git a/run_tests.sh b/run_tests.sh index 04601490..f2cda5e7 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -4,9 +4,9 @@ set -euo pipefail export DJANGO_SETTINGS_MODULE=config.settings.test export DJANGO_DOT_ENV_FILE=.env.test -docker compose -f docker-compose.yml -f docker-compose.dev.yml build --force-rm db redis ganache -docker compose -f docker-compose.yml -f docker-compose.dev.yml up --no-start db redis ganache -docker compose -f docker-compose.yml -f docker-compose.dev.yml start db redis ganache +docker compose -f docker-compose.yml -f docker-compose.dev.yml build --force-rm db redis ganache rabbitmq +docker compose -f docker-compose.yml -f docker-compose.dev.yml up --no-start db redis ganache rabbitmq +docker compose -f docker-compose.yml -f docker-compose.dev.yml start db redis ganache rabbitmq sleep 10 diff 
--git a/safe_transaction_service/events/__init__.py b/safe_transaction_service/events/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/events/apps.py b/safe_transaction_service/events/apps.py new file mode 100644 index 00000000..447f6137 --- /dev/null +++ b/safe_transaction_service/events/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class EventsConfig(AppConfig): + name = "safe_transaction_service.events" + verbose_name = "Events queue for Safe Transaction Service" diff --git a/safe_transaction_service/events/services/__init__.py b/safe_transaction_service/events/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/events/services/queue_service.py b/safe_transaction_service/events/services/queue_service.py new file mode 100644 index 00000000..2637eda3 --- /dev/null +++ b/safe_transaction_service/events/services/queue_service.py @@ -0,0 +1,283 @@ +import json +import logging +from typing import Any, Dict, List, Optional + +from django.conf import settings + +import pika.exceptions +from pika import BlockingConnection, URLParameters +from pika.adapters.gevent_connection import GeventConnection +from pika.channel import Channel +from pika.exchange_type import ExchangeType + +logger = logging.getLogger(__name__) + + +class QueueServiceProvider: + def __new__(cls): + if not hasattr(cls, "instance"): + if settings.EVENTS_QUEUE_URL: + if settings.EVENTS_QUEUE_ASYNC_CONNECTION: + cls.instance = AsyncQueueService() + else: + cls.instance = SyncQueueService() + else: + # Mock send_event to not configured host us is not mandatory configure a queue for events + cls.instance = MockedQueueService() + logger.warning("MockedQueueService is used") + return cls.instance + + @classmethod + def del_singleton(cls): + if hasattr(cls, "instance"): + del cls.instance + + +class QueueService: + def __init__(self): + self.exchange_name: str = settings.EVENTS_QUEUE_EXCHANGE_NAME + self._channel: Channel = None + self._connection: GeventConnection = None + self.unsent_events: List = [] + self._connection_parameters: URLParameters = URLParameters( + settings.EVENTS_QUEUE_URL + ) + + def send_event( + self, payload: Dict[str, Any], fail_retry: Optional[bool] = True + ) -> bool: + """ + Send an event to rabbitMq exchange + + :param payload: Dict with the payload of the event + :param fail_retry: if True the unsent event because any error will be retried. 
+ """ + if self._channel is None or not self._channel.is_open: + logger.warning("Connection is still not initialized") + if fail_retry: + self.unsent_events.append(payload) + return False + + try: + event = json.dumps(payload) + self._channel.basic_publish( + exchange=self.exchange_name, routing_key="", body=event + ) + return True + except pika.exceptions.ConnectionClosedByBroker: + logger.warning("Event can not be sent due to there is no channel opened") + if fail_retry: + self.unsent_events.append(payload) + return False + + def send_unsent_events(self) -> int: + """ + If connection is ready send the unsent messages list due connection broken + + :return: number of messages sent + """ + sent_events = 0 + if self._channel.is_open and len(self.unsent_events) > 0: + logger.info("Sending %i not sent messages", len(self.unsent_events)) + for unsent_message in list(self.unsent_events): + if self.send_event(unsent_message, fail_retry=False): + self.unsent_events.remove(unsent_message) + sent_events += 1 + else: + break + + return sent_events + + def remove_unsent_events(self): + self.unsent_events = [] + + +class AsyncQueueService(QueueService): + + # Singleton class definition + def __init__(self): + super().__init__() + self.connect() + + def connect(self) -> GeventConnection: + """ + This method connects to RabbitMq. + When the connection is established, the on_connection_open method + will be invoked by pika. + + :return: GeventConnection + """ + return GeventConnection( + self._connection_parameters, + on_open_callback=self.on_connection_open, + on_open_error_callback=self.on_connection_open_error, + on_close_callback=self.on_connection_closed, + ) + + def on_connection_open(self, connection: GeventConnection): + """ + This method is called by pika once the connection to RabbitMQ has + been established. It passes the handle to the connection object. + + :param GeventConnection connection: The connection + """ + + logger.info("Connection opened with %s", self._connection_parameters.host) + self._connection = connection + self.open_channel() + + def on_connection_open_error(self, connection: GeventConnection, err: Exception): + """ + This method is called by pika if the connection to RabbitMQ + can't be established. Connection object is paased if were necessary + Always retry the reconnection every 5 seconds. + + :param GeventConnection: The connection + :param Exception err: The error + """ + logger.error( + "Connection open failed with %s, retrying in 5 seconds: %s", + self._connection_parameters.host, + err, + ) + self._connection.ioloop.call_later(5, self.connect) + + def on_connection_closed(self, connection: GeventConnection, reason: Exception): + """ + This method is invoked by pika when the connection to RabbitMQ is + closed unexpectedly. Since it is unexpected, we will reconnect to + RabbitMQ if it disconnects. + + :param GeventConnection: The closed connection obj + :param Exception reason: exception representing reason for loss of + connection. + """ + self._channel = None + logger.error( + "Connection closed with %s, reopening in 5 seconds: %s", + self._connection_parameters.host, + reason, + ) + self._connection.ioloop.call_later(5, self.connect) + + def open_channel(self): + """ + This method will open a new channel with RabbitMQ by issuing the + Channel.Open RPC command. When RabbitMQ confirms the channel is open + by sending the Channel.OpenOK RPC reply, the on_channel_open method + will be invoked. 
+ """ + logger.info("Opening a new channel") + self._connection.channel(on_open_callback=self.on_channel_open) + + def on_channel_open(self, channel: Channel): + """ + This method is invoked by pika when the channel has been opened. + The channel object is passed in so we can make use of it. + + :param pika.channel.Channel channel: The channel object + """ + logger.info("Channel with number %i opened", channel.channel_number) + self._channel = channel + self._channel.add_on_close_callback(self.on_channel_closed) + self.setup_exchange() + + def on_channel_closed(self, channel: Channel, reason: Exception): + """ + Invoked by pika when RabbitMQ unexpectedly closes the channel. + Channels are usually closed if you attempt to do something that + violates the protocol. + In this method we retry to open a new channel with rabbitMQ if the connection is still open. + + :param Channel channel: The closed channel + :param Exception reason: why the channel was closed + """ + logger.warning("Channel %i was closed: %s", channel.channel_number, reason) + self._channel = None + if self._connection and self._connection.is_open: + # If channel was closed and connection is still active we try to reopen the channel + logger.error( + "Connection is opened retry to open channel in 5 seconds: %s", + self._connection_parameters.host, + reason, + ) + self._connection.ioloop.call_later(5, self.open_channel()) + + def setup_exchange(self): + """ + Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC + command. When it is complete, the on_exchange_declareok method will + be invoked by pika. + """ + logger.info("Declaring exchange %s", self.exchange_name) + + self._channel.exchange_declare( + exchange=self.exchange_name, + exchange_type=ExchangeType.fanout, + durable=True, + callback=self.on_exchange_declareok, + ) + + def on_exchange_declareok(self, _unused_frame): + """Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC + command. + Send unsent messages that cannot be sent as due connection errors. + + :param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame + """ + + logger.info("Exchange declared: %s", self.exchange_name) + self.send_unsent_events() + + +class SyncQueueService(QueueService): + """ + Synchronous connection with test purpose as we cannot test using gevent connection + """ + + def __init__(self): + super().__init__() + self.connect() + + def connect(self) -> BlockingConnection: + """ + This method connects to RabbitMq using Blockingconnection. + Store in _connection the BlocingConnection object and creates a new channel + + :return: BlockingConnection + """ + try: + self._connection = BlockingConnection(self._connection_parameters) + self._channel = self.open_channel() + self.setup_exchange() + return self._connection + except pika.exceptions.AMQPConnectionError: + logger.error("Cannot open connection, retrying") + + def open_channel(self) -> Channel: + """ + Open a new channel + + :return: channel opened + """ + return self._connection.channel() + + def setup_exchange(self): + """ + Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC + command. 
+ """ + logger.info("Declaring exchange %s", self.exchange_name) + + self._channel.exchange_declare( + exchange=self.exchange_name, exchange_type=ExchangeType.fanout, durable=True + ) + + +class MockedQueueService: + """ + Mocked class to use in case that there is not rabbitMq queue to send events + """ + + def send_event(self, event: Dict[str, Any]): + logger.debug("MockedQueueService: Not sending event with payload %s", event) diff --git a/safe_transaction_service/events/tasks.py b/safe_transaction_service/events/tasks.py new file mode 100644 index 00000000..8b2391c8 --- /dev/null +++ b/safe_transaction_service/events/tasks.py @@ -0,0 +1,14 @@ +from typing import Any, Dict + +from celery import app + +from safe_transaction_service.events.services.queue_service import QueueServiceProvider + + +@app.shared_task() +def send_event_to_queue_task(payload: Dict[str, Any]) -> bool: + if payload: + queue_service = QueueServiceProvider() + return queue_service.send_event(payload) + + return False diff --git a/safe_transaction_service/events/tests/__init__.py b/safe_transaction_service/events/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/events/tests/test_queue_service.py b/safe_transaction_service/events/tests/test_queue_service.py new file mode 100644 index 00000000..518d1173 --- /dev/null +++ b/safe_transaction_service/events/tests/test_queue_service.py @@ -0,0 +1,55 @@ +import json +from unittest import mock + +from django.test import TestCase + +from pika.channel import Channel +from pika.exceptions import ConnectionClosedByBroker + +from safe_transaction_service.events.services.queue_service import QueueServiceProvider + + +class TestQueueService(TestCase): + def setUp(self): + self.queue_service = QueueServiceProvider() + # Create queue for test + self.queue = "test_queue" + self.queue_service._channel.queue_declare(self.queue) + self.queue_service._channel.queue_bind( + self.queue, self.queue_service.exchange_name + ) + + def test_send_unsent_messages(self): + queue_service = QueueServiceProvider() + messages_to_send = 10 + queue_service.remove_unsent_events() + with mock.patch.object( + Channel, + "basic_publish", + side_effect=ConnectionClosedByBroker(320, "Connection closed"), + ): + for i in range(messages_to_send): + payload = f"not sent {i}" + self.assertFalse(queue_service.send_event(payload)) + # Shouldn't add this message to unsent_messages list + self.assertFalse(queue_service.send_event(payload, fail_retry=False)) + + self.assertEquals(len(queue_service.unsent_events), messages_to_send) + self.assertEquals(queue_service.send_unsent_events(), 0) + + # After reconnection should send messages + self.assertEquals(queue_service.send_unsent_events(), messages_to_send) + self.assertEquals(len(queue_service.unsent_events), 0) + for i in range(messages_to_send): + payload = f"not sent {i}" + _, _, body = queue_service._channel.basic_get(self.queue, auto_ack=True) + self.assertEquals(json.loads(body), payload) + + def test_send_event_to_queue(self): + payload = {"event": "test_event", "type": "event type"} + + self.assertTrue(self.queue_service.send_event(payload)) + + # Check if message was written to the queue + _, _, body = self.queue_service._channel.basic_get(self.queue, auto_ack=True) + self.assertEquals(json.loads(body), payload) diff --git a/safe_transaction_service/events/tests/test_tasks.py b/safe_transaction_service/events/tests/test_tasks.py new file mode 100644 index 00000000..639b7d80 --- /dev/null +++ 
b/safe_transaction_service/events/tests/test_tasks.py @@ -0,0 +1,38 @@ +import json +from unittest import mock + +from django.test import TestCase + +from pika.channel import Channel +from pika.exceptions import ConnectionClosedByBroker + +from safe_transaction_service.events.tasks import send_event_to_queue_task +from safe_transaction_service.events.tests.test_queue_service import TestQueueService + + +class TestTasks(TestQueueService, TestCase): + def test_send_event_to_queue_task(self): + self.assertFalse(send_event_to_queue_task(None)) + payload = {"event": "test_event_task", "type": "event task type"} + with mock.patch.object( + Channel, "basic_publish", return_value=None + ) as mock_publish: + self.assertTrue(send_event_to_queue_task(payload)) + mock_publish.assert_called_once_with( + exchange=self.queue_service.exchange_name, + routing_key="", + body=json.dumps(payload), + properties=None, + mandatory=False, + ) + + self.assertTrue(send_event_to_queue_task(payload)) + _, _, body = self.queue_service._channel.basic_get(self.queue, auto_ack=True) + self.assertEquals(json.loads(body), payload) + + with mock.patch.object( + Channel, + "basic_publish", + side_effect=ConnectionClosedByBroker(320, "Connection closed"), + ): + self.assertFalse(send_event_to_queue_task(payload)) diff --git a/safe_transaction_service/history/signals.py b/safe_transaction_service/history/signals.py index d915789b..893912a2 100644 --- a/safe_transaction_service/history/signals.py +++ b/safe_transaction_service/history/signals.py @@ -6,6 +6,7 @@ from django.dispatch import receiver from django.utils import timezone +from safe_transaction_service.events.tasks import send_event_to_queue_task from safe_transaction_service.notifications.tasks import send_notification_task from .models import ( @@ -167,6 +168,7 @@ def process_webhook( countdown=5, priority=2, # Almost lowest priority ) + send_event_to_queue_task.delay(payload) else: logger.debug( "Notification will not be sent for created=%s object=%s", diff --git a/safe_transaction_service/history/tests/test_signals.py b/safe_transaction_service/history/tests/test_signals.py index 4784683f..3f8d4468 100644 --- a/safe_transaction_service/history/tests/test_signals.py +++ b/safe_transaction_service/history/tests/test_signals.py @@ -8,6 +8,7 @@ from gnosis.eth import EthereumNetwork +from safe_transaction_service.events.tasks import send_event_to_queue_task from safe_transaction_service.notifications.tasks import send_notification_task from ..models import ( @@ -77,24 +78,30 @@ def test_build_webhook_payload(self): self.assertEqual(payload["chainId"], str(EthereumNetwork.GANACHE.value)) @factory.django.mute_signals(post_save) - def test_process_webhook(self): + @mock.patch.object(send_webhook_task, "apply_async") + @mock.patch.object(send_notification_task, "apply_async") + @mock.patch.object(send_event_to_queue_task, "delay") + def test_process_webhook( + self, + webhook_task_mock, + send_notification_task_mock, + send_event_to_queue_task_mock, + ): multisig_confirmation = MultisigConfirmationFactory() - with mock.patch.object(send_webhook_task, "apply_async") as webhook_task_mock: - with mock.patch.object( - send_notification_task, "apply_async" - ) as send_notification_task_mock: - process_webhook(MultisigConfirmation, multisig_confirmation, True) - webhook_task_mock.assert_called() - send_notification_task_mock.assert_called() + process_webhook(MultisigConfirmation, multisig_confirmation, True) + webhook_task_mock.assert_called() + 
send_notification_task_mock.assert_called() + send_event_to_queue_task_mock.assert_called() + # reset calls + webhook_task_mock.reset_mock() + send_notification_task_mock.reset_mock() + send_event_to_queue_task_mock.reset_mock() multisig_confirmation.created -= timedelta(minutes=75) - with mock.patch.object(send_webhook_task, "apply_async") as webhook_task_mock: - with mock.patch.object( - send_notification_task, "apply_async" - ) as send_notification_task_mock: - process_webhook(MultisigConfirmation, multisig_confirmation, True) - webhook_task_mock.assert_not_called() - send_notification_task_mock.assert_not_called() + process_webhook(MultisigConfirmation, multisig_confirmation, True) + webhook_task_mock.assert_not_called() + send_notification_task_mock.assert_not_called() + send_event_to_queue_task_mock.assert_not_called() @factory.django.mute_signals(post_save) def test_is_relevant_notification_multisig_confirmation(self): From 7aa918b3f4552ac0685d7c5bae76578f58fc7d57 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Jun 2023 09:04:00 +0000 Subject: [PATCH 07/35] Bump django-stubs from 4.2.0 to 4.2.1 Bumps [django-stubs](https://github.com/typeddjango/django-stubs) from 4.2.0 to 4.2.1. - [Release notes](https://github.com/typeddjango/django-stubs/releases) - [Commits](https://github.com/typeddjango/django-stubs/compare/4.2.0...4.2.1) --- updated-dependencies: - dependency-name: django-stubs dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index c7c73654..81b2cbba 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,6 +1,6 @@ -r requirements.txt coverage==7.2.7 -django-stubs==4.2.0 +django-stubs==4.2.1 django-test-migrations==1.3.0 factory-boy==3.2.1 faker==18.10.1 From 7a80e53d67c3189004750ec42a384e47cc406b99 Mon Sep 17 00:00:00 2001 From: moisses89 <7888669+moisses89@users.noreply.github.com> Date: Wed, 14 Jun 2023 13:32:43 +0200 Subject: [PATCH 08/35] Fix connection is NoneType --- safe_transaction_service/events/services/queue_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/safe_transaction_service/events/services/queue_service.py b/safe_transaction_service/events/services/queue_service.py index 2637eda3..c53a85a8 100644 --- a/safe_transaction_service/events/services/queue_service.py +++ b/safe_transaction_service/events/services/queue_service.py @@ -140,7 +140,7 @@ def on_connection_open_error(self, connection: GeventConnection, err: Exception) self._connection_parameters.host, err, ) - self._connection.ioloop.call_later(5, self.connect) + connection.ioloop.call_later(5, self.connect) def on_connection_closed(self, connection: GeventConnection, reason: Exception): """ @@ -158,7 +158,7 @@ def on_connection_closed(self, connection: GeventConnection, reason: Exception): self._connection_parameters.host, reason, ) - self._connection.ioloop.call_later(5, self.connect) + connection.ioloop.call_later(5, self.connect) def open_channel(self): """ From 1f37e7fe014333d06509595e22d97e40b4add23f Mon Sep 17 00:00:00 2001 From: moisses89 <7888669+moisses89@users.noreply.github.com> Date: Thu, 15 Jun 2023 15:15:04 +0200 Subject: [PATCH 09/35] Remove swagger mail contact --- config/urls.py | 1 - 1 file changed, 1 deletion(-) diff --git a/config/urls.py b/config/urls.py index 
090691ff..79be7374 100644 --- a/config/urls.py +++ b/config/urls.py @@ -14,7 +14,6 @@ title="Safe Transaction Service API", default_version="v1", description="API to keep track of transactions sent via Gnosis Safe smart contracts", - contact=openapi.Contact(email="safe@gnosis.io"), license=openapi.License(name="MIT License"), ), validators=["flex", "ssv"], From 25503acbffd2cf0f6f6983e1ccca21ecf2bd8b05 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 06:58:15 +0000 Subject: [PATCH 10/35] Bump pytest-env from 0.8.1 to 0.8.2 Bumps [pytest-env](https://github.com/pytest-dev/pytest-env) from 0.8.1 to 0.8.2. - [Release notes](https://github.com/pytest-dev/pytest-env/releases) - [Commits](https://github.com/pytest-dev/pytest-env/compare/0.8.1...0.8.2) --- updated-dependencies: - dependency-name: pytest-env dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 81b2cbba..aa1e61b7 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -8,6 +8,6 @@ mypy==1.0.1 pytest==7.3.1 pytest-celery==0.0.0 pytest-django==4.5.2 -pytest-env==0.8.1 +pytest-env==0.8.2 pytest-rerunfailures==11.1.2 pytest-sugar==0.9.7 From 41993c716a99c76b1a40e40c48990ae85a9289e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 06:58:07 +0000 Subject: [PATCH 11/35] Bump web3 from 6.4.0 to 6.5.0 Bumps [web3](https://github.com/ethereum/web3.py) from 6.4.0 to 6.5.0. - [Release notes](https://github.com/ethereum/web3.py/releases) - [Changelog](https://github.com/ethereum/web3.py/blob/main/docs/releases.rst) - [Commits](https://github.com/ethereum/web3.py/compare/v6.4.0...v6.5.0) --- updated-dependencies: - dependency-name: web3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 77a40fe5..e787fb54 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,4 +33,4 @@ psycopg2==2.9.6 redis==4.5.5 requests==2.31.0 safe-eth-py[django]==5.4.3 -web3==6.4.0 +web3==6.5.0 From 06f62b5f7ff22d2f1938f3f5bd17146cbfc3795b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 06:58:02 +0000 Subject: [PATCH 12/35] Bump django-timezone-field from 5.0.0 to 5.1 Bumps [django-timezone-field](https://github.com/mfogel/django-timezone-field) from 5.0.0 to 5.1. - [Commits](https://github.com/mfogel/django-timezone-field/compare/5.0...5.1) --- updated-dependencies: - dependency-name: django-timezone-field dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e787fb54..a0f7e495 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,7 +15,7 @@ django-imagekit==4.1.0 django-model-utils==4.3.1 django-redis==5.2.0 django-s3-storage==0.14.0 -django-timezone-field==5.0.0 +django-timezone-field==5.1 djangorestframework==3.14.0 djangorestframework-camel-case==1.4.2 docutils==0.20.1 From 41ee52ce7aaeca0aa06f4b5ab6208eb6a41dced7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 06:57:53 +0000 Subject: [PATCH 13/35] Bump django-extensions from 3.2.1 to 3.2.3 Bumps [django-extensions](https://github.com/django-extensions/django-extensions) from 3.2.1 to 3.2.3. - [Release notes](https://github.com/django-extensions/django-extensions/releases) - [Changelog](https://github.com/django-extensions/django-extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/django-extensions/django-extensions/compare/3.2.1...3.2.3) --- updated-dependencies: - dependency-name: django-extensions dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a0f7e495..9e1438d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ django-db-geventpool==4.0.1 django-debug-toolbar django-debug-toolbar-force django-environ==0.10.0 -django-extensions==3.2.1 +django-extensions==3.2.3 django-filter==23.2 django-imagekit==4.1.0 django-model-utils==4.3.1 From 9d5a30e6e10c88b9752124182d9742bdf509f581 Mon Sep 17 00:00:00 2001 From: moisses89 <7888669+moisses89@users.noreply.github.com> Date: Tue, 20 Jun 2023 10:10:14 +0200 Subject: [PATCH 14/35] Fix swagger doc --- safe_transaction_service/safe_messages/views.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/safe_transaction_service/safe_messages/views.py b/safe_transaction_service/safe_messages/views.py index 01674b5d..fa1139b7 100644 --- a/safe_transaction_service/safe_messages/views.py +++ b/safe_transaction_service/safe_messages/views.py @@ -43,6 +43,9 @@ def get_serializer_context(self): ) return context + @swagger_auto_schema( + responses={201: "Created"}, + ) def post(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -89,7 +92,7 @@ def get(self, request, address, *args, **kwargs): @swagger_auto_schema( request_body=serializers.SafeMessageSerializer, - responses={201: serializers.SafeMessageResponseSerializer}, + responses={201: "Created"}, ) def post(self, request, address, *args, **kwargs): if not fast_is_checksum_address(address): From 59fab16ae2b9413c49e5710bc7393e38056ec255 Mon Sep 17 00:00:00 2001 From: moisses89 <7888669+moisses89@users.noreply.github.com> Date: Wed, 21 Jun 2023 17:23:42 +0200 Subject: [PATCH 15/35] Add default value for delegates serializer --- safe_transaction_service/history/serializers.py | 2 +- safe_transaction_service/history/tests/test_views.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/safe_transaction_service/history/serializers.py b/safe_transaction_service/history/serializers.py index 98cc4a86..7133944d 100644 --- a/safe_transaction_service/history/serializers.py +++ b/safe_transaction_service/history/serializers.py @@ -369,7 +369,7 @@ def 
check_delegate_signature( class DelegateSerializer(DelegateSignatureCheckerMixin, serializers.Serializer): - safe = EthereumAddressField(allow_null=True, required=False) + safe = EthereumAddressField(allow_null=True, required=False, default=None) delegate = EthereumAddressField() delegator = EthereumAddressField() signature = HexadecimalField(min_length=65) diff --git a/safe_transaction_service/history/tests/test_views.py b/safe_transaction_service/history/tests/test_views.py index b2ee3b17..dcbbd8a6 100644 --- a/safe_transaction_service/history/tests/test_views.py +++ b/safe_transaction_service/history/tests/test_views.py @@ -1771,7 +1771,6 @@ def test_delegates_post(self): "label": another_label, "delegate": delegate.address, "delegator": delegator.address, - "safe": None, "signature": delegator.signHash( DelegateSignatureHelper.calculate_hash(delegate.address, eth_sign=True) )["signature"].hex(), From 0597c25d088c8047e5f8d666d736917760ba258f Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Fri, 23 Jun 2023 11:06:03 +0200 Subject: [PATCH 16/35] Update postgres to v14 --- .github/workflows/python.yml | 2 +- docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index cc065d48..e7339fd0 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -42,7 +42,7 @@ jobs: ports: - 6379:6379 postgres: - image: postgres:13 + image: postgres:14 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres diff --git a/docker-compose.yml b/docker-compose.yml index 7015f496..d658d97f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,7 +28,7 @@ services: - "5672:5672" db: - image: postgres:13-alpine + image: postgres:14-alpine ports: - "5432:5432" environment: From 8b592d07d8d3bc1990ee87c3b9092e59dc49d3db Mon Sep 17 00:00:00 2001 From: moisses89 <7888669+moisses89@users.noreply.github.com> Date: Fri, 23 Jun 2023 14:04:54 +0200 Subject: [PATCH 17/35] Fix missing variable --- .env.test | 1 + 1 file changed, 1 insertion(+) diff --git a/.env.test b/.env.test index bf702220..1477f4f6 100644 --- a/.env.test +++ b/.env.test @@ -11,3 +11,4 @@ ETHEREUM_NODE_URL=http://localhost:8545 ETHEREUM_TRACING_NODE_URL=http://localhost:8545 ETH_HASH_BACKEND=pysha3 ENABLE_ANALYTICS=True +EVENTS_QUEUE_URL=amqp://guest:guest@rabbitmq/ From 56f04700165c9d0f6ae044d8f90d9a9f84edcef8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 06:58:49 +0000 Subject: [PATCH 18/35] Bump firebase-admin from 6.1.0 to 6.2.0 Bumps [firebase-admin](https://github.com/firebase/firebase-admin-python) from 6.1.0 to 6.2.0. - [Release notes](https://github.com/firebase/firebase-admin-python/releases) - [Commits](https://github.com/firebase/firebase-admin-python/compare/v6.1.0...v6.2.0) --- updated-dependencies: - dependency-name: firebase-admin dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 4bf69ee9..dc5588d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ djangorestframework==3.14.0 djangorestframework-camel-case==1.4.2 docutils==0.20.1 drf-yasg[validation]==1.21.5 -firebase-admin==6.1.0 +firebase-admin==6.2.0 flower==1.2.0 gunicorn[gevent]==20.1.0 hexbytes==0.2.3 From 86797fbf65563c36c38c2e18bd010935a10ff427 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 06:58:45 +0000 Subject: [PATCH 19/35] Bump pytest from 7.3.1 to 7.4.0 Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.1 to 7.4.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.1...7.4.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index aa1e61b7..66100af6 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -5,7 +5,7 @@ django-test-migrations==1.3.0 factory-boy==3.2.1 faker==18.10.1 mypy==1.0.1 -pytest==7.3.1 +pytest==7.4.0 pytest-celery==0.0.0 pytest-django==4.5.2 pytest-env==0.8.2 From e8ef01662ed59b8d4f3e710487733c555e47a351 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 10:23:52 +0000 Subject: [PATCH 20/35] Bump drf-yasg[validation] from 1.21.5 to 1.21.6 Bumps [drf-yasg[validation]](https://github.com/axnsan12/drf-yasg) from 1.21.5 to 1.21.6. - [Release notes](https://github.com/axnsan12/drf-yasg/releases) - [Changelog](https://github.com/axnsan12/drf-yasg/blob/1.21.6/docs/changelog.rst) - [Commits](https://github.com/axnsan12/drf-yasg/compare/1.21.5...1.21.6) --- updated-dependencies: - dependency-name: drf-yasg[validation] dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index dc5588d1..6c21675e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ django-timezone-field==5.1 djangorestframework==3.14.0 djangorestframework-camel-case==1.4.2 docutils==0.20.1 -drf-yasg[validation]==1.21.5 +drf-yasg[validation]==1.21.6 firebase-admin==6.2.0 flower==1.2.0 gunicorn[gevent]==20.1.0 From 4603e01ec97f2d1603e4ede6fefa7d5aef048d55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ux=C3=ADo?= Date: Mon, 26 Jun 2023 14:18:39 +0200 Subject: [PATCH 21/35] Revert "Bump drf-yasg[validation] from 1.21.5 to 1.21.6" This reverts commit e8ef01662ed59b8d4f3e710487733c555e47a351. 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6c21675e..dc5588d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ django-timezone-field==5.1 djangorestframework==3.14.0 djangorestframework-camel-case==1.4.2 docutils==0.20.1 -drf-yasg[validation]==1.21.6 +drf-yasg[validation]==1.21.5 firebase-admin==6.2.0 flower==1.2.0 gunicorn[gevent]==20.1.0 From 9eb86f52fb34bc06a73385adc32c9c107941bdb7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Jun 2023 12:19:57 +0000 Subject: [PATCH 22/35] Bump django from 4.2.1 to 4.2.2 Bumps [django](https://github.com/django/django) from 4.2.1 to 4.2.2. - [Commits](https://github.com/django/django/compare/4.2.1...4.2.2) --- updated-dependencies: - dependency-name: django dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index dc5588d1..96a8a233 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ boto3==1.26.151 cachetools==5.3.1 celery==5.2.7 -django==4.2.1 +django==4.2.2 django-cache-memoize==0.1.10 django-celery-beat==2.5.0 django-cors-headers==4.0.0 From aeb913ebdcad77d4bfe22b1c63958e72a30e7555 Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Mon, 26 Jun 2023 18:46:42 +0200 Subject: [PATCH 23/35] Add migration to rename safe apps url - Closes #1469 --- .../migrations/0073_safe_apps_links.py | 32 +++++++ .../history/tests/test_migrations.py | 87 +++++++++++++++++++ 2 files changed, 119 insertions(+) create mode 100644 safe_transaction_service/history/migrations/0073_safe_apps_links.py diff --git a/safe_transaction_service/history/migrations/0073_safe_apps_links.py b/safe_transaction_service/history/migrations/0073_safe_apps_links.py new file mode 100644 index 00000000..c69ae57e --- /dev/null +++ b/safe_transaction_service/history/migrations/0073_safe_apps_links.py @@ -0,0 +1,32 @@ +""" +Migrate Safe Apps links from apps.gnosis-safe.io -> apps-portal.safe.global + +Generated by Django 4.2.1 on 2023-06-26 14:18 +""" + +from django.db import migrations + +# No way to do this efficiently using Django's ORM, so using a raw SQL +migrate_safe_apps_links_sql = """ + UPDATE history_multisigtransaction + SET origin = replace(origin::text, 'https://apps.gnosis-safe.io', 'https://apps-portal.safe.global')::jsonb + WHERE origin->>'url' LIKE 'https://apps.gnosis-safe.io%' +""" + +reverse_migrate_safe_apps_links_sql = """ + UPDATE history_multisigtransaction + SET origin = replace(origin::text, 'https://apps-portal.safe.global', 'https://apps.gnosis-safe.io')::jsonb + WHERE origin->>'url' LIKE 'https://apps-portal.safe.global%' +""" + + +class Migration(migrations.Migration): + dependencies = [ + ("history", "0072_safecontract_banned_and_more"), + ] + + operations = [ + migrations.RunSQL( + migrate_safe_apps_links_sql, reverse_sql=reverse_migrate_safe_apps_links_sql + ) + ] diff --git a/safe_transaction_service/history/tests/test_migrations.py b/safe_transaction_service/history/tests/test_migrations.py index d0424ad7..ee6a12b8 100644 --- a/safe_transaction_service/history/tests/test_migrations.py +++ b/safe_transaction_service/history/tests/test_migrations.py @@ -7,6 +7,8 @@ from eth_account import Account from web3 import Web3 +from safe_transaction_service.history.tests.factories import 
MultisigTransactionFactory + class TestMigrations(TestCase): def setUp(self) -> None: @@ -241,3 +243,88 @@ def test_migration_backward_0069_db_empty(self): ) SafeContract = old_state.apps.get_model("history", "SafeContract") self.assertEqual(SafeContract.objects.filter(erc20_block_number=0).count(), 3) + + def test_migration_forward_0073_safe_apps_links(self): + """ + Migrate safe apps links from 'apps.gnosis-safe.io' -> 'apps-portal.safe.global' + """ + + new_state = self.migrator.apply_initial_migration( + ("history", "0072_safecontract_banned_and_more"), + ) + + # Factories can be used as there are no database definition changes + # Make sure there are no issues with empty `origin` or `origin` lacking `url` + MultisigTransactionFactory(origin={"not_url": "random"}) + + # Make sure other urls are not affected + MultisigTransactionFactory( + origin={"url": "https://app.zerion.io", "name": "Zerion"} + ) + + # This origin must be replaced + MultisigTransactionFactory( + origin={ + "url": "https://apps.gnosis-safe.io/tx-builder/", + "name": "Transaction Builder", + } + ) + + new_state = self.migrator.apply_tested_migration( + ("history", "0073_safe_apps_links"), + ) + MultisigTransaction = new_state.apps.get_model("history", "MultisigTransaction") + self.assertCountEqual( + MultisigTransaction.objects.values_list("origin", flat=True), + [ + {"not_url": "random"}, + {"url": "https://app.zerion.io", "name": "Zerion"}, + { + "url": "https://apps-portal.safe.global/tx-builder/", + "name": "Transaction Builder", + }, + ], + ) + + def test_migration_backward_0073_safe_apps_links(self): + """ + Migrate safe apps links from 'apps.gnosis-safe.io' -> 'apps-portal.safe.global' + """ + + new_state = self.migrator.apply_initial_migration( + ("history", "0073_safe_apps_links"), + ) + + # Factories can be used as there are no database definition changes + # Make sure there are no issues with empty `origin` or `origin` lacking `url` + MultisigTransactionFactory(origin={"not_url": "random"}) + + # Make sure other urls are not affected + MultisigTransactionFactory( + origin={"url": "https://app.zerion.io", "name": "Zerion"} + ) + + # This origin must be replaced + MultisigTransactionFactory( + origin={ + "url": "https://apps-portal.safe.global/tx-builder/", + "name": "Transaction Builder", + } + ) + + new_state = self.migrator.apply_tested_migration( + ("history", "0072_safecontract_banned_and_more"), + ) + + MultisigTransaction = new_state.apps.get_model("history", "MultisigTransaction") + self.assertCountEqual( + MultisigTransaction.objects.values_list("origin", flat=True), + [ + {"not_url": "random"}, + {"url": "https://app.zerion.io", "name": "Zerion"}, + { + "url": "https://apps.gnosis-safe.io/tx-builder/", + "name": "Transaction Builder", + }, + ], + ) From f5144ad228c3e1d4574ac5a8036c811ddf913fba Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Mon, 26 Jun 2023 18:52:32 +0200 Subject: [PATCH 24/35] Set version 4.20.4 --- safe_transaction_service/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/safe_transaction_service/__init__.py b/safe_transaction_service/__init__.py index 728595d2..4e0f0a6b 100644 --- a/safe_transaction_service/__init__.py +++ b/safe_transaction_service/__init__.py @@ -1,4 +1,4 @@ -__version__ = "4.20.3" +__version__ = "4.20.4" __version_info__ = tuple( int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") From 254f0bfba888ed489d5695deefc0e4ca35b341c4 Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Mon, 26 Jun 
2023 15:01:36 +0200 Subject: [PATCH 25/35] Add swagger test - Detects issues with swagger breaking - Related to #1531 --- safe_transaction_service/history/tests/test_views.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/safe_transaction_service/history/tests/test_views.py b/safe_transaction_service/history/tests/test_views.py index dcbbd8a6..45c4c8f2 100644 --- a/safe_transaction_service/history/tests/test_views.py +++ b/safe_transaction_service/history/tests/test_views.py @@ -69,6 +69,16 @@ def test_about_view(self): response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_swagger_json_schema(self): + url = reverse("schema-json", args=(".json",)) + response = self.client.get(url, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_swagger_ui(self): + url = reverse("schema-swagger-ui") + response = self.client.get(url, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_about_ethereum_rpc_url(self): for url_name in ( "v1:history:about-ethereum-rpc", From 0f83abff097b2d7ebf21d1ca070fe2776cac73da Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Tue, 27 Jun 2023 13:46:43 +0200 Subject: [PATCH 26/35] Refactor reindexing - Increase default `ETH_REORG_BLOCKS` from `100` -> `150` - Don't log all addresses when reindexing - Remove not used anymore ALERT_OUT_OF_SYNC_EVENTS_THRESHOLD - Increase default number of blocks to reindex again during indexing --- config/settings/base.py | 10 +++------- .../history/services/index_service.py | 9 +++++---- safe_transaction_service/history/tasks.py | 4 ++-- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/config/settings/base.py b/config/settings/base.py index 2629f6c1..09fef421 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -415,7 +415,7 @@ "ETH_INTERNAL_TXS_BLOCK_PROCESS_LIMIT", default=10_000 ) ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN = env.int( - "ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN", default=6 + "ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN", default=10 ) ETH_INTERNAL_TXS_NUMBER_TRACE_BLOCKS = env.int( "ETH_INTERNAL_TXS_NUMBER_TRACE_BLOCKS", default=10 @@ -442,7 +442,7 @@ "ETH_EVENTS_BLOCK_PROCESS_LIMIT_MAX", default=0 ) # Maximum number of blocks to process together when searching for events. 0 == no limit. ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN = env.int( - "ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN", default=10 + "ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN", default=20 ) # Blocks to reindex again every indexer run when service is synced. Useful for RPCs not reliable ETH_EVENTS_GET_LOGS_CONCURRENCY = env.int( "ETH_EVENTS_GET_LOGS_CONCURRENCY", default=20 @@ -454,7 +454,7 @@ "ETH_EVENTS_UPDATED_BLOCK_BEHIND", default=24 * 60 * 60 // 15 ) # Number of blocks to consider an address 'almost updated'. ETH_REORG_BLOCKS = env.int( - "ETH_REORG_BLOCKS", default=100 if ETH_L2_NETWORK else 10 + "ETH_REORG_BLOCKS", default=150 if ETH_L2_NETWORK else 10 ) # Number of blocks from the current block number needed to consider a block valid/stable # Tokens @@ -496,10 +496,6 @@ ) ) -ALERT_OUT_OF_SYNC_EVENTS_THRESHOLD = env.float( - "ALERT_OUT_OF_SYNC_EVENTS_THRESHOLD", default=0.1 -) # Percentage of Safes allowed to be out of sync without alerting. 
By default 10% - # Events # ------------------------------------------------------------------------------ EVENTS_QUEUE_URL = env("EVENTS_QUEUE_URL", default=None) diff --git a/safe_transaction_service/history/services/index_service.py b/safe_transaction_service/history/services/index_service.py index 0291ca0b..80ebaba1 100644 --- a/safe_transaction_service/history/services/index_service.py +++ b/safe_transaction_service/history/services/index_service.py @@ -67,7 +67,6 @@ def __new__(cls): EthereumClientProvider(), settings.ETH_REORG_BLOCKS, settings.ETH_L2_NETWORK, - settings.ALERT_OUT_OF_SYNC_EVENTS_THRESHOLD, ) return cls.instance @@ -84,12 +83,10 @@ def __init__( ethereum_client: EthereumClient, eth_reorg_blocks: int, eth_l2_network: bool, - alert_out_of_sync_events_threshold: float, ): self.ethereum_client = ethereum_client self.eth_reorg_blocks = eth_reorg_blocks self.eth_l2_network = eth_l2_network - self.alert_out_of_sync_events_threshold = alert_out_of_sync_events_threshold def block_get_or_create_from_block_hash(self, block_hash: int): try: @@ -405,7 +402,11 @@ def _reindex( if not addresses: logger.warning("No addresses to process") else: - logger.info("Start reindexing addresses %s", addresses) + # Don't log all the addresses + addresses_str = ( + str(addresses) if len(addresses) < 10 else f"{addresses[:10]}..." + ) + logger.info("Start reindexing addresses %s", addresses_str) current_block_number = self.ethereum_client.current_block_number stop_block_number = ( min(current_block_number, to_block_number) diff --git a/safe_transaction_service/history/tasks.py b/safe_transaction_service/history/tasks.py index ab238dc0..d17b9da7 100644 --- a/safe_transaction_service/history/tasks.py +++ b/safe_transaction_service/history/tasks.py @@ -131,8 +131,8 @@ def index_erc20_events_out_of_sync_task( current_block_number = erc20_events_indexer.ethereum_client.current_block_number addresses = addresses or [ - x.address - for x in erc20_events_indexer.get_almost_updated_addresses( + almost_updated_address.address + for almost_updated_address in erc20_events_indexer.get_almost_updated_addresses( current_block_number )[:number_of_addresses] ] From cb54c2717f583d554472ccd600efbf63dfc2eb51 Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Wed, 28 Jun 2023 12:14:15 +0200 Subject: [PATCH 27/35] Reenable auto reindex of Safes when finding issues - It was disabled due to it taking a lot of resources - When setting one Safe address to reindex instead of a lot reindexing is really fast --- .env.test | 2 +- safe_transaction_service/history/tasks.py | 39 ++++++++++++------- .../history/tests/test_tasks.py | 14 ++++--- 3 files changed, 36 insertions(+), 19 deletions(-) diff --git a/.env.test b/.env.test index 1477f4f6..8a31f6e5 100644 --- a/.env.test +++ b/.env.test @@ -11,4 +11,4 @@ ETHEREUM_NODE_URL=http://localhost:8545 ETHEREUM_TRACING_NODE_URL=http://localhost:8545 ETH_HASH_BACKEND=pysha3 ENABLE_ANALYTICS=True -EVENTS_QUEUE_URL=amqp://guest:guest@rabbitmq/ +EVENTS_QUEUE_URL=amqp://guest:guest@localhost:5672/ diff --git a/safe_transaction_service/history/tasks.py b/safe_transaction_service/history/tasks.py index d17b9da7..c11b42ba 100644 --- a/safe_transaction_service/history/tasks.py +++ b/safe_transaction_service/history/tasks.py @@ -255,7 +255,7 @@ def process_decoded_internal_txs_task(self) -> Optional[int]: if safe_to_process not in banned_safes: count += 1 process_decoded_internal_txs_for_safe_task.delay( - safe_to_process, reindex_master_copies=False + safe_to_process, 
reindex_master_copies=True ) else: logger.info( @@ -291,7 +291,9 @@ def reindex_mastercopies_last_hours_task(self, hours: float = 2.5) -> Optional[i from_block_number, to_block_number, ) - reindex_master_copies_task.delay(from_block_number, to_block_number) + reindex_master_copies_task.delay( + from_block_number, to_block_number=to_block_number + ) @app.shared_task(bind=True, soft_time_limit=SOFT_TIMEOUT, time_limit=LOCK_TIMEOUT) @@ -316,13 +318,17 @@ def reindex_erc20_erc721_last_hours_task(self, hours: float = 2.5) -> Optional[i from_block_number, to_block_number, ) - # countdown of 30 minutes to execute this reindex after mastercopies reindex is finished - reindex_erc20_events_task.delay(from_block_number, to_block_number) + reindex_erc20_events_task.delay( + from_block_number, to_block_number=to_block_number + ) @app.shared_task(bind=True, soft_time_limit=SOFT_TIMEOUT, time_limit=LOCK_TIMEOUT) def reindex_master_copies_task( - self, from_block_number: int, to_block_number: int + self, + from_block_number: int, + to_block_number: Optional[int] = None, + addresses: Optional[ChecksumAddress] = None, ) -> None: """ Reindexes master copies @@ -331,19 +337,22 @@ def reindex_master_copies_task( with only_one_running_task(self): index_service = IndexServiceProvider() logger.info( - "Reindexing master copies from-block=%d to-block=%d", + "Reindexing master copies from-block=%d to-block=%s addresses=%s", from_block_number, to_block_number, + addresses, ) index_service.reindex_master_copies( - from_block_number=from_block_number, - to_block_number=to_block_number, + from_block_number, to_block_number=to_block_number, addresses=addresses ) @app.shared_task(bind=True, soft_time_limit=SOFT_TIMEOUT, time_limit=LOCK_TIMEOUT) def reindex_erc20_events_task( - self, from_block_number: int, to_block_number: int + self, + from_block_number: int, + to_block_number: Optional[int] = None, + addresses: Optional[ChecksumAddress] = None, ) -> None: """ Reindexes master copies @@ -352,13 +361,13 @@ def reindex_erc20_events_task( with only_one_running_task(self): index_service = IndexServiceProvider() logger.info( - "Reindexing erc20/721 events from-block=%d to-block=%d", + "Reindexing erc20/721 events from-block=%d to-block=%s addresses=%s", from_block_number, to_block_number, + addresses, ) index_service.reindex_erc20_events( - from_block_number=from_block_number, - to_block_number=to_block_number, + from_block_number, to_block_number=to_block_number, addresses=addresses ) @@ -421,8 +430,12 @@ def process_decoded_internal_txs_for_safe_task( block_number, to_block_number, ) + # Setting the safe address reindexing should be very fast reindex_master_copies_task.delay( - block_number, to_block_number + block_number, + # Reindex until current block + # to_block_number=to_block_number, + addresses=[safe_address], ) logger.info( "Safe-address=%s Processing traces again after reindexing", diff --git a/safe_transaction_service/history/tests/test_tasks.py b/safe_transaction_service/history/tests/test_tasks.py index fea4055f..8e2c48af 100644 --- a/safe_transaction_service/history/tests/test_tasks.py +++ b/safe_transaction_service/history/tests/test_tasks.py @@ -104,8 +104,9 @@ def test_reindex_mastercopies_last_hours_task( reindex_mastercopies_last_hours_task() reindex_master_copies_mock.assert_called_once_with( - from_block_number=ethereum_block_1.number, + ethereum_block_1.number, to_block_number=ethereum_block_3.number, + addresses=None, ) @patch.object(IndexService, "reindex_erc20_events") @@ -127,8 +128,9 @@ def 
test_reindex_erc20_erc721_last_hours_task( reindex_erc20_erc721_last_hours_task() reindex_erc20_events.assert_called_once_with( - from_block_number=ethereum_block_1.number, + ethereum_block_1.number, to_block_number=ethereum_block_3.number, + addresses=None, ) @patch.object(EthereumClient, "get_network", return_value=EthereumNetwork.GANACHE) @@ -211,8 +213,10 @@ def test_process_decoded_internal_txs_for_safe_task(self): process_decoded_internal_txs_for_safe_task.delay(safe_address) reprocess_mock.assert_called_with([safe_address]) reindex_mock.assert_called_with( - from_block_number=safe_status_0.block_number, - to_block_number=safe_status_5.block_number, + safe_status_0.block_number, + # to_block_number=safe_status_5.block_number, + to_block_number=None, + addresses=[safe_address], ) self.assertIn( f"Safe-address={safe_address} A problem was found in SafeStatus " @@ -231,7 +235,7 @@ def test_process_decoded_internal_txs_for_safe_task(self): ) self.assertIn( f"Reindexing master copies from-block={safe_status_0.internal_tx.ethereum_tx.block_id} " - f"to-block={safe_status_5.block_number}", + f"to-block=None addresses={[safe_address]}", cm.output[4], ) self.assertIn( From c76c70861ab79136ae4c3ed7d0a9abdb2e1518cc Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Wed, 28 Jun 2023 13:38:00 +0200 Subject: [PATCH 28/35] Set version 4.21.0 --- safe_transaction_service/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/safe_transaction_service/__init__.py b/safe_transaction_service/__init__.py index 4e0f0a6b..f3062e41 100644 --- a/safe_transaction_service/__init__.py +++ b/safe_transaction_service/__init__.py @@ -1,4 +1,4 @@ -__version__ = "4.20.4" +__version__ = "4.21.0" __version_info__ = tuple( int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") From 1f498620d003be5a4421c2a1f5b26d5a701debe8 Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 12:14:40 +0200 Subject: [PATCH 29/35] Fix reorg strategy - I don't know why service was resetting to `250 blocks` before the reorg when finding a reorg. That's a lot and I guess is a typo. It was reduced to 10. Even leaving it to 0 should be alright. - Erc20/721 indexing has no protection for reorgs. If a reorg happened it will keep indexing as it nothing happened, not reindexing the reorged blocks. Now if database block was reduced indexed status will not updated. - Increase ETH_REORG_BLOCKS to 200 for every chain but mainnet. Looks like a safe assumption for all networks - Add more logging to `ReorgService` --- config/settings/base.py | 2 +- .../history/indexers/erc20_events_indexer.py | 14 +++++++-- .../history/indexers/ethereum_indexer.py | 3 +- safe_transaction_service/history/models.py | 20 +++++++++---- .../history/services/reorg_service.py | 22 ++++++++++++-- .../history/tests/test_models.py | 29 +++++++++++++++++++ 6 files changed, 76 insertions(+), 14 deletions(-) diff --git a/config/settings/base.py b/config/settings/base.py index 09fef421..4a316517 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -454,7 +454,7 @@ "ETH_EVENTS_UPDATED_BLOCK_BEHIND", default=24 * 60 * 60 // 15 ) # Number of blocks to consider an address 'almost updated'. 
ETH_REORG_BLOCKS = env.int( - "ETH_REORG_BLOCKS", default=150 if ETH_L2_NETWORK else 10 + "ETH_REORG_BLOCKS", default=200 if ETH_L2_NETWORK else 10 ) # Number of blocks from the current block number needed to consider a block valid/stable # Tokens diff --git a/safe_transaction_service/history/indexers/erc20_events_indexer.py b/safe_transaction_service/history/indexers/erc20_events_indexer.py index 9ebe609d..224756c0 100644 --- a/safe_transaction_service/history/indexers/erc20_events_indexer.py +++ b/safe_transaction_service/history/indexers/erc20_events_indexer.py @@ -230,6 +230,16 @@ def get_minimum_block_number( def update_monitored_address( self, addresses: Sequence[str], from_block_number: int, to_block_number: int ) -> int: - return int( - IndexingStatus.objects.set_erc20_721_indexing_status(to_block_number + 1) + # Keep indexing going on the next block + new_to_block_number = to_block_number + 1 + result = IndexingStatus.objects.set_erc20_721_indexing_status( + new_to_block_number, from_block_number=from_block_number ) + if not result: + logger.warning( + "%s: Possible reorg - Cannot update erc20_721 indexing status from-block-number=%d to-block-number=%d", + self.__class__.__name__, + from_block_number, + to_block_number, + ) + return int(result) diff --git a/safe_transaction_service/history/indexers/ethereum_indexer.py b/safe_transaction_service/history/indexers/ethereum_indexer.py index 30ab550e..c1273031 100644 --- a/safe_transaction_service/history/indexers/ethereum_indexer.py +++ b/safe_transaction_service/history/indexers/ethereum_indexer.py @@ -278,8 +278,7 @@ def update_monitored_address( **{ "address__in": addresses, self.database_field - + "__gte": from_block_number - - 1, # Protect in case of reorg + + "__gte": from_block_number, # Protect in case of reorg self.database_field + "__lt": new_to_block_number, # Don't update to a lower block number } diff --git a/safe_transaction_service/history/models.py b/safe_transaction_service/history/models.py index 00683b20..73d53d41 100644 --- a/safe_transaction_service/history/models.py +++ b/safe_transaction_service/history/models.py @@ -166,12 +166,20 @@ class IndexingStatusManager(models.Manager): def get_erc20_721_indexing_status(self) -> "IndexingStatus": return self.get(indexing_type=IndexingStatusType.ERC20_721_EVENTS.value) - def set_erc20_721_indexing_status(self, block_number: int) -> bool: - return bool( - self.filter(indexing_type=IndexingStatusType.ERC20_721_EVENTS.value).update( - block_number=block_number - ) - ) + def set_erc20_721_indexing_status( + self, block_number: int, from_block_number: Optional[int] = None + ) -> bool: + """ + + :param block_number: + :param from_block_number: If provided, only update the field if bigger than `from_block_number`, to protect + from reorgs + :return: + """ + queryset = self.filter(indexing_type=IndexingStatusType.ERC20_721_EVENTS.value) + if from_block_number is not None: + queryset = queryset.filter(block_number__gte=from_block_number) + return bool(queryset.update(block_number=block_number)) class IndexingStatus(models.Model): diff --git a/safe_transaction_service/history/services/reorg_service.py b/safe_transaction_service/history/services/reorg_service.py index 71740a93..f556f76e 100644 --- a/safe_transaction_service/history/services/reorg_service.py +++ b/safe_transaction_service/history/services/reorg_service.py @@ -40,7 +40,7 @@ def __init__( self, ethereum_client: EthereumClient, eth_reorg_blocks: int, - eth_reorg_rewind_blocks: Optional[int] = 250, + 
eth_reorg_rewind_blocks: Optional[int] = 10, ): """ :param ethereum_client: @@ -83,6 +83,7 @@ def check_reorgs(self) -> Optional[int]: EthereumBlock.objects.not_confirmed(to_block_number=to_block) .only("number", "block_hash", "confirmed") .order_by("number") + .iterator() ): blockchain_block, blockchain_next_block = self.ethereum_client.get_blocks( [database_block.number, database_block.number + 1], @@ -93,8 +94,19 @@ def check_reorgs(self) -> Optional[int]: == HexBytes(blockchain_next_block["parentHash"]) == HexBytes(database_block.block_hash) ): + logger.debug( + "Block with number=%d and hash=%s is matching blockchain one, setting as confirmed", + database_block.number, + HexBytes(blockchain_block["hash"]).hex(), + ) database_block.set_confirmed() else: + logger.warning( + "Block with number=%d and hash=%s is not matching blockchain hash=%s, reorg found", + database_block.number, + HexBytes(database_block.block_hash).hex(), + HexBytes(blockchain_block["hash"]).hex(), + ) return database_block.number @transaction.atomic @@ -129,10 +141,14 @@ def recover_from_reorg(self, reorg_block_number: int) -> int: ) updated = self.reset_all_to_block(safe_reorg_block_number) - EthereumBlock.objects.filter(number__gte=reorg_block_number).delete() + number_deleted_blocks, _ = EthereumBlock.objects.filter( + number__gte=reorg_block_number + ).delete() logger.warning( - "Reorg of block-number=%d fixed, %d elements updated", + "Reorg of block-number=%d fixed, indexing was reset to safe block=%d, %d elements updated and %d blocks deleted", reorg_block_number, + safe_reorg_block_number, updated, + number_deleted_blocks, ) return updated diff --git a/safe_transaction_service/history/tests/test_models.py b/safe_transaction_service/history/tests/test_models.py index 38f70dd0..38c1fd7e 100644 --- a/safe_transaction_service/history/tests/test_models.py +++ b/safe_transaction_service/history/tests/test_models.py @@ -152,6 +152,35 @@ def test_indexing_status(self): # IndexingStatus should be inserted with a migration and `indexing_type` is unique IndexingStatusFactory(indexing_type=0) + def test_set_erc20_721_indexing_status(self): + self.assertTrue(IndexingStatus.objects.set_erc20_721_indexing_status(5)) + self.assertEqual( + IndexingStatus.objects.get_erc20_721_indexing_status().block_number, 5 + ) + + self.assertTrue(IndexingStatus.objects.set_erc20_721_indexing_status(2)) + self.assertEqual( + IndexingStatus.objects.get_erc20_721_indexing_status().block_number, 2 + ) + + self.assertTrue( + IndexingStatus.objects.set_erc20_721_indexing_status( + 10, from_block_number=2 + ) + ) + self.assertEqual( + IndexingStatus.objects.get_erc20_721_indexing_status().block_number, 10 + ) + + self.assertFalse( + IndexingStatus.objects.set_erc20_721_indexing_status( + 20, from_block_number=11 + ) + ) + self.assertEqual( + IndexingStatus.objects.get_erc20_721_indexing_status().block_number, 10 + ) + class TestMultisigTransaction(TestCase): def test_data_should_be_decoded(self): From c756235fc5e2fd232db213569d10abd57af1c6ba Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 11:03:01 +0200 Subject: [PATCH 30/35] Reindex until next issue - Currently a full reindex was triggered - It can interfere with reorgs --- safe_transaction_service/history/tasks.py | 11 +++++++---- safe_transaction_service/history/tests/test_tasks.py | 5 ++--- safe_transaction_service/utils/tasks.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/safe_transaction_service/history/tasks.py 
b/safe_transaction_service/history/tasks.py index c11b42ba..c6106fa3 100644 --- a/safe_transaction_service/history/tasks.py +++ b/safe_transaction_service/history/tasks.py @@ -334,7 +334,9 @@ def reindex_master_copies_task( Reindexes master copies """ with contextlib.suppress(LockError): - with only_one_running_task(self): + with only_one_running_task( + self, lock_name_suffix=str(addresses) if addresses else None + ): index_service = IndexServiceProvider() logger.info( "Reindexing master copies from-block=%d to-block=%s addresses=%s", @@ -358,7 +360,9 @@ def reindex_erc20_events_task( Reindexes master copies """ with contextlib.suppress(LockError): - with only_one_running_task(self): + with only_one_running_task( + self, lock_name_suffix=str(addresses) if addresses else None + ): index_service = IndexServiceProvider() logger.info( "Reindexing erc20/721 events from-block=%d to-block=%s addresses=%s", @@ -433,8 +437,7 @@ def process_decoded_internal_txs_for_safe_task( # Setting the safe address reindexing should be very fast reindex_master_copies_task.delay( block_number, - # Reindex until current block - # to_block_number=to_block_number, + to_block_number=to_block_number, addresses=[safe_address], ) logger.info( diff --git a/safe_transaction_service/history/tests/test_tasks.py b/safe_transaction_service/history/tests/test_tasks.py index 8e2c48af..0b713285 100644 --- a/safe_transaction_service/history/tests/test_tasks.py +++ b/safe_transaction_service/history/tests/test_tasks.py @@ -214,8 +214,7 @@ def test_process_decoded_internal_txs_for_safe_task(self): reprocess_mock.assert_called_with([safe_address]) reindex_mock.assert_called_with( safe_status_0.block_number, - # to_block_number=safe_status_5.block_number, - to_block_number=None, + to_block_number=safe_status_5.block_number, addresses=[safe_address], ) self.assertIn( @@ -235,7 +234,7 @@ def test_process_decoded_internal_txs_for_safe_task(self): ) self.assertIn( f"Reindexing master copies from-block={safe_status_0.internal_tx.ethereum_tx.block_id} " - f"to-block=None addresses={[safe_address]}", + f"to-block={safe_status_5.block_number} addresses={[safe_address]}", cm.output[4], ) self.assertIn( diff --git a/safe_transaction_service/utils/tasks.py b/safe_transaction_service/utils/tasks.py index 1bb63b28..dc425858 100644 --- a/safe_transaction_service/utils/tasks.py +++ b/safe_transaction_service/utils/tasks.py @@ -77,7 +77,7 @@ def only_one_running_task( if WORKER_STOPPED: raise LockError("Worker is stopping") redis = get_redis() - lock_name = f"tasks:{task.name}" + lock_name = f"locks:tasks:{task.name}" if lock_name_suffix: lock_name += f":{lock_name_suffix}" with redis.lock( From 0f18446b9c3aa31c0d824182a897202119bbf5ed Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 11:09:47 +0200 Subject: [PATCH 31/35] Set version v4.21.1 --- safe_transaction_service/__init__.py | 2 +- safe_transaction_service/contracts/tx_decoder.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/safe_transaction_service/__init__.py b/safe_transaction_service/__init__.py index f3062e41..28f9d547 100644 --- a/safe_transaction_service/__init__.py +++ b/safe_transaction_service/__init__.py @@ -1,4 +1,4 @@ -__version__ = "4.21.0" +__version__ = "4.21.1" __version_info__ = tuple( int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") diff --git a/safe_transaction_service/contracts/tx_decoder.py b/safe_transaction_service/contracts/tx_decoder.py index f4e2eacd..779460d2 100644 --- 
a/safe_transaction_service/contracts/tx_decoder.py +++ b/safe_transaction_service/contracts/tx_decoder.py @@ -539,8 +539,10 @@ def get_contract_abi( :param address: Contract address :return: Dictionary of function selects with ABIFunction if found, `None` otherwise """ - abis = ContractAbi.objects.filter(contracts__address=address).values_list( - "abi", flat=True + abis = ( + ContractAbi.objects.filter(contracts__address=address) + .order_by("relevance") + .values_list("abi", flat=True) ) if abis: return self._generate_selectors_with_abis_from_abi(abis[0]) @@ -564,7 +566,7 @@ def get_abi_function( and selector in contract_selectors_with_abis ): # If the selector is available in the abi specific for the address we will use that one - # Otherwise we fallback to the general abi that matches the selector + # Otherwise we fall back to the general abi that matches the selector return contract_selectors_with_abis[selector] return self.fn_selectors_with_abis[selector] From 5935251c3b568b3108425f2c4bef8decd448a3cc Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Wed, 28 Jun 2023 14:03:02 +0200 Subject: [PATCH 32/35] Add maintenance commands to README Co-authored-by: Frederico Sabino <3332770+fmrsabino@users.noreply.github.com> --- README.md | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index aa1a2924..e4902c85 100644 --- a/README.md +++ b/README.md @@ -142,11 +142,27 @@ docker exec -it safe-transaction-service-web-1 python manage.py createsuperuser - [v1.3.0 L2](https://github.com/safe-global/safe-deployments/blob/main/src/assets/v1.3.0/gnosis_safe_l2.json) - [Other related contracts and previous Safe versions](https://github.com/safe-global/safe-deployments/blob/main/src/assets) -## Troubleshooting +## Service maintenance -### Issues installing grpc on a Mac M1 +Service can run into some issues when running in production: -If you face issues installing the `grpc` dependency locally (required by this project) on a M1 chip, set `GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1` and `GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1` and then try to install the dependency again. +### Indexing issues +You can tell there are indexing issues if: +- Executed transactions are missing from the API (`all-transactions`, `multisig-transactions`, `module-transactions`... endpoints). If you use the [Safe{Wallet} Web client](https://github.com/safe-global/safe-wallet-web) you should check what is the current state of the Safe Client Gateway cache as it might have outdated data. +- Asset transfers (ERC20/721) are missing from `all-transactions` or `transfers` endpoints. +- You see error logs such as "Cannot remove owner" or similar inconsistent errors when `worker-indexer` is processing decoded data. + +There are multiple options for this. Connect to either `web` or `worker` instances. Running commands inside of `tmux` is recommended +(installed by default): +- `python manage.py check_index_problems`: it will try to automatically fix missing transactions. +Tokens related transactions (ERC20/721) will not be fixed with this method. This method will take a while, as it needs to compare +database data with blockchain data for every Safe. +- `python manage.py reindex_master_copies --from-block-number X --addresses 0x111 0x222`: if you know the first problematic block, +it's faster if you trigger a manual reindex. 
`--addresses` argument is optional, but if you know the problematic Safes providing +them will make reindexing **way** faster, as only those Safes will be reindexed (instead of the entire collection). + +If you see ERC20/ERC721 transfers missing: +- `python manage.py reindex_erc20 --from-block-number X --addresses 0x111 0x222`: same logic as with `reindex_master_copies`. ## FAQ ### Why `/v1/safes/{address}` endpoint shows a nonce that indicates that a transaction was executed but the transaction is not shown or marked as executed in the other endpoints? @@ -176,5 +192,11 @@ https://docs.safe.global/learn/safe-core/safe-core-api/available-services ### What means banned field in SafeContract model? The `banned` field in the `SafeContract` model is used to prevent indexing of certain Safes that have an unsupported `MasterCopy` or unverified proxies that have issues during indexing. This field does not remove the banned Safe and indexing can be resumed once the issue has been resolved. +## Troubleshooting + +### Issues installing grpc on a Mac M1 + +If you face issues installing the `grpc` dependency locally (required by this project) on a M1 chip, set `GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1` and `GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1` and then try to install the dependency again. + ## Contributors [See contributors](https://github.com/safe-global/safe-transaction-service/graphs/contributors) From b18026b9d01de9a3ed89dbb37aa462d96437c4ae Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 14:18:01 +0200 Subject: [PATCH 33/35] Return correct chainId on About RPC view - If chain is not defined in EthereumNetwork, -1 was returned --- safe_transaction_service/history/views.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/safe_transaction_service/history/views.py b/safe_transaction_service/history/views.py index 0b480447..11b118d2 100644 --- a/safe_transaction_service/history/views.py +++ b/safe_transaction_service/history/views.py @@ -24,12 +24,13 @@ from rest_framework.response import Response from rest_framework.views import APIView -from gnosis.eth import EthereumClient, EthereumClientProvider +from gnosis.eth import EthereumClient, EthereumClientProvider, EthereumNetwork from gnosis.eth.constants import NULL_ADDRESS from gnosis.eth.utils import fast_is_checksum_address from gnosis.safe import CannotEstimateGas from safe_transaction_service import __version__ +from safe_transaction_service.utils.ethereum import get_chain_id from safe_transaction_service.utils.utils import parse_boolean_query_param from . 
import filters, pagination, serializers @@ -116,11 +117,12 @@ def _get_info(self, ethereum_client: EthereumClient) -> Dict[str, Any]: except (IOError, ValueError): syncing = "Error getting syncing status" - ethereum_network = ethereum_client.get_network() + ethereum_chain_id = get_chain_id() + ethereum_network = EthereumNetwork(ethereum_chain_id) return { "version": client_version, "block_number": ethereum_client.current_block_number, - "chain_id": ethereum_network.value, + "chain_id": ethereum_chain_id, "chain": ethereum_network.name, "syncing": syncing, } From f6256ab9f2ef503ccbc5be8c82940328dc14c95c Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 14:37:00 +0200 Subject: [PATCH 34/35] Stop indexer if index status on database cannot be updated - That means that someone modified it, probably the Reorg checker - Indexer should be interrupted and will be restarted again --- .../history/indexers/erc20_events_indexer.py | 10 +++++----- .../history/indexers/ethereum_indexer.py | 15 +++++++++++---- .../history/services/reorg_service.py | 2 +- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/safe_transaction_service/history/indexers/erc20_events_indexer.py b/safe_transaction_service/history/indexers/erc20_events_indexer.py index 224756c0..c93f590c 100644 --- a/safe_transaction_service/history/indexers/erc20_events_indexer.py +++ b/safe_transaction_service/history/indexers/erc20_events_indexer.py @@ -227,19 +227,19 @@ def get_minimum_block_number( ) -> Optional[int]: return IndexingStatus.objects.get_erc20_721_indexing_status().block_number - def update_monitored_address( + def update_monitored_addresses( self, addresses: Sequence[str], from_block_number: int, to_block_number: int - ) -> int: + ) -> bool: # Keep indexing going on the next block new_to_block_number = to_block_number + 1 - result = IndexingStatus.objects.set_erc20_721_indexing_status( + updated = IndexingStatus.objects.set_erc20_721_indexing_status( new_to_block_number, from_block_number=from_block_number ) - if not result: + if not updated: logger.warning( "%s: Possible reorg - Cannot update erc20_721 indexing status from-block-number=%d to-block-number=%d", self.__class__.__name__, from_block_number, to_block_number, ) - return int(result) + return updated diff --git a/safe_transaction_service/history/indexers/ethereum_indexer.py b/safe_transaction_service/history/indexers/ethereum_indexer.py index c1273031..72349ad0 100644 --- a/safe_transaction_service/history/indexers/ethereum_indexer.py +++ b/safe_transaction_service/history/indexers/ethereum_indexer.py @@ -256,9 +256,9 @@ def get_not_updated_addresses( ) return not_updated_addresses - def update_monitored_address( + def update_monitored_addresses( self, addresses: Sequence[str], from_block_number: int, to_block_number: int - ) -> int: + ) -> bool: """ :param addresses: Addresses to have the block number updated :param from_block_number: Make sure that no reorg has happened checking that block number was not rollbacked @@ -284,7 +284,8 @@ def update_monitored_address( } ).update(**{self.database_field: new_to_block_number}) - if updated_addresses != len(addresses): + all_updated = updated_addresses == len(addresses) + if not all_updated: logger.warning( "%s: Possible reorg - Cannot update all indexed addresses... 
Updated %d/%d addresses " "from-block-number=%d to-block-number=%d", @@ -403,7 +404,13 @@ def process_addresses( processed_elements = self.process_elements(elements) - self.update_monitored_address(addresses, from_block_number, to_block_number) + if not self.update_monitored_addresses( + addresses, from_block_number, to_block_number + ): + raise ValueError( + "Possible reorg, indexed addresses were updated while indexer was running" + ) + return processed_elements, from_block_number, to_block_number, updated def start(self) -> Tuple[int, int]: diff --git a/safe_transaction_service/history/services/reorg_service.py b/safe_transaction_service/history/services/reorg_service.py index f556f76e..bd993bc0 100644 --- a/safe_transaction_service/history/services/reorg_service.py +++ b/safe_transaction_service/history/services/reorg_service.py @@ -49,7 +49,7 @@ def __init__( :param eth_reorg_rewind_blocks: Number of blocks to rewind indexing when a reorg is found """ self.ethereum_client = ethereum_client - self.eth_reorg_blocks = eth_reorg_blocks # + self.eth_reorg_blocks = eth_reorg_blocks self.eth_reorg_rewind_blocks = eth_reorg_rewind_blocks # List with functions for database models to recover from reorgs From c43ae12147ed198f2ae4e064e9aa4f6222a1559a Mon Sep 17 00:00:00 2001 From: Uxio Fuentefria Date: Thu, 29 Jun 2023 14:43:17 +0200 Subject: [PATCH 35/35] Set version 4.21.2 --- safe_transaction_service/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/safe_transaction_service/__init__.py b/safe_transaction_service/__init__.py index 28f9d547..a4513dbd 100644 --- a/safe_transaction_service/__init__.py +++ b/safe_transaction_service/__init__.py @@ -1,4 +1,4 @@ -__version__ = "4.21.1" +__version__ = "4.21.2" __version_info__ = tuple( int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".")
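
Patches 29 and 34 turn the ERC20/721 cursor update into a guarded write: `set_erc20_721_indexing_status` only advances the stored block number when it is still at or above `from_block_number`, and the indexer now raises instead of silently continuing when that guarded update fails. Below is a minimal sketch of the same compare-and-set pattern, using an in-memory stand-in for the `IndexingStatus` row rather than the service's actual ORM code; names are illustrative only.

```python
# Minimal sketch of the reorg guard from patches 29/34: the indexing cursor
# only moves forward if nobody (e.g. the reorg service) rewound it while the
# indexer was processing a block range. The in-memory "store" below is a
# stand-in for the IndexingStatus row, not the service's real implementation.
from typing import Optional


class IndexingStatusStore:
    def __init__(self, block_number: int = 0):
        self.block_number = block_number  # Stands in for the IndexingStatus row

    def set_indexing_status(
        self, block_number: int, from_block_number: Optional[int] = None
    ) -> bool:
        # Guarded update: refuse to advance if the stored cursor was rewound
        # below the range that was just indexed (a reorg recovery happened).
        if from_block_number is not None and self.block_number < from_block_number:
            return False
        self.block_number = block_number
        return True


def process_range(store: IndexingStatusStore, from_block: int, to_block: int) -> None:
    # ... index events for [from_block, to_block] here ...
    if not store.set_indexing_status(to_block + 1, from_block_number=from_block):
        # Mirrors the indexer raising when the update fails: stop and let the
        # task restart from the rewound cursor instead of skipping blocks.
        raise ValueError("Possible reorg, indexing status was rewound while indexing")


if __name__ == "__main__":
    store = IndexingStatusStore(block_number=100)
    process_range(store, 100, 150)   # Cursor advances to 151
    store.block_number = 90          # Simulate the reorg service rewinding the cursor
    try:
        process_range(store, 100, 150)
    except ValueError as exc:
        print(exc)                   # Guard trips and the indexer run aborts
```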
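
Patch 30 also namespaces task locks under `locks:tasks:` and lets the reindex tasks pass the reindexed addresses as a lock-name suffix, so a targeted per-Safe reindex does not contend with a broader run of the same task. A small sketch of that lock-name composition follows; the helper is hypothetical and only illustrates the naming scheme, it is not the service's `only_one_running_task` implementation.

```python
# Sketch of the lock-name composition from patch 30: task locks live under a
# "locks:tasks:" prefix, and an optional suffix (here, the reindexed
# addresses) gives per-address reindex runs their own Redis lock. The helper
# name and example address are illustrative only.
from typing import Optional, Sequence


def build_task_lock_name(
    task_name: str, addresses: Optional[Sequence[str]] = None
) -> str:
    lock_name = f"locks:tasks:{task_name}"
    if addresses:
        # Same idea as passing `lock_name_suffix=str(addresses)` to the task lock
        lock_name += f":{addresses}"
    return lock_name


if __name__ == "__main__":
    print(build_task_lock_name("reindex_master_copies_task"))
    # -> locks:tasks:reindex_master_copies_task
    print(
        build_task_lock_name(
            "reindex_master_copies_task",
            ["0x5afE3855358E112B5647B952709E6165e1c1eEEe"],
        )
    )
    # -> locks:tasks:reindex_master_copies_task:['0x5afE...'] (distinct lock per address set)
```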