diff --git a/.dockerignore b/.dockerignore index fe9cd312..0a407ba9 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,10 +2,11 @@ .dockerignore .gitignore .git +.github .env .pylintrc __pycache__ *.pyc *.egg-info .idea/ - +.vscode diff --git a/.env.custom b/.env.custom new file mode 100644 index 00000000..109eeb00 --- /dev/null +++ b/.env.custom @@ -0,0 +1 @@ +APPLICATION_VERSION=5.0.0 \ No newline at end of file diff --git a/.env.dev b/.env.dev index b37703ce..0132f376 100644 --- a/.env.dev +++ b/.env.dev @@ -7,3 +7,4 @@ ETHEREUM_TRACING_NODE_URL= ETH_L2_NETWORK=1 REDIS_URL=redis://redis:6379/0 CELERY_BROKER_URL=amqp://guest:guest@rabbitmq/ +CSRF_TRUSTED_ORIGINS="http://localhost:8000" diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..532424f8 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +13c96f0b5c51b6f20b5f9c4370ae8141a2809156 +b26efe58f1c4bf89c5461c38065bc3c51ba4af04 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2b8b1f3b..26d7482a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1,3 @@ # These owners will be the default owners for everything in # the repo. Unless a later match takes precedence. -* @safe-global/safe-services +* @safe-global/core-api diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 324e9b75..4da3abba 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,27 +2,28 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort - repo: https://github.com/psf/black - rev: 22.12.0 + rev: 24.2.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 7.0.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: + - id: check-added-large-files - id: check-docstring-first - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml - id: debug-statements - id: detect-private-key - - id: requirements-txt-fixer - - id: trailing-whitespace - id: end-of-file-fixer types: [python] - - id: check-yaml - - id: check-added-large-files + - id: requirements-txt-fixer + - id: trailing-whitespace diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt deleted file mode 100644 index c4f59daf..00000000 --- a/CONTRIBUTORS.txt +++ /dev/null @@ -1 +0,0 @@ -Gnosis diff --git a/README.md b/README.md index b5b5bea9..8a77831c 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -![Build Status](https://github.com/safe-global/safe-transaction-service/workflows/Python%20CI/badge.svg?branch=master) -[![Coverage Status](https://coveralls.io/repos/github/safe-global/safe-transaction-service/badge.svg?branch=master)](https://coveralls.io/github/safe-global/safe-transaction-service?branch=master) +[![CI/CD](https://github.com/safe-global/safe-transaction-service/actions/workflows/python.yml/badge.svg)](https://github.com/safe-global/safe-transaction-service/actions/workflows/python.yml) +[![Coverage Status](https://coveralls.io/repos/github/safe-global/safe-transaction-service/badge.svg?branch=main)](https://coveralls.io/github/safe-global/safe-transaction-service?branch=main) [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) -![Python 3.10](https://img.shields.io/badge/Python-3.10-blue.svg) -![Django 4](https://img.shields.io/badge/Django-4-blue.svg) +![Python 
3.12](https://img.shields.io/badge/Python-3.12-blue.svg) +![Django 5](https://img.shields.io/badge/Django-5-blue.svg) [![Docker Image Version (latest semver)](https://img.shields.io/docker/v/safeglobal/safe-transaction-service?label=Docker&sort=semver)](https://hub.docker.com/r/safeglobal/safe-transaction-service) # Safe Transaction Service @@ -16,7 +16,7 @@ Transactions can also be sent to the service to allow offchain collecting of sig a transaction that is pending to be sent to the blockchain. [Swagger (Mainnet version)](https://safe-transaction-mainnet.safe.global/) -[Swagger (Göerli version)](https://safe-transaction-goerli.safe.global/) +[More networks](https://docs.safe.global/api-supported-networks) ## Index of contents @@ -70,7 +70,7 @@ For more parameters check [base.py](config/settings/base.py) file. - If the network is not supported yet [contracts can be deployed using the deployment instructions ](https://github.com/safe-global/safe-contracts/tree/v1.3.0/contracts) -and then a PR should be provided [adding the deployment block number and the address](https://github.com/safe-global/safe-eth-py/blob/master/gnosis/safe/addresses.py) (address will be the same for every network). +and then a PR should be provided [adding the deployment block number and the address](https://github.com/safe-global/safe-eth-py/blob/main/gnosis/safe/addresses.py) (address will be the same for every network). - Only `ProxyFactory` and `GnosisSafeL2` must be configured. `+L2` must be added to the `Safe L2` contract versions, so the service knows the contract can be indexed using events. For us to accept the PR network must be on https://github.com/ethereum-lists/chains . - You can always set this up later using the **admin panel** if your network is not supported, going to the **Master Copies** and **Proxy Factories**. - **We recommend** using event indexing for every network where transaction fees are not relevant, so a tracing node is not required and everything can be indexed using events with the `Safe L2` version. @@ -184,19 +184,19 @@ and [have a proper RPC](https://docs.safe.global/safe-core-api/rpc-requirements) Aside from using standard HTTP requests: - [Safe{Core} API Kit](https://github.com/safe-global/safe-core-sdk/tree/main/packages/api-kit) - [Safe-eth-py](https://github.com/safe-global/safe-eth-py) -- [Safe CLI](https://github.com/5afe/safe-cli): It has a `tx-service` mode to gather offchain signatures. +- [Safe CLI](https://github.com/safe-global/safe-cli): It has a `tx-service` mode to gather offchain signatures. ### What chains do you officially support? -https://docs.safe.global/safe-core-api/available-services +https://docs.safe.global/api-supported-networks ### What means banned field in SafeContract model? The `banned` field in the `SafeContract` model is used to prevent indexing of certain Safes that have an unsupported `MasterCopy` or unverified proxies that have issues during indexing. This field does not remove the banned Safe and indexing can be resumed once the issue has been resolved. ## Troubleshooting -### Issues installing grpc on a Mac M1 +### Issues installing grpc on an Apple silicon system -If you face issues installing the `grpc` dependency locally (required by this project) on a M1 chip, set `GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1` and `GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1` and then try to install the dependency again. 
+If you face issues installing the `grpc` dependency locally (required by this project) on an Apple silicon chip, set `GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1` and `GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1` and then try to install the dependency again. ## Contributors [See contributors](https://github.com/safe-global/safe-transaction-service/graphs/contributors) diff --git a/config/gunicorn.py b/config/gunicorn.py index 247ed43e..83562955 100644 --- a/config/gunicorn.py +++ b/config/gunicorn.py @@ -1,6 +1,7 @@ """ Store gunicorn variables in this file, so they can be read by Django """ + import os gunicorn_request_timeout = os.environ.get("WEB_WORKER_TIMEOUT", 60) diff --git a/config/settings/base.py b/config/settings/base.py index 39e8a976..21514aad 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -40,8 +40,6 @@ SITE_ID = 1 # https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n USE_I18N = True -# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n -USE_L10N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-tz USE_TZ = True # https://docs.djangoproject.com/en/3.2/ref/settings/#force-script-name @@ -106,13 +104,14 @@ "rest_framework.authtoken", ] LOCAL_APPS = [ + "safe_transaction_service.account_abstraction.apps.AccountAbstractionConfig", "safe_transaction_service.analytics.apps.AnalyticsConfig", "safe_transaction_service.contracts.apps.ContractsConfig", + "safe_transaction_service.events.apps.EventsConfig", "safe_transaction_service.history.apps.HistoryConfig", "safe_transaction_service.notifications.apps.NotificationsConfig", "safe_transaction_service.safe_messages.apps.SafeMessagesConfig", "safe_transaction_service.tokens.apps.TokensConfig", - "safe_transaction_service.events.apps.EventsConfig", ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS @@ -229,15 +228,21 @@ CELERY_BROKER_HEARTBEAT = env.int("CELERY_BROKER_HEARTBEAT", default=0) # https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-broker_connection_max_retries -CELERY_BROKER_CONNECTION_MAX_RETRIES = env.int( - "CELERY_BROKER_CONNECTION_MAX_RETRIES", default=0 +CELERY_BROKER_CONNECTION_MAX_RETRIES = ( + value + if (value := env.int("CELERY_BROKER_CONNECTION_MAX_RETRIES", default=-1)) > 0 + else None ) # https://docs.celeryq.dev/en/stable/userguide/configuration.html#broker-channel-error-retry CELERY_BROKER_CHANNEL_ERROR_RETRY = env.bool( "CELERY_BROKER_CHANNEL_ERROR_RETRY", default=True ) -# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend -CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND", default="redis://") +# https://docs.celeryq.dev/en/stable/userguide/configuration.html#broker-connection-retry-on-startup +CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = env.bool( + "CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP", default=True +) +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#task-result-backend-settings +CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND", default=None) # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content CELERY_ACCEPT_CONTENT = ["json"] # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer @@ -421,6 +426,9 @@ "handlers": ["console", "mail_admins"], "propagate": True, }, + "pika": { + "propagate": True if DEBUG else False, + }, }, } @@ -430,6 +438,18 @@ # 
------------------------------------------------------------------------------ ETHEREUM_NODE_URL = env("ETHEREUM_NODE_URL", default=None) +# Ethereum 4337 Bundler RPC +# ------------------------------------------------------------------------------ +ETHEREUM_4337_BUNDLER_URL = env("ETHEREUM_4337_BUNDLER_URL", default=None) +ETHEREUM_4337_SUPPORTED_ENTRY_POINTS = env.list( + "ETHEREUM_4337_SUPPORTED_ENTRY_POINTS", + default=["0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789"], +) +ETHEREUM_4337_SUPPORTED_SAFE_MODULES = env.list( + "ETHEREUM_4337_SUPPORTED_SAFE_MODULES", + default=["0xa581c4A4DB7175302464fF3C06380BC3270b4037"], +) + # Tracing indexing configuration (not useful for L2 indexing) # ------------------------------------------------------------------------------ ETHEREUM_TRACING_NODE_URL = env("ETHEREUM_TRACING_NODE_URL", default=None) @@ -437,7 +457,7 @@ "ETH_INTERNAL_TXS_BLOCK_PROCESS_LIMIT", default=10_000 ) ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN = env.int( - "ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN", default=10 + "ETH_INTERNAL_TXS_BLOCKS_TO_REINDEX_AGAIN", default=1 ) ETH_INTERNAL_TXS_NUMBER_TRACE_BLOCKS = env.int( "ETH_INTERNAL_TXS_NUMBER_TRACE_BLOCKS", default=10 @@ -464,7 +484,7 @@ "ETH_EVENTS_BLOCK_PROCESS_LIMIT_MAX", default=0 ) # Maximum number of blocks to process together when searching for events. 0 == no limit. ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN = env.int( - "ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN", default=20 + "ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN", default=2 ) # Blocks to reindex again every indexer run when service is synced. Useful for RPCs not reliable ETH_EVENTS_GET_LOGS_CONCURRENCY = env.int( "ETH_EVENTS_GET_LOGS_CONCURRENCY", default=20 @@ -496,10 +516,6 @@ "TOKENS_ERC20_GET_BALANCES_BATCH", default=2_000 ) # Number of tokens to get balances from in the same request. 
From 2_500 some nodes raise HTTP 413 -TOKEN_ETH_PRICE_TTL = env.int( - "TOKEN_ETH_PRICE_TTL", default=60 * 30 # 30 minutes -) # Expiration time for token eth price - # Notifications # ------------------------------------------------------------------------------ SLACK_API_WEBHOOK = env("SLACK_API_WEBHOOK", default=None) @@ -524,8 +540,10 @@ # Events # ------------------------------------------------------------------------------ EVENTS_QUEUE_URL = env("EVENTS_QUEUE_URL", default=None) -EVENTS_QUEUE_ASYNC_CONNECTION = env("EVENTS_QUEUE_ASYNC_CONNECTION", default=False) EVENTS_QUEUE_EXCHANGE_NAME = env("EVENTS_QUEUE_EXCHANGE_NAME", default="amq.fanout") +EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT = env.int( + "EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT", default=0 +) # Cache CACHE_ALL_TXS_VIEW = env.int( @@ -554,4 +572,14 @@ "SECURITY_DEFINITIONS": { "api_key": {"type": "apiKey", "in": "header", "name": "Authorization"} }, + "DEFAULT_AUTO_SCHEMA_CLASS": "safe_transaction_service.utils.swagger.CustomSwaggerSchema", } + +# Shell Plus +# ------------------------------------------------------------------------------ +SHELL_PLUS_PRINT_SQL_TRUNCATE = env.int("SHELL_PLUS_PRINT_SQL_TRUNCATE", default=10_000) + +# Endpoints +TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS = env.int( + "TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS", default=1_000 +) # Don't return more than 1_000 transfers diff --git a/config/settings/local.py b/config/settings/local.py index a4601035..c6dde2f0 100644 --- a/config/settings/local.py +++ b/config/settings/local.py @@ -15,6 +15,11 @@ REDIS_URL = env.str("REDIS_URL") +# SECURITY +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff +CSRF_TRUSTED_ORIGINS = env.list("CSRF_TRUSTED_ORIGINS", default=[]) + # CACHES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#caches @@ -50,7 +55,3 @@ } # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"] - -# CELERY -# ------------------------------------------------------------------------------ -CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND", default=REDIS_URL) diff --git a/config/settings/production.py b/config/settings/production.py index f5aea51c..9bfce65a 100644 --- a/config/settings/production.py +++ b/config/settings/production.py @@ -36,7 +36,7 @@ SECURE_CONTENT_TYPE_NOSNIFF = env.bool( "DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True ) -# https://docs.djangoproject.com/en/3.2/ref/settings/#csrf-trusted-origins +# https://docs.djangoproject.com/en/5.0/ref/settings/#csrf-trusted-origins CSRF_TRUSTED_ORIGINS = env.list("CSRF_TRUSTED_ORIGINS", default=[]) # SSO (tested with https://github.com/buzzfeed/sso) @@ -66,10 +66,6 @@ ADMIN_URL = env("DJANGO_ADMIN_URL", default="admin/") SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTOCOL", "https") -# CELERY -# ------------------------------------------------------------------------------ -CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND", default=REDIS_URL) - # Gunicorn # ------------------------------------------------------------------------------ INSTALLED_APPS += ["gunicorn"] # noqa F405 diff --git a/config/settings/test.py b/config/settings/test.py index e3926085..0c339eb0 100644 --- a/config/settings/test.py +++ b/config/settings/test.py @@ -47,5 +47,3 @@ "level": "DEBUG", } } - -EVENTS_QUEUE_ASYNC_CONNECTION = False diff 
--git a/config/urls.py b/config/urls.py index 79be7374..9d80b3a1 100644 --- a/config/urls.py +++ b/config/urls.py @@ -13,7 +13,7 @@ openapi.Info( title="Safe Transaction Service API", default_version="v1", - description="API to keep track of transactions sent via Gnosis Safe smart contracts", + description="API to keep track of transactions sent via Safe smart contracts", license=openapi.License(name="MIT License"), ), validators=["flex", "ssv"], @@ -43,6 +43,13 @@ urlpatterns_v1 = [ path("", include("safe_transaction_service.history.urls", namespace="history")), + path( + "", + include( + "safe_transaction_service.account_abstraction.urls", + namespace="account_abstraction", + ), + ), path( "contracts/", include("safe_transaction_service.contracts.urls", namespace="contracts"), diff --git a/config/wsgi.py b/config/wsgi.py index 234b4731..07bc8ad6 100644 --- a/config/wsgi.py +++ b/config/wsgi.py @@ -13,6 +13,7 @@ framework. """ + import os import sys diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 7a32b718..9ff0f9b6 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,5 +1,3 @@ -version: '3.5' - services: web: build: @@ -30,6 +28,8 @@ services: ganache: image: trufflesuite/ganache:latest - command: --defaultBalanceEther 10000 --gasLimit 10000000 -a 10 --chain.chainId 1337 --chain.networkId 1337 -d --host 0.0.0.0 ports: - "8545:8545" + command: --defaultBalanceEther 10000 --gasLimit 10000000 -a 10 --chain.chainId 1337 --chain.networkId 1337 -d --host 0.0.0.0 + healthcheck: + test: bash -c "echo 'hello' > /dev/tcp/localhost/8545" diff --git a/docker-compose.yml b/docker-compose.yml index d658d97f..b96cdf0c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3.5' - volumes: nginx-shared: @@ -19,13 +17,21 @@ services: image: redis:alpine ports: - "6379:6379" - command: - - --appendonly yes + healthcheck: + test: [ "CMD", "redis-cli", "ping"] + timeout: 5s + retries: 3 + rabbitmq: image: rabbitmq:alpine ports: - "5672:5672" + healthcheck: + test: rabbitmq-diagnostics -q ping + interval: 30s + timeout: 30s + retries: 3 db: image: postgres:14-alpine @@ -34,6 +40,10 @@ services: environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres + healthcheck: + test: ["CMD", "pg_isready"] + timeout: 5s + retries: 3 web: build: diff --git a/docker/web/Dockerfile b/docker/web/Dockerfile index 05cd5445..1c08477b 100644 --- a/docker/web/Dockerfile +++ b/docker/web/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-slim +FROM python:3.12-slim ARG APP_HOME=/app WORKDIR ${APP_HOME} @@ -17,15 +17,22 @@ RUN set -ex \ libpq-dev \ " \ && apt-get update \ + && apt-get upgrade -y \ && apt-get install -y --no-install-recommends $buildDeps tmux postgresql-client \ && pip install -U --no-cache-dir wheel setuptools pip \ && pip install --no-cache-dir -r requirements.txt \ && apt-get purge -y --auto-remove $buildDeps \ - && rm -rf /var/lib/apt/lists/* \ - && find /usr/local \ - \( -type d -a -name test -o -name tests \) \ - -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \ - -exec rm -rf '{}' + + && rm -rf /var/lib/apt/lists/* + +# /nginx mount point must be created before so it doesn't have root permissions +# ${APP_HOME} root folder will not be updated by COPY --chown, so permissions need to be adjusted +RUN groupadd -g 999 python && \ + useradd -u 999 -r -g python python && \ + mkdir -p /nginx && \ + chown -R python:python /nginx ${APP_HOME} +COPY --chown=python:python . . 
+ +# Use numeric ids so kubernetes identifies the user correctly +USER 999:999 -COPY . . RUN DJANGO_SETTINGS_MODULE=config.settings.production DJANGO_DOT_ENV_FILE=.env.tracing.sample python manage.py collectstatic --noinput diff --git a/docker/web/Dockerfile_alpine b/docker/web/Dockerfile_alpine index dd9c405c..d08c6210 100644 --- a/docker/web/Dockerfile_alpine +++ b/docker/web/Dockerfile_alpine @@ -1,13 +1,17 @@ -# Less size than Debian, slowest to build -FROM python:3.10-alpine +# Less size than Debian +FROM python:3.12-alpine -ENV PYTHONUNBUFFERED 1 -WORKDIR /app +ARG APP_HOME=/app +WORKDIR ${APP_HOME} +ENV PYTHONUNBUFFERED=1 -COPY requirements.txt ./ +# https://eth-hash.readthedocs.io/en/latest/quickstart.html#specify-backend-by-environment-variable +# `pysha3` is way faster than `pycryptodome` for CPython +ENV ETH_HASH_BACKEND=pysha3 -# Signal handling for PID1 https://github.com/krallin/tini -RUN apk add --update --no-cache tini libpq && \ +COPY requirements.txt ./ +RUN set -ex && \ + apk add --update --no-cache postgresql-client tmux && \ apk add --no-cache --virtual .build-dependencies postgresql-dev alpine-sdk libffi-dev && \ pip install --no-cache-dir -r requirements.txt && \ apk del .build-dependencies && \ @@ -16,7 +20,13 @@ RUN apk add --update --no-cache tini libpq && \ -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \ -exec rm -rf '{}' + -COPY . . -RUN DJANGO_SETTINGS_MODULE=config.settings.local DJANGO_DOT_ENV_FILE=.env.local python manage.py collectstatic --noinput +RUN addgroup -g 999 -S python && \ + adduser -u 999 -S python python && \ + mkdir -p /nginx && \ + chown -R python:python /nginx ${APP_HOME} +COPY --chown=python:python . . + +# Use numeric ids so kubernetes identifies the user correctly +USER 999:999 -ENTRYPOINT ["/sbin/tini", "--"] +RUN DJANGO_SETTINGS_MODULE=config.settings.production DJANGO_DOT_ENV_FILE=.env.tracing.sample python manage.py collectstatic --noinput diff --git a/docker/web/celery/worker/run.sh b/docker/web/celery/worker/run.sh index ae980c62..9b1902e2 100755 --- a/docker/web/celery/worker/run.sh +++ b/docker/web/celery/worker/run.sh @@ -2,9 +2,7 @@ set -euo pipefail -MAX_MEMORY_PER_CHILD="${WORKER_MAX_MEMORY_PER_CHILD:-2097152}" -MAX_TASKS_PER_CHILD="${MAX_TASKS_PER_CHILD:-1000000}" -TASK_CONCURRENCY="${CELERYD_CONCURRENCY:-1000}" +TASK_CONCURRENCY=${CELERYD_CONCURRENCY:-15000} # DEBUG set in .env_docker_compose if [ ${DEBUG:-0} = 1 ]; then @@ -15,10 +13,13 @@ fi if [ ${RUN_MIGRATIONS:-0} = 1 ]; then echo "==> $(date +%H:%M:%S) ==> Migrating Django models... " - python manage.py migrate --noinput + DB_STATEMENT_TIMEOUT=0 python manage.py migrate --noinput echo "==> $(date +%H:%M:%S) ==> Setting up service... " python manage.py setup_service + + echo "==> $(date +%H:%M:%S) ==> Setting contracts... " + python manage.py update_safe_contracts_logo fi echo "==> $(date +%H:%M:%S) ==> Check RPC connected matches previously used RPC... " diff --git a/docs/conf.py b/docs/conf.py index ac32d5ac..4b83d012 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -40,7 +40,7 @@ # General information about the project. 
project = "Safe Transaction Service" -copyright = """2018, Gnosis""" +copyright = """2018, Safe""" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -183,8 +183,8 @@ ( "index", "safe_transaction_service.tex", - "Gnosis Transaction Service Documentation", - """Gnosis""", + "Safe Transaction Service Documentation", + """Safe""", "manual", ) ] @@ -219,7 +219,7 @@ "index", "safe_transaction_service", "Safe Transaction Service Documentation", - ["""Gnosis"""], + ["""Safe"""], 1, ) ] @@ -238,9 +238,9 @@ "index", "safe_transaction_service", "Safe Transaction Service Documentation", - """Gnosis""", + """Safe""", "Safe Transaction Service", - """Project to manage transactions for Gnosis Safe""", + """Project to manage transactions for Safe wallet""", "Miscellaneous", ) ] diff --git a/docs/index.rst b/docs/index.rst index 0c6caab2..b08d1667 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,9 +1,9 @@ -.. Gnosis Safe Push Service documentation master file, created by +.. Safe Tx Service documentation main file, created by sphinx-quickstart. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to Gnosis Safe Push Service's documentation! +Welcome to Safe Tx Service documentation! ==================================================================== Contents: diff --git a/requirements-test.txt b/requirements-test.txt index 65fb2f8a..0c339492 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,13 +1,13 @@ -r requirements.txt -coverage==7.3.1 -django-stubs==4.2.4 +coverage==7.5.1 +django-stubs==5.0.0 django-test-migrations==1.3.0 factory-boy==3.3.0 -faker==19.6.1 -mypy==1.5.1 -pytest==7.4.2 -pytest-celery==0.0.0 -pytest-django==4.5.2 -pytest-env==1.0.1 -pytest-rerunfailures==12.0 -pytest-sugar==0.9.7 +faker==25.1.0 +mypy==1.9.0 +pytest==8.2.0 +pytest-celery==1.0.0 +pytest-django==4.8.0 +pytest-env==1.1.3 +pytest-rerunfailures==14.0 +pytest-sugar==1.0.0 diff --git a/requirements.txt b/requirements.txt index 814abcc2..682957b4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,37 +1,38 @@ -boto3==1.28.44 -cachetools==5.3.1 -celery==5.3.4 -django==4.2.4 +asgiref==3.7.2 +boto3==1.34.103 +cachetools==5.3.3 +celery==5.4.0 +django==5.0.4 django-cache-memoize==0.2.0 -django-celery-beat==2.5.0 -django-cors-headers==4.2.0 -django-db-geventpool==4.0.1 +django-celery-beat==2.6.0 +django-cors-headers==4.3.1 +django-db-geventpool==4.0.2 django-debug-toolbar django-debug-toolbar-force django-environ==0.11.2 django-extensions==3.2.3 -django-filter==23.3 -django-imagekit==4.1.0 -django-model-utils==4.3.1 -django-redis==5.3.0 -django-s3-storage==0.14.0 -django-timezone-field==6.0.1 -djangorestframework==3.14.0 +django-filter==24.2 +django-imagekit==5.0.0 +django-model-utils==4.5.1 +django-redis==5.4.0 +django-s3-storage==0.15.0 +django-timezone-field==6.1.0 +djangorestframework==3.15.1 djangorestframework-camel-case==1.4.2 -docutils==0.20.1 +docutils==0.21.2 drf-yasg[validation]==1.21.7 -firebase-admin==6.2.0 +firebase-admin==6.5.0 flower==2.0.1 -gunicorn[gevent]==21.2.0 +gunicorn[gevent]==22.0.0 hexbytes==0.3.1 -hiredis==2.2.3 +hiredis==2.3.2 packaging>=21.0 pika==1.3.2 -pillow==10.0.1 +pillow==10.3.0 psycogreen==1.0.2 -psycopg2==2.9.7 -redis==5.0.0 +psycopg2==2.9.9 +redis==5.0.4 requests==2.31.0 -git+https://github.com/protofire/safe-eth-py.git@rsk#egg=safe-eth-py -#safe-eth-py[django]==5.8.0 -web3==6.9.0 
+git+https://github.com/protofire/safe-eth-py.git@v6.0.0b29#egg=safe-eth-py +# safe-eth-py[django]==6.0.0b29 +web3==6.18.0 diff --git a/run_tests.sh b/run_tests.sh index f2cda5e7..f38b6095 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -8,7 +8,5 @@ docker compose -f docker-compose.yml -f docker-compose.dev.yml build --force-rm docker compose -f docker-compose.yml -f docker-compose.dev.yml up --no-start db redis ganache rabbitmq docker compose -f docker-compose.yml -f docker-compose.dev.yml start db redis ganache rabbitmq -sleep 10 - python manage.py check pytest -rxXs diff --git a/safe_transaction_service/__init__.py b/safe_transaction_service/__init__.py index 4e9c2a21..b140f08e 100644 --- a/safe_transaction_service/__init__.py +++ b/safe_transaction_service/__init__.py @@ -1,4 +1,4 @@ -__version__ = "4.26.0" +__version__ = "5.0.0" __version_info__ = tuple( int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") diff --git a/safe_transaction_service/history/indexers/abis/__init__.py b/safe_transaction_service/account_abstraction/__init__.py similarity index 100% rename from safe_transaction_service/history/indexers/abis/__init__.py rename to safe_transaction_service/account_abstraction/__init__.py diff --git a/safe_transaction_service/account_abstraction/admin.py b/safe_transaction_service/account_abstraction/admin.py new file mode 100644 index 00000000..c2922a27 --- /dev/null +++ b/safe_transaction_service/account_abstraction/admin.py @@ -0,0 +1,96 @@ +from django.contrib import admin + +from eth_typing import ChecksumAddress +from hexbytes import HexBytes + +from gnosis.eth.django.admin import AdvancedAdminSearchMixin + +from .models import SafeOperation, UserOperation, UserOperationReceipt + + +class SafeOperationInline(admin.TabularInline): + model = SafeOperation + + +@admin.register(UserOperation) +class UserOperationAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): + inlines = [SafeOperationInline] + list_display = ("hash", "ethereum_tx", "sender", "nonce", "success") + list_filter = [ + "receipt__success", + ] + search_fields = [ + "==ethereum_tx_id", + "==sender", + ] + ordering = ["-nonce"] + + @admin.display(boolean=True, description="Is successful?") + def success(self, obj: UserOperation) -> bool: + return obj.receipt.success + + +# Type for classes with a ForeignKey to UserOperation +ForeignClassToUserOperationType = UserOperationReceipt | SafeOperation + + +class ForeignClassToUserOperationAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): + search_fields = [ + "==user_operation__hash", + "==user_operation__ethereum_tx_id", + "==user_operation__sender", + ] + + @admin.display() + def ethereum_tx(self, obj: ForeignClassToUserOperationType) -> str: + return HexBytes(obj.user_operation.ethereum_tx.tx_hash).hex() + + @admin.display() + def user_operation_hash(self, obj: ForeignClassToUserOperationType) -> str: + return HexBytes(obj.user_operation.hash).hex() + + @admin.display() + def user_operation_sender( + self, obj: ForeignClassToUserOperationType + ) -> ChecksumAddress: + return obj.user_operation.sender + + @admin.display() + def user_operation_nonce(self, obj: ForeignClassToUserOperationType) -> int: + return obj.user_operation.nonce + + +@admin.register(UserOperationReceipt) +class UserOperationReceiptAdmin(ForeignClassToUserOperationAdmin): + list_display = ( + "user_operation_hash", + "ethereum_tx", + "user_operation_sender", + "user_operation_nonce", + "success", + "deposited", + ) + list_filter = [ + "success", + ] + + 
+@admin.register(SafeOperation) +class SafeOperationAdmin(ForeignClassToUserOperationAdmin): + list_display = ( + "hash", + "user_operation_hash", + "ethereum_tx", + "user_operation_sender", + "user_operation_nonce", + "success", + "module_address", + ) + list_filter = ["module_address"] + list_select_related = ["user_operation__receipt"] + search_fields = ForeignClassToUserOperationAdmin.search_fields + ["==hash"] + ordering = ["-modified"] + + @admin.display(boolean=True, description="Is successful?") + def success(self, obj: SafeOperation) -> bool: + return obj.user_operation.receipt.success diff --git a/safe_transaction_service/account_abstraction/apps.py b/safe_transaction_service/account_abstraction/apps.py new file mode 100644 index 00000000..443bf706 --- /dev/null +++ b/safe_transaction_service/account_abstraction/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class AccountAbstractionConfig(AppConfig): + name = "safe_transaction_service.account_abstraction" + verbose_name = "Account Abstraction (ERC4337) indexing support" diff --git a/safe_transaction_service/account_abstraction/constants.py b/safe_transaction_service/account_abstraction/constants.py new file mode 100644 index 00000000..dfac60d9 --- /dev/null +++ b/safe_transaction_service/account_abstraction/constants.py @@ -0,0 +1,23 @@ +""" +ERC4337 Constants + +EntryPoint v0.6.0 and 0.7.0 +--------------------------- + 0x49628fd1471006c1482da88028e9ce4dbb080b815c9b0344d39e5a8e6ec1419f + UserOperationEvent ( + indexed bytes32 userOpHash, + indexed address sender, + indexed address paymaster, + uint256 nonce, + bool success, + uint256 actualGasCost, + uint256 actualGasUsed + ) +""" + +from hexbytes import HexBytes + +USER_OPERATION_NUMBER_TOPICS = 4 +USER_OPERATION_EVENT_TOPIC = HexBytes( + "0x49628fd1471006c1482da88028e9ce4dbb080b815c9b0344d39e5a8e6ec1419f" +) diff --git a/safe_transaction_service/account_abstraction/helpers.py b/safe_transaction_service/account_abstraction/helpers.py new file mode 100644 index 00000000..20737cb4 --- /dev/null +++ b/safe_transaction_service/account_abstraction/helpers.py @@ -0,0 +1,81 @@ +import dataclasses +from typing import List + +from eth_typing import ChecksumAddress + +from gnosis.eth import EthereumClient +from gnosis.eth.contracts import get_safe_V1_4_1_contract +from gnosis.eth.utils import fast_to_checksum_address +from gnosis.safe.proxy_factory import ProxyFactoryV141 + + +@dataclasses.dataclass(eq=True, frozen=True) +class DecodedInitCode: + # UserOperation data + factory_address: ChecksumAddress + factory_data: bytes # Factory call with function identifier + initializer: bytes # Initializer passed to ProxyFactory + # ProxyFactory data + singleton: ChecksumAddress + salt_nonce: int + expected_address: ChecksumAddress # Expected Safe deployment address + # Safe creation data + owners: List[ChecksumAddress] + threshold: int + to: ChecksumAddress + data: bytes + fallback_handler: ChecksumAddress + payment_token: ChecksumAddress + payment: int + payment_receiver: ChecksumAddress + + +def decode_init_code( + init_code: bytes, ethereum_client: EthereumClient +) -> DecodedInitCode: + """ + Decode data to check for a valid ProxyFactory Safe deployment. + + :param init_code: should be composed of: + - 20 first bytes with the address of the factory. + - Call data for the ``Factory``. 
In the case of the Safe: + - Call to the ``ProxyFactory``, with the ``initializer``, ``singleton`` and ``saltNonce`` + - The ``ProxyFactory`` then deploys a ``Safe Proxy`` and calls ``setup`` with all the configuration parameters. + :param ethereum_client: + :return: Decoded Init Code dataclass + :raises ValueError: Problem decoding + """ + factory_address = fast_to_checksum_address(init_code[:20]) + factory_data = init_code[20:] + proxy_factory = ProxyFactoryV141(factory_address, ethereum_client) + safe_contract = get_safe_V1_4_1_contract(ethereum_client.w3) + _, data = proxy_factory.contract.decode_function_input(factory_data) + initializer = data.pop("initializer") + _, safe_deployment_data = safe_contract.decode_function_input(initializer) + + singleton = data.pop("_singleton") + salt_nonce = data.pop("saltNonce") + expected_address = proxy_factory.calculate_proxy_address( + singleton, initializer, salt_nonce, chain_specific=False + ) + return DecodedInitCode( + factory_address, + factory_data, + initializer, + singleton, + salt_nonce, + expected_address, + *( + safe_deployment_data[field] + for field in [ + "_owners", + "_threshold", + "to", + "data", + "fallbackHandler", + "paymentToken", + "payment", + "paymentReceiver", + ] + ) + ) diff --git a/safe_transaction_service/history/tests/clients/__init__.py b/safe_transaction_service/account_abstraction/management/__init__.py similarity index 100% rename from safe_transaction_service/history/tests/clients/__init__.py rename to safe_transaction_service/account_abstraction/management/__init__.py diff --git a/safe_transaction_service/account_abstraction/management/commands/__init__.py b/safe_transaction_service/account_abstraction/management/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/account_abstraction/management/commands/reindex_4337.py b/safe_transaction_service/account_abstraction/management/commands/reindex_4337.py new file mode 100644 index 00000000..3e0dc738 --- /dev/null +++ b/safe_transaction_service/account_abstraction/management/commands/reindex_4337.py @@ -0,0 +1,57 @@ +from typing import Optional, Sequence + +from django.core.management.base import BaseCommand + +from eth_typing import ChecksumAddress + +from gnosis.eth.utils import fast_to_checksum_address + +from safe_transaction_service.history.models import EthereumTx + +from ...constants import USER_OPERATION_EVENT_TOPIC +from ...services import get_aa_processor_service +from ...utils import get_user_operation_sender_from_user_operation_log + + +class Command(BaseCommand): + help = "Force reindexing of Safe events/traces (depending on the running mode)" + + def add_arguments(self, parser): + parser.add_argument( + "--addresses", + nargs="+", + help="Safe addresses. 
If not provided all will be reindexed", + ) + + def handle(self, *args, **options): + self.stdout.write(self.style.SUCCESS("Reindexing ERC4337 UserOperations")) + addresses = ( + [fast_to_checksum_address(address) for address in options["addresses"]] + if options["addresses"] + else None + ) + + processed_user_operations = self.reindex(addresses) + self.stdout.write( + self.style.SUCCESS(f"Reindexed {processed_user_operations} UserOperations") + ) + + def reindex( + self, + addresses: Optional[Sequence[ChecksumAddress]], + ) -> None: + topic = USER_OPERATION_EVENT_TOPIC.hex() + aa_processor_service = get_aa_processor_service() + processed_user_operations = 0 + for tx in EthereumTx.objects.account_abstraction_txs(): + for log in tx.logs: + if log["topics"][0] == topic: + safe_address = get_user_operation_sender_from_user_operation_log( + log + ) + if addresses and safe_address not in addresses: + continue + processed_user_operations += ( + aa_processor_service.process_aa_transaction(safe_address, tx) + ) + return processed_user_operations diff --git a/safe_transaction_service/account_abstraction/migrations/0001_initial.py b/safe_transaction_service/account_abstraction/migrations/0001_initial.py new file mode 100644 index 00000000..882bfa27 --- /dev/null +++ b/safe_transaction_service/account_abstraction/migrations/0001_initial.py @@ -0,0 +1,217 @@ +# Generated by Django 5.0.3 on 2024-03-25 13:34 + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + +import model_utils.fields + +import gnosis.eth.django.models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("history", "0080_alter_multisigconfirmation_signature"), + ] + + operations = [ + migrations.CreateModel( + name="SafeOperation", + fields=[ + ( + "created", + model_utils.fields.AutoCreatedField( + default=django.utils.timezone.now, + editable=False, + verbose_name="created", + ), + ), + ( + "modified", + model_utils.fields.AutoLastModifiedField( + default=django.utils.timezone.now, + editable=False, + verbose_name="modified", + ), + ), + ( + "hash", + gnosis.eth.django.models.Keccak256Field( + primary_key=True, serialize=False + ), + ), + ("valid_after", models.DateTimeField(null=True)), + ("valid_until", models.DateTimeField(null=True)), + ( + "module_address", + gnosis.eth.django.models.EthereumAddressV2Field(db_index=True), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="SafeOperationConfirmation", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "created", + model_utils.fields.AutoCreatedField( + default=django.utils.timezone.now, + editable=False, + verbose_name="created", + ), + ), + ( + "modified", + model_utils.fields.AutoLastModifiedField( + default=django.utils.timezone.now, + editable=False, + verbose_name="modified", + ), + ), + ("owner", gnosis.eth.django.models.EthereumAddressV2Field()), + ( + "signature", + gnosis.eth.django.models.HexV2Field( + default=None, max_length=5000, null=True + ), + ), + ( + "signature_type", + models.PositiveSmallIntegerField( + choices=[ + (0, "CONTRACT_SIGNATURE"), + (1, "APPROVED_HASH"), + (2, "EOA"), + (3, "ETH_SIGN"), + ], + db_index=True, + ), + ), + ( + "safe_operation", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="confirmations", + to="account_abstraction.safeoperation", + ), + ), + ], + options={ + "ordering": ["created"], 
+ }, + ), + migrations.CreateModel( + name="UserOperation", + fields=[ + ( + "hash", + gnosis.eth.django.models.Keccak256Field( + primary_key=True, serialize=False + ), + ), + ( + "sender", + gnosis.eth.django.models.EthereumAddressV2Field(db_index=True), + ), + ("nonce", gnosis.eth.django.models.Uint256Field()), + ("init_code", models.BinaryField(blank=True, editable=True, null=True)), + ("call_data", models.BinaryField(blank=True, editable=True, null=True)), + ("call_data_gas_limit", gnosis.eth.django.models.Uint256Field()), + ("verification_gas_limit", gnosis.eth.django.models.Uint256Field()), + ("pre_verification_gas", gnosis.eth.django.models.Uint256Field()), + ("max_fee_per_gas", gnosis.eth.django.models.Uint256Field()), + ("max_priority_fee_per_gas", gnosis.eth.django.models.Uint256Field()), + ( + "paymaster", + gnosis.eth.django.models.EthereumAddressV2Field( + blank=True, db_index=True, null=True + ), + ), + ( + "paymaster_data", + models.BinaryField(blank=True, editable=True, null=True), + ), + ("signature", models.BinaryField(blank=True, editable=True, null=True)), + ( + "entry_point", + gnosis.eth.django.models.EthereumAddressV2Field(db_index=True), + ), + ( + "ethereum_tx", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="history.ethereumtx", + ), + ), + ], + ), + migrations.AddField( + model_name="safeoperation", + name="user_operation", + field=models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="safe_operation", + to="account_abstraction.useroperation", + ), + ), + migrations.CreateModel( + name="UserOperationReceipt", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("actual_gas_cost", gnosis.eth.django.models.Uint256Field()), + ("actual_gas_used", gnosis.eth.django.models.Uint256Field()), + ("success", models.BooleanField()), + ("reason", models.CharField(max_length=256)), + ("deposited", gnosis.eth.django.models.Uint256Field()), + ( + "user_operation", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="receipt", + to="account_abstraction.useroperation", + ), + ), + ], + ), + migrations.AddConstraint( + model_name="safeoperationconfirmation", + constraint=models.UniqueConstraint( + fields=("safe_operation", "owner"), + name="unique_safe_operation_owner_confirmation", + ), + ), + migrations.AddIndex( + model_name="useroperation", + index=models.Index( + fields=["sender", "-nonce"], name="account_abs_sender_663a75_idx" + ), + ), + migrations.AlterUniqueTogether( + name="useroperation", + unique_together={("sender", "nonce")}, + ), + ] diff --git a/safe_transaction_service/account_abstraction/migrations/0002_alter_useroperation_unique_together.py b/safe_transaction_service/account_abstraction/migrations/0002_alter_useroperation_unique_together.py new file mode 100644 index 00000000..0da8783a --- /dev/null +++ b/safe_transaction_service/account_abstraction/migrations/0002_alter_useroperation_unique_together.py @@ -0,0 +1,16 @@ +# Generated by Django 5.0.3 on 2024-04-05 15:05 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("account_abstraction", "0001_initial"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="useroperation", + unique_together=set(), + ), + ] diff --git a/safe_transaction_service/account_abstraction/migrations/0003_alter_useroperationreceipt_reason.py 
b/safe_transaction_service/account_abstraction/migrations/0003_alter_useroperationreceipt_reason.py new file mode 100644 index 00000000..ea959f9d --- /dev/null +++ b/safe_transaction_service/account_abstraction/migrations/0003_alter_useroperationreceipt_reason.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.4 on 2024-05-09 11:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("account_abstraction", "0002_alter_useroperation_unique_together"), + ] + + operations = [ + migrations.AlterField( + model_name="useroperationreceipt", + name="reason", + field=models.CharField(blank=True, max_length=256), + ), + ] diff --git a/safe_transaction_service/account_abstraction/migrations/__init__.py b/safe_transaction_service/account_abstraction/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/account_abstraction/models.py b/safe_transaction_service/account_abstraction/models.py new file mode 100644 index 00000000..6aa0c3d1 --- /dev/null +++ b/safe_transaction_service/account_abstraction/models.py @@ -0,0 +1,176 @@ +import logging +from functools import cached_property +from typing import Optional + +from django.db import models +from django.db.models import Index + +from hexbytes import HexBytes +from model_utils.models import TimeStampedModel + +from gnosis.eth.account_abstraction import UserOperation as UserOperationClass +from gnosis.eth.account_abstraction import UserOperationMetadata +from gnosis.eth.django.models import ( + EthereumAddressV2Field, + HexV2Field, + Keccak256Field, + Uint256Field, +) +from gnosis.safe.account_abstraction import SafeOperation as SafeOperationClass +from gnosis.safe.safe_signature import SafeSignatureType + +from safe_transaction_service.history import models as history_models +from safe_transaction_service.utils.constants import SIGNATURE_LENGTH + +logger = logging.getLogger(__name__) + + +class UserOperation(models.Model): + """ + EIP 4337 UserOperation + + https://www.erc4337.io/docs/understanding-ERC-4337/user-operation + """ + + hash = Keccak256Field(primary_key=True) + ethereum_tx = models.ForeignKey( + history_models.EthereumTx, on_delete=models.CASCADE, null=True, blank=True + ) + sender = EthereumAddressV2Field(db_index=True) + nonce = Uint256Field() + init_code = models.BinaryField(null=True, blank=True, editable=True) + call_data = models.BinaryField(null=True, blank=True, editable=True) + call_data_gas_limit = Uint256Field() + verification_gas_limit = Uint256Field() + pre_verification_gas = Uint256Field() + max_fee_per_gas = Uint256Field() + max_priority_fee_per_gas = Uint256Field() + paymaster = EthereumAddressV2Field( + db_index=True, null=True, blank=True, editable=True + ) + paymaster_data = models.BinaryField(null=True, blank=True, editable=True) + signature = models.BinaryField(null=True, blank=True, editable=True) + entry_point = EthereumAddressV2Field(db_index=True) + + class Meta: + indexes = [ + Index(fields=["sender", "-nonce"]), + ] + + def __str__(self) -> str: + return f"{HexBytes(self.hash).hex()} UserOperation for sender={self.sender} with nonce={self.nonce}" + + @cached_property + def paymaster_and_data(self) -> Optional[HexBytes]: + if self.paymaster and self.paymaster_data: + return HexBytes(HexBytes(self.paymaster) + HexBytes(self.paymaster_data)) + + def to_user_operation(self, add_tx_metadata: bool = False) -> UserOperationClass: + """ + Returns a safe-eth-py UserOperation object + + :param add_tx_metadata: If `True` 
more database queries will be performed to get the transaction metadata + :return: safe-eth-py `UserOperation` + """ + user_operation_metadata = ( + UserOperationMetadata( + # More DB queries + transaction_hash=HexBytes(self.ethereum_tx_id), + block_hash=HexBytes(self.ethereum_tx.block.block_hash), + block_number=self.ethereum_tx.block.number, + ) + if add_tx_metadata + else None + ) + + return UserOperationClass( + HexBytes(self.hash), + self.sender, + self.nonce, + HexBytes(self.init_code) if self.init_code else b"", + HexBytes(self.call_data) if self.call_data else b"", + self.call_data_gas_limit, + self.verification_gas_limit, + self.pre_verification_gas, + self.max_fee_per_gas, + self.max_priority_fee_per_gas, + self.paymaster_and_data if self.paymaster_and_data else b"", + HexBytes(self.signature) if self.signature else b"", + self.entry_point, + user_operation_metadata, + ) + + def to_safe_operation(self) -> SafeOperationClass: + """ + :return: SafeOperation built from UserOperation + :raises: ValueError + """ + if self.signature and bytes(self.signature): + return SafeOperationClass.from_user_operation(self.to_user_operation()) + raise ValueError("Not enough information to build SafeOperation") + + +class UserOperationReceipt(models.Model): + user_operation = models.OneToOneField( + UserOperation, on_delete=models.CASCADE, related_name="receipt" + ) + actual_gas_cost = Uint256Field() + actual_gas_used = Uint256Field() + success = models.BooleanField() + reason = models.CharField(max_length=256, blank=True) + deposited = Uint256Field() + + def __str__(self) -> str: + return f"{HexBytes(self.user_operation_id).hex()} UserOperationReceipt" + + +class SafeOperation(TimeStampedModel): + hash = Keccak256Field(primary_key=True) # safeOperationHash + user_operation = models.OneToOneField( + UserOperation, on_delete=models.CASCADE, related_name="safe_operation" + ) + valid_after = models.DateTimeField(null=True) # Epoch uint48 + valid_until = models.DateTimeField(null=True) # Epoch uint48 + module_address = EthereumAddressV2Field(db_index=True) + + def __str__(self) -> str: + return f"{HexBytes(self.hash).hex()} SafeOperation for user-operation={HexBytes(self.user_operation_id).hex()}" + + def build_signature(self) -> bytes: + return b"".join( + [ + HexBytes(signature) + for _, signature in sorted( + self.confirmations.values_list("owner", "signature"), + key=lambda tup: tup[0].lower(), + ) + ] + ) + + +class SafeOperationConfirmation(TimeStampedModel): + safe_operation = models.ForeignKey( + SafeOperation, + on_delete=models.CASCADE, + related_name="confirmations", + ) + owner = EthereumAddressV2Field() + signature = HexV2Field(null=True, default=None, max_length=SIGNATURE_LENGTH) + signature_type = models.PositiveSmallIntegerField( + choices=[(tag.value, tag.name) for tag in SafeSignatureType], db_index=True + ) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["safe_operation", "owner"], + name="unique_safe_operation_owner_confirmation", + ) + ] + ordering = ["created"] + + def __str__(self): + return ( + f"Safe Operation Confirmation of owner={self.owner} for " + f"safe-operation={HexBytes(self.safe_operation_id).hex()}" + ) diff --git a/safe_transaction_service/account_abstraction/pagination.py b/safe_transaction_service/account_abstraction/pagination.py new file mode 100644 index 00000000..e5d5557a --- /dev/null +++ b/safe_transaction_service/account_abstraction/pagination.py @@ -0,0 +1,6 @@ +from rest_framework.pagination import LimitOffsetPagination + + +class 
DefaultPagination(LimitOffsetPagination): + max_limit = 200 + default_limit = 100 diff --git a/safe_transaction_service/account_abstraction/serializers.py b/safe_transaction_service/account_abstraction/serializers.py new file mode 100644 index 00000000..b730c78b --- /dev/null +++ b/safe_transaction_service/account_abstraction/serializers.py @@ -0,0 +1,385 @@ +import datetime +from typing import Any, Dict, List, Optional + +from django.conf import settings +from django.db import transaction +from django.utils import timezone + +from eth_typing import ChecksumAddress, HexStr +from hexbytes import HexBytes +from rest_framework import serializers +from rest_framework.exceptions import ValidationError + +import gnosis.eth.django.serializers as eth_serializers +from gnosis.eth import EthereumClientProvider +from gnosis.eth.account_abstraction import UserOperation as UserOperationClass +from gnosis.eth.utils import fast_keccak, fast_to_checksum_address +from gnosis.safe.account_abstraction import SafeOperation as SafeOperationClass +from gnosis.safe.safe_signature import SafeSignature, SafeSignatureType + +from safe_transaction_service.utils.constants import SIGNATURE_LENGTH +from safe_transaction_service.utils.ethereum import get_chain_id + +from ..utils.serializers import get_safe_owners +from .helpers import decode_init_code +from .models import SafeOperation +from .models import SafeOperation as SafeOperationModel +from .models import SafeOperationConfirmation +from .models import UserOperation as UserOperationModel + + +# ================================================ # +# Request Serializers +# ================================================ # +class SafeOperationSerializer(serializers.Serializer): + nonce = serializers.IntegerField(min_value=0) + init_code = eth_serializers.HexadecimalField(allow_null=True) + call_data = eth_serializers.HexadecimalField(allow_null=True) + call_data_gas_limit = serializers.IntegerField(min_value=0) + verification_gas_limit = serializers.IntegerField(min_value=0) + pre_verification_gas = serializers.IntegerField(min_value=0) + max_fee_per_gas = serializers.IntegerField(min_value=0) + max_priority_fee_per_gas = serializers.IntegerField(min_value=0) + paymaster_and_data = eth_serializers.HexadecimalField(allow_null=True) + signature = eth_serializers.HexadecimalField( + min_length=65, max_length=SIGNATURE_LENGTH + ) + entry_point = eth_serializers.EthereumAddressField() + # Safe Operation fields + valid_after = serializers.DateTimeField(allow_null=True) # Epoch uint48 + valid_until = serializers.DateTimeField(allow_null=True) # Epoch uint48 + module_address = eth_serializers.EthereumAddressField() + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.ethereum_client = EthereumClientProvider() + self._deployment_owners: List[ChecksumAddress] = [] + + def _get_owners(self, safe_address: ChecksumAddress) -> List[ChecksumAddress]: + """ + :param safe_address: + :return: `init_code` decoded owners if Safe is not deployed or current blockchain owners if Safe is deployed + """ + return self._deployment_owners or get_safe_owners(safe_address) + + def _validate_signature( + self, + safe_address: ChecksumAddress, + safe_operation_hash: bytes, + safe_operation_hash_preimage: bytes, + signature: bytes, + ) -> List[SafeSignature]: + safe_owners = self._get_owners(safe_address) + parsed_signatures = SafeSignature.parse_signature( + signature, safe_operation_hash, safe_operation_hash_preimage + ) + owners_processed = set() + safe_signatures = 
[] + for safe_signature in parsed_signatures: + owner = safe_signature.owner + if owner not in safe_owners: + raise ValidationError( + f"Signer={owner} is not an owner. Current owners={safe_owners}. " + f"Safe-operation-hash={safe_operation_hash.hex()}" + ) + if not safe_signature.is_valid(self.ethereum_client, safe_address): + raise ValidationError( + f"Signature={safe_signature.signature.hex()} for owner={owner} is not valid" + ) + if owner in owners_processed: + raise ValidationError(f"Signature for owner={owner} is duplicated") + + owners_processed.add(owner) + safe_signatures.append(safe_signature) + return safe_signatures + + def validate_init_code(self, init_code: Optional[HexBytes]) -> Optional[HexBytes]: + """ + Check `init_code` is not provided for already initialized contracts + + :param init_code: + :return: `init_code` + """ + safe_address = self.context["safe_address"] + safe_is_deployed = self.ethereum_client.is_contract(safe_address) + if init_code: + if safe_is_deployed: + raise ValidationError( + "`init_code` must be empty as the contract was already initialized" + ) + + try: + decoded_init_code = decode_init_code(init_code, self.ethereum_client) + except ValueError: + raise ValidationError("Cannot decode data") + if not self.ethereum_client.is_contract(decoded_init_code.factory_address): + raise ValidationError( + f"`init_code` factory-address={decoded_init_code.factory_address} is not initialized" + ) + + if decoded_init_code.expected_address != safe_address: + raise ValidationError( + f"Provided safe-address={safe_address} does not match " + f"calculated-safe-address={decoded_init_code.expected_address}" + ) + # Store owners used for deployment, to do checks afterward + self._deployment_owners = decoded_init_code.owners + elif not safe_is_deployed: + raise ValidationError( + "`init_code` was not provided and contract was not initialized" + ) + + return init_code + + def validate_module_address( + self, module_address: ChecksumAddress + ) -> ChecksumAddress: + if module_address not in settings.ETHEREUM_4337_SUPPORTED_SAFE_MODULES: + raise ValidationError( + f"Module-address={module_address} not supported, " + f"valid values are {settings.ETHEREUM_4337_SUPPORTED_SAFE_MODULES}" + ) + return module_address + + def validate_nonce(self, nonce: int) -> int: + """ + Check nonce is higher than the last executed SafeOperation + + :param nonce: + :return: `nonce` + """ + safe_address = self.context["safe_address"] + if ( + UserOperationModel.objects.filter(sender=safe_address, nonce__gte=nonce) + .exclude(ethereum_tx=None) + .exists() + ): + raise ValidationError(f"Nonce={nonce} too low for safe={safe_address}") + return nonce + + def validate_paymaster_and_data( + self, paymaster_and_data: Optional[HexBytes] + ) -> Optional[HexBytes]: + if paymaster_and_data: + if len(paymaster_and_data) < 20: + raise ValidationError( + "`paymaster_and_data` length should be at least 20 bytes" + ) + + paymaster_address = fast_to_checksum_address(paymaster_and_data[:20]) + if not self.ethereum_client.is_contract(paymaster_address): + raise ValidationError( + f"paymaster={paymaster_address} was not found in blockchain" + ) + + return paymaster_and_data + + def validate_valid_until( + self, valid_until: Optional[datetime.datetime] + ) -> Optional[datetime.datetime]: + """ + Make sure ``valid_until`` is not previous to the current timestamp + + :param valid_until: + :return: `valid_until` + """ + if valid_until and valid_until <= timezone.now(): + raise ValidationError( + "`valid_until` cannot be 
previous to the current timestamp" + ) + return valid_until + + def validate(self, attrs): + attrs = super().validate(attrs) + + valid_after, valid_until = [ + int(attrs[key].timestamp()) if attrs[key] else 0 + for key in ("valid_after", "valid_until") + ] + if valid_after and valid_until and valid_after > valid_until: + raise ValidationError("`valid_after` cannot be higher than `valid_until`") + + safe_address = self.context["safe_address"] + safe_operation = SafeOperationClass( + safe_address, + attrs["nonce"], + fast_keccak(attrs["init_code"] or b""), + fast_keccak(attrs["call_data"] or b""), + attrs["call_data_gas_limit"], + attrs["verification_gas_limit"], + attrs["pre_verification_gas"], + attrs["max_fee_per_gas"], + attrs["max_priority_fee_per_gas"], + fast_keccak(attrs["paymaster_and_data"] or b""), + valid_after, + valid_until, + attrs["entry_point"], + attrs["signature"], + ) + + module_address = attrs["module_address"] + chain_id = get_chain_id() + attrs["chain_id"] = chain_id + + safe_operation_hash = safe_operation.get_safe_operation_hash( + chain_id, module_address + ) + + if SafeOperationModel.objects.filter(hash=safe_operation_hash).exists(): + raise ValidationError( + f"SafeOperation with hash={safe_operation_hash.hex()} already exists" + ) + + safe_signatures = self._validate_signature( + safe_address, + safe_operation_hash, + safe_operation.safe_operation_hash_preimage, + attrs["signature"], + ) + if not safe_signatures: + raise ValidationError("At least one signature must be provided") + + attrs["safe_operation_hash"] = safe_operation_hash + attrs["safe_signatures"] = safe_signatures + return attrs + + @transaction.atomic + def save(self, **kwargs): + user_operation = UserOperationClass( + b"", + self.context["safe_address"], + self.validated_data["nonce"], + self.validated_data["init_code"] or b"", + self.validated_data["call_data"] or b"", + self.validated_data["call_data_gas_limit"], + self.validated_data["verification_gas_limit"], + self.validated_data["pre_verification_gas"], + self.validated_data["max_fee_per_gas"], + self.validated_data["max_priority_fee_per_gas"], + self.validated_data["paymaster_and_data"] or b"", + self.validated_data["signature"], + self.validated_data["entry_point"], + ) + + user_operation_hash = user_operation.calculate_user_operation_hash( + self.validated_data["chain_id"] + ) + + user_operation_model, created = UserOperationModel.objects.get_or_create( + hash=user_operation_hash, + defaults={ + "ethereum_tx": None, + "sender": user_operation.sender, + "nonce": user_operation.nonce, + "init_code": user_operation.init_code, + "call_data": user_operation.call_data, + "call_data_gas_limit": user_operation.call_gas_limit, + "verification_gas_limit": user_operation.verification_gas_limit, + "pre_verification_gas": user_operation.pre_verification_gas, + "max_fee_per_gas": user_operation.max_fee_per_gas, + "max_priority_fee_per_gas": user_operation.max_priority_fee_per_gas, + "paymaster": user_operation.paymaster, + "paymaster_data": user_operation.paymaster_data, + "signature": user_operation.signature, + "entry_point": user_operation.entry_point, + }, + ) + + if created: + safe_operation_model = SafeOperationModel.objects.create( + hash=self.validated_data["safe_operation_hash"], + user_operation=user_operation_model, + valid_after=self.validated_data["valid_after"], + valid_until=self.validated_data["valid_until"], + module_address=self.validated_data["module_address"], + ) + + safe_signatures = self.validated_data["safe_signatures"] + for 
safe_signature in safe_signatures: + SafeOperationConfirmation.objects.get_or_create( + safe_operation=safe_operation_model, + owner=safe_signature.owner, + defaults={ + "signature": safe_signature.export_signature(), + "signature_type": safe_signature.signature_type.value, + }, + ) + + return user_operation_model + + +# ================================================ # +# Request Serializers +# ================================================ # +class SafeOperationConfirmationResponseSerializer(serializers.Serializer): + created = serializers.DateTimeField() + modified = serializers.DateTimeField() + owner = eth_serializers.EthereumAddressField() + signature = eth_serializers.HexadecimalField() + signature_type = serializers.SerializerMethodField() + + def get_signature_type(self, obj: SafeOperationConfirmation) -> str: + return SafeSignatureType(obj.signature_type).name + + +class UserOperationResponseSerializer(serializers.Serializer): + ethereum_tx_hash = eth_serializers.HexadecimalField(source="ethereum_tx_id") + + sender = eth_serializers.EthereumAddressField() + user_operation_hash = eth_serializers.HexadecimalField(source="hash") + nonce = serializers.IntegerField(min_value=0) + init_code = eth_serializers.HexadecimalField(allow_null=True) + call_data = eth_serializers.HexadecimalField(allow_null=True) + call_data_gas_limit = serializers.IntegerField(min_value=0) + verification_gas_limit = serializers.IntegerField(min_value=0) + pre_verification_gas = serializers.IntegerField(min_value=0) + max_fee_per_gas = serializers.IntegerField(min_value=0) + max_priority_fee_per_gas = serializers.IntegerField(min_value=0) + paymaster = eth_serializers.EthereumAddressField(allow_null=True) + paymaster_data = eth_serializers.HexadecimalField(allow_null=True) + signature = eth_serializers.HexadecimalField() + entry_point = eth_serializers.EthereumAddressField() + + +class SafeOperationResponseSerializer(serializers.Serializer): + created = serializers.DateTimeField() + modified = serializers.DateTimeField() + safe_operation_hash = eth_serializers.HexadecimalField(source="hash") + + valid_after = serializers.DateTimeField() + valid_until = serializers.DateTimeField() + module_address = eth_serializers.EthereumAddressField() + + confirmations = serializers.SerializerMethodField() + prepared_signature = serializers.SerializerMethodField() + + def get_confirmations(self, obj: SafeOperation) -> Dict[str, Any]: + """ + Filters confirmations queryset + + :param obj: SafeOperation instance + :return: Serialized queryset + """ + return SafeOperationConfirmationResponseSerializer( + obj.confirmations, many=True + ).data + + def get_prepared_signature(self, obj: SafeOperation) -> Optional[HexStr]: + """ + Prepared signature sorted + + :param obj: SafeOperation instance + :return: Serialized queryset + """ + signature = HexBytes(obj.build_signature()) + return signature.hex() if signature else None + + +class SafeOperationWithUserOperationResponseSerializer(SafeOperationResponseSerializer): + user_operation = UserOperationResponseSerializer(many=False, read_only=True) + + +class UserOperationWithSafeOperationResponseSerializer(UserOperationResponseSerializer): + safe_operation = SafeOperationResponseSerializer( + many=False, read_only=True, allow_null=True + ) diff --git a/safe_transaction_service/account_abstraction/services/__init__.py b/safe_transaction_service/account_abstraction/services/__init__.py new file mode 100644 index 00000000..f2618476 --- /dev/null +++ 
b/safe_transaction_service/account_abstraction/services/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa F401 + +from .aa_processor_service import AaProcessorService, get_aa_processor_service diff --git a/safe_transaction_service/account_abstraction/services/aa_processor_service.py b/safe_transaction_service/account_abstraction/services/aa_processor_service.py new file mode 100644 index 00000000..4755edd2 --- /dev/null +++ b/safe_transaction_service/account_abstraction/services/aa_processor_service.py @@ -0,0 +1,399 @@ +import logging +from functools import cache +from typing import List, Optional, Sequence, Tuple + +from django.conf import settings +from django.db import transaction + +from eth_typing import ChecksumAddress, HexStr +from hexbytes import HexBytes +from web3.types import LogReceipt + +from gnosis.eth import EthereumClient, EthereumClientProvider +from gnosis.eth.account_abstraction import ( + BundlerClient, + BundlerClientException, + UserOperation, + UserOperationReceipt, + UserOperationV07, +) +from gnosis.eth.utils import fast_to_checksum_address +from gnosis.safe.account_abstraction import SafeOperation +from gnosis.safe.safe_signature import SafeSignature + +from safe_transaction_service.history import models as history_models + +from ..constants import USER_OPERATION_EVENT_TOPIC, USER_OPERATION_NUMBER_TOPICS +from ..models import SafeOperation as SafeOperationModel +from ..models import SafeOperationConfirmation as SafeOperationConfirmationModel +from ..models import UserOperation as UserOperationModel +from ..models import UserOperationReceipt as UserOperationReceiptModel +from ..utils import get_bundler_client + +logger = logging.getLogger(__name__) + + +class AaProcessorServiceException(Exception): + pass + + +class UserOperationNotSupportedException(Exception): + pass + + +@cache +def get_aa_processor_service() -> "AaProcessorService": + ethereum_client = EthereumClientProvider() + bundler_client = get_bundler_client() + if not bundler_client: + logger.warning("Ethereum 4337 bundler client was not configured") + supported_entry_points = settings.ETHEREUM_4337_SUPPORTED_ENTRY_POINTS + return AaProcessorService(ethereum_client, bundler_client, supported_entry_points) + + +class AaProcessorService: + """ + Account Abstraction Transaction Processor + + From ``EthereumTxs`` it can detect and index ``SafeOperations`` + """ + + def __init__( + self, + ethereum_client: EthereumClient, + bundler_client: Optional[BundlerClient], + supported_entry_points: Sequence[ChecksumAddress], + ): + self.ethereum_client = ethereum_client + self.bundler_client = bundler_client + self.supported_entry_points = supported_entry_points + + def get_user_operation_hashes_from_logs( + self, safe_address: ChecksumAddress, logs: [Sequence[LogReceipt]] + ) -> List[HexBytes]: + """ + :param safe_address: + :param logs: + :return: ``UserOperations`` hashes if detected + """ + return [ + HexBytes(log["topics"][1]) + for log in logs + if ( + len(log["topics"]) == USER_OPERATION_NUMBER_TOPICS + and HexBytes(log["topics"][0]) == USER_OPERATION_EVENT_TOPIC + and fast_to_checksum_address(log["address"]) + in self.supported_entry_points # Only index supported entryPoints + and fast_to_checksum_address(log["topics"][2][-40:]) + == safe_address # Check sender + ) + ] + + def is_user_operation_indexed(self, user_operation_hash: HexStr) -> bool: + """ + If Receipt is stored, transaction has already been indexed + + :param user_operation_hash: + :return: ``True`` if indexed, ``False`` otherwise + """ + return 
UserOperationReceiptModel.objects.filter( + user_operation__hash=user_operation_hash + ).exists() + + def index_safe_operation_confirmations( + self, + signature: bytes, + safe_operation_model: SafeOperationModel, + safe_operation: SafeOperation, + ) -> List[SafeOperationConfirmationModel]: + """ + Creates missing ``SafeOperationConfirmations`` + + :param signature: + :param safe_operation_model: + :param safe_operation: + :return: List of ``SafeOperationConfirmationModel`` created (even if they were already in the database) + """ + parsed_signatures = SafeSignature.parse_signature( + signature, + safe_operation_model.hash, + safe_operation.safe_operation_hash_preimage, + ) + + safe_operation_confirmations = [] + for parsed_signature in parsed_signatures: + safe_operation_confirmation, _ = ( + SafeOperationConfirmationModel.objects.get_or_create( + safe_operation=safe_operation_model, + owner=parsed_signature.owner, + defaults={ + "signature": parsed_signature.export_signature(), + "signature_type": parsed_signature.signature_type.value, + }, + ) + ) + safe_operation_confirmations.append(safe_operation_confirmation) + return safe_operation_confirmations + + def index_safe_operation( + self, + user_operation_model: UserOperationModel, + user_operation: UserOperation, + user_operation_receipt: UserOperationReceipt, + ) -> Optional[Tuple[SafeOperationModel, SafeOperation]]: + """ + Creates or updates a Safe Operation + + :param user_operation_model: Required due to the ForeignKey to ``UserOperation`` + :param user_operation: To build SafeOperation from + :param user_operation_receipt: For detecting the Safe module address + :return: Tuple with ``SafeOperationModel`` stored in Database and ``SafeOperation`` + """ + + if not (module_address := user_operation_receipt.get_module_address()): + # The UserOperation is indexed because a UserOperationEvent was emitted, so the + # `nonce` was increased and the UserOperation must be stored anyway. Log the information + # so edge cases are easy to debug, as the 4337 EntryPoint is still a work in progress. 
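+ # Note: ``user_operation_receipt.get_module_address()`` recovers the Safe module from the + # ExecutionFromModuleSuccess / ExecutionFromModuleFailure events in the receipt logs; that + # module address is needed below to compute the ``SafeOperation`` hash.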
+ logger.info( + "[%s] Cannot find ExecutionFromModuleSuccess or ExecutionFromModuleFailure " + "events for user-operation-hash=%s , it seems like UserOperation was reverted", + user_operation_model.sender, + user_operation.user_operation_hash.hex(), + ) + if user_operation_receipt.get_deployed_account(): + # UserOperation `initCode` was executed but `callData` failed, so account was deployed but + # SafeOperation was reverted + logger.info( + "[%s] user-operation-hash=%s was reverted but contract was deployed", + user_operation_model.sender, + user_operation.user_operation_hash.hex(), + ) + # As `module_address` cannot be detected there's not enough data to index the SafeOperation + return None + + # Build SafeOperation from UserOperation + safe_operation = SafeOperation.from_user_operation(user_operation) + + safe_operation_hash = safe_operation.get_safe_operation_hash( + self.ethereum_client.get_chain_id(), module_address + ) + + # Store SafeOperation + safe_operation_model, created = SafeOperationModel.objects.get_or_create( + hash=safe_operation_hash, + defaults={ + "user_operation": user_operation_model, + "valid_after": safe_operation.valid_after_as_datetime, + "valid_until": safe_operation.valid_until_as_datetime, + "module_address": module_address, + }, + ) + if not created: + logger.debug( + "[%s] safe-operation-hash=%s for user-operation-hash=%s was already indexed", + user_operation_model.sender, + HexBytes(safe_operation_hash).hex(), + user_operation.user_operation_hash.hex(), + ) + self.index_safe_operation_confirmations( + HexBytes(safe_operation.signature), safe_operation_model, safe_operation + ) + return safe_operation_model, safe_operation + + def index_user_operation_receipt( + self, user_operation_model: UserOperationModel + ) -> Tuple[UserOperationReceiptModel, UserOperationReceipt]: + """ + Stores UserOperationReceipt. 
Can never be updated as if ``UserOperationReceipt`` is on database indexing + ``UserOperation`` is not required + + :param user_operation_model: Required due to the ForeignKey to ``UserOperation`` + :return: Tuple with ``UserOperation`` and ``UserOperationReceipt`` + """ + safe_address = user_operation_model.sender + user_operation_hash = HexBytes(user_operation_model.hash).hex() + tx_hash = HexBytes(user_operation_model.ethereum_tx_id).hex() + logger.debug( + "[%s] Retrieving UserOperation Receipt with user-operation-hash=%s on tx-hash=%s", + safe_address, + user_operation_hash, + tx_hash, + ) + user_operation_receipt = self.bundler_client.get_user_operation_receipt( + user_operation_hash + ) + if not user_operation_receipt.success: + logger.info( + "[%s] UserOperation user-operation-hash=%s on tx-hash=%s failed, indexing either way", + safe_address, + user_operation_hash, + tx_hash, + ) + + # Use event `Deposited (index_topic_1 address account, uint256 totalDeposit)` + # to get deposited funds + deposited = user_operation_receipt.get_deposit() + + logger.debug( + "[%s] Storing UserOperation Receipt with user-operation=%s on tx-hash=%s", + safe_address, + user_operation_hash, + tx_hash, + ) + + # Cut reason if longer than `max_length` + reason = ( + user_operation_receipt.reason[ + : UserOperationReceiptModel._meta.get_field("reason").max_length + ] + if user_operation_receipt.reason + else "" + ) + return ( + UserOperationReceiptModel.objects.create( + user_operation=user_operation_model, + actual_gas_cost=user_operation_receipt.actual_gas_cost, + actual_gas_used=user_operation_receipt.actual_gas_used, + success=user_operation_receipt.success, + reason=reason, + deposited=deposited, + ), + user_operation_receipt, + ) + + @transaction.atomic + def index_user_operation( + self, + safe_address: ChecksumAddress, + user_operation_hash: HexBytes, + ethereum_tx: history_models.EthereumTx, + ) -> Tuple[UserOperationModel, UserOperation]: + """ + Index ``UserOperation``, ``SafeOperation`` and ``UserOperationReceipt`` for the given ``UserOperation`` log + + :param safe_address: to prevent indexing UserOperations from other address + :param user_operation_hash: hash for the ``UserOperation`` + :param ethereum_tx: Stored EthereumTx in database containing the ``UserOperation`` + :return: tuple of ``UserOperationModel`` and ``UserOperation`` + """ + user_operation_hash_hex = user_operation_hash.hex() + # If the UserOperationReceipt is present, UserOperation was already processed and mined + if self.is_user_operation_indexed(user_operation_hash_hex): + logger.warning( + "[%s] user-operation-hash=%s receipt was already indexed", + safe_address, + user_operation_hash_hex, + ) + else: + logger.debug( + "[%s] Retrieving UserOperation from Bundler with user-operation-hash=%s on tx-hash=%s", + safe_address, + user_operation_hash_hex, + ethereum_tx.tx_hash, + ) + user_operation = self.bundler_client.get_user_operation_by_hash( + user_operation_hash_hex + ) + if not user_operation: + self.bundler_client.get_user_operation_by_hash.cache_clear() + raise BundlerClientException( + f"user-operation={user_operation_hash_hex} returned `null`" + ) + if isinstance(user_operation, UserOperationV07): + raise UserOperationNotSupportedException( + f"user-operation={user_operation_hash_hex} for EntryPoint v0.7.0 is not supported" + ) + + try: + user_operation_model = UserOperationModel.objects.get( + hash=user_operation_hash_hex + ) + logger.debug( + "[%s] Updating UserOperation with user-operation=%s on tx-hash=%s", + 
safe_address, + user_operation_hash_hex, + ethereum_tx.tx_hash, + ) + user_operation_model.signature = user_operation.signature + user_operation_model.ethereum_tx = ethereum_tx + user_operation_model.save(update_fields=["signature", "ethereum_tx"]) + except UserOperationModel.DoesNotExist: + logger.debug( + "[%s] Storing UserOperation with user-operation=%s on tx-hash=%s", + safe_address, + user_operation_hash_hex, + ethereum_tx.tx_hash, + ) + user_operation_model = UserOperationModel.objects.create( + ethereum_tx=ethereum_tx, + hash=user_operation_hash_hex, + sender=user_operation.sender, + nonce=user_operation.nonce, + init_code=user_operation.init_code, + call_data=user_operation.call_data, + call_data_gas_limit=user_operation.call_gas_limit, + verification_gas_limit=user_operation.verification_gas_limit, + pre_verification_gas=user_operation.pre_verification_gas, + max_fee_per_gas=user_operation.max_fee_per_gas, + max_priority_fee_per_gas=user_operation.max_priority_fee_per_gas, + paymaster=user_operation.paymaster, + paymaster_data=user_operation.paymaster_data, + signature=user_operation.signature, + entry_point=user_operation.entry_point, + ) + + _, user_operation_receipt = self.index_user_operation_receipt( + user_operation_model + ) + self.index_safe_operation( + user_operation_model, user_operation, user_operation_receipt + ) + + return user_operation_model, user_operation + + def process_aa_transaction( + self, safe_address: ChecksumAddress, ethereum_tx: history_models.EthereumTx + ) -> int: + """ + Check if transaction contains any 4337 UserOperation for the provided `safe_address`. + Function is cached to prevent reprocessing the same transaction. + + :param safe_address: Sender to check in UserOperation + :param ethereum_tx: EthereumTx to check for UserOperations + :return: Number of detected ``UserOperations`` in transaction + """ + user_operation_hashes = self.get_user_operation_hashes_from_logs( + safe_address, ethereum_tx.logs + ) + number_detected_user_operations = len(user_operation_hashes) + if not self.bundler_client: + logger.debug( + "Detected 4337 User Operation but bundler client was not configured" + ) + return number_detected_user_operations + + for user_operation_hash in user_operation_hashes: + try: + self.index_user_operation( + safe_address, user_operation_hash, ethereum_tx + ) + except UserOperationNotSupportedException as exc: + logger.error( + "[%s] Error processing user-operation: %s", + safe_address, + exc, + ) + except BundlerClientException as exc: + logger.error( + "[%s] Error retrieving user-operation from bundler API: %s", + safe_address, + exc, + ) + except AaProcessorServiceException as exc: + logger.error( + "[%s] Error processing user-operation: %s", + safe_address, + exc, + ) + + return number_detected_user_operations diff --git a/safe_transaction_service/account_abstraction/tests/__init__.py b/safe_transaction_service/account_abstraction/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/account_abstraction/tests/factories.py b/safe_transaction_service/account_abstraction/tests/factories.py new file mode 100644 index 00000000..074e3d2f --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/factories.py @@ -0,0 +1,73 @@ +from django.conf import settings +from django.utils import timezone + +import factory +from eth_account import Account +from factory.django import DjangoModelFactory + +from gnosis.eth.constants import NULL_ADDRESS +from gnosis.eth.utils import fast_keccak_text 
+from gnosis.safe.safe_signature import SafeSignatureType + +from safe_transaction_service.history.tests import factories as history_factories + +from .. import models + + +class UserOperationFactory(DjangoModelFactory): + class Meta: + model = models.UserOperation + + hash = factory.Sequence(lambda n: fast_keccak_text(f"user-operation-{n}").hex()) + ethereum_tx = factory.SubFactory(history_factories.EthereumTxFactory) + sender = factory.LazyFunction(lambda: Account.create().address) + nonce = factory.Sequence(lambda n: n) + init_code = b"" + call_data = b"" + call_data_gas_limit = factory.fuzzy.FuzzyInteger(50_000, 200_000) + verification_gas_limit = factory.fuzzy.FuzzyInteger(30_000, 50_000) + pre_verification_gas = factory.fuzzy.FuzzyInteger(20_000, 30_000) + max_fee_per_gas = factory.fuzzy.FuzzyInteger(20, 50) + max_priority_fee_per_gas = factory.fuzzy.FuzzyInteger(0, 10) + paymaster = NULL_ADDRESS + paymaster_data = b"" + signature = b"" + entry_point = settings.ETHEREUM_4337_SUPPORTED_ENTRY_POINTS[0] + + +class UserOperationReceiptFactory(DjangoModelFactory): + class Meta: + model = models.UserOperationReceipt + + user_operation = factory.SubFactory(UserOperationFactory) + actual_gas_cost = factory.fuzzy.FuzzyInteger(20, 50) + actual_gas_used = factory.fuzzy.FuzzyInteger(100, 200) + success = True + reason = "" + deposited = factory.fuzzy.FuzzyInteger(500, 1_000) + + +class SafeOperationFactory(DjangoModelFactory): + class Meta: + model = models.SafeOperation + + hash = factory.Sequence(lambda n: fast_keccak_text(f"safe-operation-{n}").hex()) + user_operation = factory.SubFactory(UserOperationFactory) + valid_after = factory.LazyFunction(timezone.now) + valid_until = factory.LazyFunction(timezone.now) + module_address = factory.LazyFunction(lambda: Account.create().address) + + +class SafeOperationConfirmationFactory(DjangoModelFactory): + class Meta: + model = models.SafeOperationConfirmation + + class Params: + signing_owner = Account.create() + + safe_operation = factory.SubFactory(SafeOperationFactory) + owner = factory.LazyAttribute(lambda o: o.signing_owner.address) + signature = factory.LazyAttribute( + lambda o: o.signing_owner.signHash(o.safe_operation.hash)["signature"] + ) + signature_type = SafeSignatureType.EOA.value diff --git a/safe_transaction_service/account_abstraction/tests/mocks/__init__.py b/safe_transaction_service/account_abstraction/tests/mocks/__init__.py new file mode 100644 index 00000000..0f4accc7 --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/mocks/__init__.py @@ -0,0 +1,8 @@ +# flake8: noqa F401 +from .aa_tx_receipt_mock import ( + aa_chain_id, + aa_expected_safe_operation_hash, + aa_expected_user_operation_hash, + aa_safe_address, + aa_tx_receipt_mock, +) diff --git a/safe_transaction_service/account_abstraction/tests/mocks/aa_tx_receipt_mock.py b/safe_transaction_service/account_abstraction/tests/mocks/aa_tx_receipt_mock.py new file mode 100644 index 00000000..98ea7ab1 --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/mocks/aa_tx_receipt_mock.py @@ -0,0 +1,293 @@ +from hexbytes import HexBytes + +aa_chain_id = 11155111 + +aa_safe_address = "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861" + +aa_expected_user_operation_hash = HexBytes( + "0x39b3e2171c04539d9b3f848d04364dfaa42cc0b412ff65ce2a85c566cf8bf281" +) +aa_expected_safe_operation_hash = HexBytes( + "0xb34556b3564ad04e472ca0f846afe44e0cfff8ceb0f94302792fdd1b9aff1351" +) + + +aa_tx_receipt_mock = { + "blockHash": HexBytes( + 
"0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "blockNumber": 5288154, + "contractAddress": None, + "cumulativeGasUsed": 13804372, + "effectiveGasPrice": 176552365, + "from": "0xd53Eb5203e367BbDD4f72338938224881Fc501Ab", + "gasUsed": 424992, + "logs": [ + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0xecdf3a3effea5783a3c4c2140e677577666428d44ed9d474a0b3a4c9943f8440" + ), + HexBytes( + "0x000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b4037" + ), + ], + "data": HexBytes("0x"), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 194, + "removed": False, + }, + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0x141df868a6331af528e38c83b7aa03edc19be66e37ae67f9285bf4f8e3c6a1a8" + ), + HexBytes( + "0x0000000000000000000000004e1dcf7ad4e460cfd30791ccc4f9c8a4f820ec67" + ), + ], + "data": HexBytes( + "0x000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000010000000000000000000000008ecd4ec46d4d2a6b64fe960b3d64e8b94b2234eb000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b403700000000000000000000000000000000000000000000000000000000000000010000000000000000000000005ac255889882acd3da2aa939679e3f3d4cea221e" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 195, + "removed": False, + }, + { + "address": "0x4e1DCf7AD4e460CfD30791CCC4F9c8a4f820ec67", + "topics": [ + HexBytes( + "0x4f51faf6c4561ff95f067657e43439f0f856d97c04d9ec9070a6199ad418e235" + ), + HexBytes( + "0x000000000000000000000000b0b5c0578aa134b0496a6c0e51a7aae47c522861" + ), + ], + "data": HexBytes( + "0x00000000000000000000000029fcb43b46531bca003ddc8fcb67ffe91900c762" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 196, + "removed": False, + }, + { + "address": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", + "topics": [ + HexBytes( + "0xd51a9c61267aa6196961883ecf5ff2da6619c37dac0fa92122513fb32c032d2d" + ), + HexBytes( + "0x39b3e2171c04539d9b3f848d04364dfaa42cc0b412ff65ce2a85c566cf8bf281" + ), + HexBytes( + "0x000000000000000000000000b0b5c0578aa134b0496a6c0e51a7aae47c522861" + ), + ], + "data": HexBytes( + "0x0000000000000000000000004e1dcf7ad4e460cfd30791ccc4f9c8a4f820ec670000000000000000000000000000000000000000000000000000000000000000" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 197, + "removed": False, + }, + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0xb648d3644f584ed1c2232d53c46d87e693586486ad0d1175f8656013110b714e" + ) + ], + "data": HexBytes( + 
"0x000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b40370000000000000000000000005ff137d4b0fdcd49dca30c7cf57e578a026d27890000000000000000000000000000000000000000000000000002b32962c0bb8400000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 198, + "removed": False, + }, + { + "address": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", + "topics": [ + HexBytes( + "0x2da466a7b24304f47e87fa2e1e5a81b9831ce54fec19055ce277ca2f39ba42c4" + ), + HexBytes( + "0x000000000000000000000000b0b5c0578aa134b0496a6c0e51a7aae47c522861" + ), + ], + "data": HexBytes( + "0x0000000000000000000000000000000000000000000000000002b32962c0bb84" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 199, + "removed": False, + }, + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0x6895c13664aa4f67288b25d7a21d7aaa34916e355fb9b6fae0a139a9085becb8" + ), + HexBytes( + "0x000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b4037" + ), + ], + "data": HexBytes("0x"), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 200, + "removed": False, + }, + { + "address": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", + "topics": [ + HexBytes( + "0xbb47ee3e183a558b1a2ff0874b079f3fc5478b7454eacf2bfc5af2ff5878f972" + ) + ], + "data": HexBytes("0x"), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 201, + "removed": False, + }, + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0xb648d3644f584ed1c2232d53c46d87e693586486ad0d1175f8656013110b714e" + ) + ], + "data": HexBytes( + "0x000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b403700000000000000000000000002270bd144e70ce6963ba02f575776a16184e1e600000000000000000000000000000000000000000000000000005af3107a400000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 202, + "removed": False, + }, + { + "address": "0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + "topics": [ + HexBytes( + "0x6895c13664aa4f67288b25d7a21d7aaa34916e355fb9b6fae0a139a9085becb8" + ), + HexBytes( + 
"0x000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b4037" + ), + ], + "data": HexBytes("0x"), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 203, + "removed": False, + }, + { + "address": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", + "topics": [ + HexBytes( + "0x49628fd1471006c1482da88028e9ce4dbb080b815c9b0344d39e5a8e6ec1419f" + ), + HexBytes( + "0x39b3e2171c04539d9b3f848d04364dfaa42cc0b412ff65ce2a85c566cf8bf281" + ), + HexBytes( + "0x000000000000000000000000b0b5c0578aa134b0496a6c0e51a7aae47c522861" + ), + HexBytes( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + ], + "data": HexBytes( + "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000001c7f432341e240000000000000000000000000000000000000000000000000000000000068072" + ), + "blockNumber": 5288154, + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "blockHash": HexBytes( + "0xcc466b284f4030ee3f5941a2c8e36892262bf583611c902fe5558a595af47e13" + ), + "logIndex": 204, + "removed": False, + }, + ], + "logsBloom": HexBytes( + "0x080004000000900000000000000000008000000000000000000000000200000000080000000000000002200100000000001000000000000080000200000000000000100000000000000000000000000000000000000004080040000000002000000000000a00000005000000000008000000000001000000000000000002000008000120204002000000000000400000000002000004000000000000000000000000000000000010005000000000000002000000000000000000020000000000000000000000000000010000000000000000000000200000000000000000200000400c0000010000000000000008100220000000000000080000000000000000" + ), + "status": 1, + "to": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", + "transactionHash": HexBytes( + "0xf8dab30ed3c8814ee9a67770ee68f8fb83e6247706c24371a76e7cd8d348b0e3" + ), + "transactionIndex": 133, + "type": 2, +} diff --git a/safe_transaction_service/account_abstraction/tests/services/__init__.py b/safe_transaction_service/account_abstraction/tests/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/account_abstraction/tests/services/test_aa_processor_service.py b/safe_transaction_service/account_abstraction/tests/services/test_aa_processor_service.py new file mode 100644 index 00000000..b06606ac --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/services/test_aa_processor_service.py @@ -0,0 +1,156 @@ +from unittest import mock +from unittest.mock import MagicMock + +from django.test import TestCase + +from eth_account import Account + +from gnosis.eth import EthereumClient +from gnosis.eth.account_abstraction import BundlerClient +from gnosis.eth.account_abstraction import UserOperation as UserOperationClass +from gnosis.eth.account_abstraction import ( + UserOperationReceipt as UserOperationReceiptClass, +) +from gnosis.eth.tests.mocks.mock_bundler import ( + safe_4337_user_operation_hash_mock, + user_operation_mock, + user_operation_receipt_mock, + user_operation_v07_hash, + user_operation_v07_mock, +) + +from safe_transaction_service.account_abstraction.services import ( + get_aa_processor_service, +) +from safe_transaction_service.history.tests import 
factories as history_factories +from safe_transaction_service.history.utils import clean_receipt_log + +from ...models import SafeOperation as SafeOperationModel +from ...models import SafeOperationConfirmation as SafeOperationConfirmationModel +from ...models import UserOperation as UserOperationModel +from ...models import UserOperationReceipt as UserOperationReceiptModel +from ...services.aa_processor_service import UserOperationNotSupportedException +from ...utils import get_bundler_client +from ..mocks import ( + aa_chain_id, + aa_expected_safe_operation_hash, + aa_expected_user_operation_hash, + aa_safe_address, + aa_tx_receipt_mock, +) + + +class TestAaProcessorService(TestCase): + + def setUp(self): + super().setUp() + get_bundler_client.cache_clear() + get_aa_processor_service.cache_clear() + with self.settings(ETHEREUM_4337_BUNDLER_URL="https://localhost"): + # Bundler must be defined so it's initialized and it can be mocked + self.aa_processor_service = get_aa_processor_service() + self.assertIsNotNone(self.aa_processor_service.bundler_client) + + def tearDown(self): + super().tearDown() + get_bundler_client.cache_clear() + get_aa_processor_service.cache_clear() + + @mock.patch.object( + BundlerClient, + "get_user_operation_receipt", + autospec=True, + return_value=UserOperationReceiptClass.from_bundler_response( + user_operation_receipt_mock["result"] + ), + ) + @mock.patch.object( + BundlerClient, + "get_user_operation_by_hash", + autospec=True, + return_value=UserOperationClass.from_bundler_response( + safe_4337_user_operation_hash_mock.hex(), user_operation_mock["result"] + ), + ) + @mock.patch.object( + EthereumClient, + "get_chain_id", + autospec=True, + return_value=aa_chain_id, # Needed for hashes to match + ) + def test_process_aa_transaction( + self, + get_chain_id_mock: MagicMock, + get_user_operation_by_hash_mock: MagicMock, + get_user_operation_receipt_mock: MagicMock, + ): + ethereum_tx = history_factories.EthereumTxFactory( + logs=[clean_receipt_log(log) for log in aa_tx_receipt_mock["logs"]] + ) + self.aa_processor_service.process_aa_transaction(aa_safe_address, ethereum_tx) + + user_operation_model = UserOperationModel.objects.get() + safe_operation_model = SafeOperationModel.objects.get() + user_operation_receipt_model = UserOperationReceiptModel.objects.get() + user_operation_confirmation_model = SafeOperationConfirmationModel.objects.get() + + self.assertEqual( + user_operation_model.hash, aa_expected_user_operation_hash.hex() + ) + self.assertEqual( + safe_operation_model.hash, aa_expected_safe_operation_hash.hex() + ) + self.assertEqual(user_operation_receipt_model.deposited, 759940285250436) + self.assertEqual( + user_operation_confirmation_model.owner, + "0x5aC255889882aCd3da2aA939679E3f3d4cea221e", + ) + + @mock.patch.object( + BundlerClient, + "get_user_operation_receipt", + autospec=True, + return_value=UserOperationReceiptClass.from_bundler_response( + user_operation_receipt_mock["result"] + ), + ) + @mock.patch.object( + BundlerClient, + "get_user_operation_by_hash", + autospec=True, + return_value=UserOperationClass.from_bundler_response( + user_operation_v07_hash.hex(), user_operation_v07_mock["result"] + ), + ) + @mock.patch.object( + EthereumClient, + "get_chain_id", + autospec=True, + return_value=aa_chain_id, # Needed for hashes to match + ) + def test_process_aa_transaction_entrypoint_V07( + self, + get_chain_id_mock: MagicMock, + get_user_operation_by_hash_mock: MagicMock, + get_user_operation_receipt_mock: MagicMock, + ): + """ + Entrypoint 
v0.7.0 endpoints should be ignored + """ + ethereum_tx = history_factories.EthereumTxFactory( + logs=[clean_receipt_log(log) for log in aa_tx_receipt_mock["logs"]] + ) + with self.assertRaisesMessage( + UserOperationNotSupportedException, "for EntryPoint v0.7.0 is not supported" + ): + self.aa_processor_service.index_user_operation( + Account.create().address, # Not relevant + user_operation_v07_hash, + ethereum_tx, + ) + + self.aa_processor_service.process_aa_transaction(aa_safe_address, ethereum_tx) + self.assertEqual(UserOperationModel.objects.count(), 0) + self.assertEqual(SafeOperationModel.objects.count(), 0) + self.assertEqual(UserOperationReceiptModel.objects.count(), 0) + self.assertEqual(SafeOperationConfirmationModel.objects.count(), 0) diff --git a/safe_transaction_service/account_abstraction/tests/test_commands.py b/safe_transaction_service/account_abstraction/tests/test_commands.py new file mode 100644 index 00000000..2a2838fa --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/test_commands.py @@ -0,0 +1,48 @@ +from io import StringIO +from unittest import mock + +from django.core.management import call_command +from django.test import TestCase + +from eth_account import Account + +from safe_transaction_service.history.tests.factories import EthereumTxFactory +from safe_transaction_service.history.utils import clean_receipt_log + +from ..services import AaProcessorService +from .mocks import aa_safe_address, aa_tx_receipt_mock + + +class TestCommands(TestCase): + def test_reindex_4337(self): + command = "reindex_4337" + + buf = StringIO() + call_command(command, stdout=buf) + self.assertIn("Reindexed 0 UserOperations", buf.getvalue()) + + # Insert a 4337 transaction + ethereum_tx = EthereumTxFactory( + logs=[clean_receipt_log(log) for log in aa_tx_receipt_mock["logs"]] + ) + + # Test command with and without `addresses` flag + for commands in ([command], [command, f"--addresses={aa_safe_address}"]): + with mock.patch.object( + AaProcessorService, "process_aa_transaction", return_value=1 + ) as process_aa_transaction_mock: + buf = StringIO() + call_command(*commands, stdout=buf) + process_aa_transaction_mock.assert_called_once_with( + aa_safe_address, ethereum_tx + ) + self.assertIn("Reindexed 1 UserOperations", buf.getvalue()) + + with mock.patch.object( + AaProcessorService, "process_aa_transaction", return_value=1 + ) as process_aa_transaction_mock: + buf = StringIO() + random_address = Account.create().address.lower() # Test not checksummed + call_command(command, f"--addresses={random_address}", stdout=buf) + process_aa_transaction_mock.assert_not_called() + self.assertIn("Reindexed 0 UserOperations", buf.getvalue()) diff --git a/safe_transaction_service/account_abstraction/tests/test_helpers.py b/safe_transaction_service/account_abstraction/tests/test_helpers.py new file mode 100644 index 00000000..fcb94650 --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/test_helpers.py @@ -0,0 +1,38 @@ +from django.test import TestCase + +from hexbytes import HexBytes + +from gnosis.eth.tests.mocks.mock_bundler import user_operation_mock +from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin + +from ..helpers import DecodedInitCode, decode_init_code + + +class TestAccountAbstractionHelpers(SafeTestCaseMixin, TestCase): + def test_decode_init_code(self): + with self.assertRaises(ValueError): + decode_init_code(b"", self.ethereum_client) + + expected = DecodedInitCode( + factory_address="0x4e1DCf7AD4e460CfD30791CCC4F9c8a4f820ec67", + 
factory_data=HexBytes( + "0x1688f0b900000000000000000000000029fcb43b46531bca003ddc8fcb67ffe91900c7620000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001e4b63e800d000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000008ecd4ec46d4d2a6b64fe960b3d64e8b94b2234eb0000000000000000000000000000000000000000000000000000000000000140000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b403700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000005ac255889882acd3da2aa939679e3f3d4cea221e00000000000000000000000000000000000000000000000000000000000000648d0dc49f00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000a581c4a4db7175302464ff3c06380bc3270b40370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ), + initializer=b"\xb6>\x80\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8e\xcdN\xc4mM*kd\xfe\x96\x0b=d\xe8\xb9K\"4\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa5\x81\xc4\xa4\xdbqu0$d\xff<\x068\x0b\xc3'\x0b@7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\xc2U\x88\x98\x82\xac\xd3\xda*\xa99g\x9e?=L\xea\"\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00d\x8d\r\xc4\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa5\x81\xc4\xa4\xdbqu0$d\xff<\x068\x0b\xc3'\x0b@7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + singleton="0x29fcB43b46531BcA003ddC8FCB67FFE91900C762", + salt_nonce=0, + expected_address="0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861", + owners=["0x5aC255889882aCd3da2aA939679E3f3d4cea221e"], + threshold=1, + to="0x8EcD4ec46D4D2a6B64fE960B3D64e8B94B2234eb", + data=b"\x8d\r\xc4\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa5\x81\xc4\xa4\xdbqu0$d\xff<\x068\x0b\xc3'\x0b@7", + fallback_handler="0xa581c4A4DB7175302464fF3C06380BC3270b4037", + payment_token="0x0000000000000000000000000000000000000000", + payment=0, + payment_receiver="0x0000000000000000000000000000000000000000", + ) + result = decode_init_code( + HexBytes(user_operation_mock["result"]["userOperation"]["initCode"]), + self.ethereum_client, + ) + self.assertEqual(result, expected) diff --git a/safe_transaction_service/account_abstraction/tests/test_models.py b/safe_transaction_service/account_abstraction/tests/test_models.py new file mode 100644 index 00000000..79755699 --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/test_models.py @@ -0,0 +1,77 @@ +from django.test import TestCase + +from gnosis.eth.account_abstraction import UserOperation as UserOperationClass +from gnosis.eth.tests.mocks.mock_bundler import ( + safe_4337_module_address_mock, + safe_4337_safe_operation_hash_mock, + safe_4337_user_operation_hash_mock, + user_operation_mock, +) +from gnosis.safe.account_abstraction import SafeOperation as SafeOperationClass + +from safe_transaction_service.history.tests import factories as history_factories + +from ..models import SafeOperation as SafeOperationModel +from ..models import UserOperation as UserOperationModel +from .factories import SafeOperationConfirmationFactory + + +class TestModels(TestCase): + def test_user_operation(self): + expected_user_operation_hash = safe_4337_user_operation_hash_mock + expected_user_operation = UserOperationClass.from_bundler_response( + expected_user_operation_hash.hex(), user_operation_mock["result"] + ) + expected_safe_operation = SafeOperationClass.from_user_operation( + expected_user_operation + ) + expected_safe_operation_hash = safe_4337_safe_operation_hash_mock + expected_module_address = safe_4337_module_address_mock + + ethereum_tx = history_factories.EthereumTxFactory( + tx_hash=user_operation_mock["result"]["transactionHash"], + block__block_hash=user_operation_mock["result"]["blockHash"], + block__number=int(user_operation_mock["result"]["blockNumber"], 16), + ) + user_operation_model: UserOperationModel = UserOperationModel.objects.create( + ethereum_tx=ethereum_tx, + hash=expected_user_operation_hash, + sender=expected_user_operation.sender, + nonce=expected_user_operation.nonce, + init_code=expected_user_operation.init_code, + call_data=expected_user_operation.call_data, + call_data_gas_limit=expected_user_operation.call_gas_limit, + verification_gas_limit=expected_user_operation.verification_gas_limit, + pre_verification_gas=expected_user_operation.pre_verification_gas, + max_fee_per_gas=expected_user_operation.max_fee_per_gas, + max_priority_fee_per_gas=expected_user_operation.max_priority_fee_per_gas, + paymaster=expected_user_operation.paymaster, + paymaster_data=expected_user_operation.paymaster_data, + signature=expected_user_operation.signature, + entry_point=expected_user_operation.entry_point, + ) + + user_operation = user_operation_model.to_user_operation(add_tx_metadata=True) + self.assertEqual(user_operation.metadata, expected_user_operation.metadata) + self.assertEqual(user_operation, expected_user_operation) + self.assertEqual( + user_operation_model.to_safe_operation(), expected_safe_operation + ) + self.assertIsNone(user_operation_model.paymaster_and_data) + + 
safe_operation_model: SafeOperationModel = SafeOperationModel.objects.create( + hash=expected_safe_operation_hash, + user_operation=user_operation_model, + valid_after=expected_safe_operation.valid_after_as_datetime, + valid_until=expected_safe_operation.valid_until_as_datetime, + module_address=expected_module_address, + ) + + self.assertEqual(safe_operation_model.build_signature(), b"") + SafeOperationConfirmationFactory( + safe_operation=safe_operation_model, + signature=user_operation.signature[12:], + ) + self.assertEqual( + safe_operation_model.build_signature(), expected_safe_operation.signature + ) diff --git a/safe_transaction_service/account_abstraction/tests/test_views.py b/safe_transaction_service/account_abstraction/tests/test_views.py new file mode 100644 index 00000000..821c9b10 --- /dev/null +++ b/safe_transaction_service/account_abstraction/tests/test_views.py @@ -0,0 +1,750 @@ +import dataclasses +import datetime +import logging +from unittest import mock +from unittest.mock import MagicMock + +from django.urls import reverse +from django.utils import timezone + +from eth_account import Account +from hexbytes import HexBytes +from rest_framework import status +from rest_framework.exceptions import ErrorDetail +from rest_framework.test import APITestCase + +from gnosis.eth import EthereumClient +from gnosis.eth.account_abstraction import UserOperation as UserOperationClass +from gnosis.eth.constants import NULL_ADDRESS +from gnosis.eth.tests.mocks.mock_bundler import ( + safe_4337_address, + safe_4337_chain_id_mock, + safe_4337_module_address_mock, + safe_4337_safe_operation_hash_mock, + safe_4337_user_operation_hash_mock, + user_operation_mock, +) +from gnosis.eth.utils import fast_to_checksum_address +from gnosis.safe.account_abstraction import SafeOperation as SafeOperationClass +from gnosis.safe.proxy_factory import ProxyFactoryV141 +from gnosis.safe.safe_signature import SafeSignatureEOA +from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin + +from safe_transaction_service.utils.utils import datetime_to_str + +from .. import models +from ..serializers import SafeOperationSerializer +from . 
import factories + +logger = logging.getLogger(__name__) + + +class TestAccountAbstractionViews(SafeTestCaseMixin, APITestCase): + def test_safe_operation_view(self): + random_safe_operation_hash = ( + "0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41" + ) + response = self.client.get( + reverse( + "v1:account_abstraction:safe-operation", + args=(random_safe_operation_hash,), + ) + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual( + response.json(), {"detail": "No SafeOperation matches the given query."} + ) + safe_address = Account.create().address + safe_operation = factories.SafeOperationFactory( + user_operation__sender=safe_address + ) + response = self.client.get( + reverse( + "v1:account_abstraction:safe-operation", args=(safe_operation.hash,) + ) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected = { + "created": datetime_to_str(safe_operation.created), + "modified": datetime_to_str(safe_operation.modified), + "safeOperationHash": safe_operation.hash, + "userOperation": { + "sender": safe_operation.user_operation.sender, + "nonce": safe_operation.user_operation.nonce, + "userOperationHash": safe_operation.user_operation.hash, + "ethereumTxHash": safe_operation.user_operation.ethereum_tx_id, + "initCode": "0x", + "callData": "0x", + "callDataGasLimit": safe_operation.user_operation.call_data_gas_limit, + "verificationGasLimit": safe_operation.user_operation.verification_gas_limit, + "preVerificationGas": safe_operation.user_operation.pre_verification_gas, + "maxFeePerGas": safe_operation.user_operation.max_fee_per_gas, + "maxPriorityFeePerGas": safe_operation.user_operation.max_priority_fee_per_gas, + "paymaster": NULL_ADDRESS, + "paymasterData": "0x", + "entryPoint": safe_operation.user_operation.entry_point, + "signature": "0x", + }, + "validAfter": datetime_to_str(safe_operation.valid_after), + "validUntil": datetime_to_str(safe_operation.valid_until), + "moduleAddress": safe_operation.module_address, + "confirmations": [], + "preparedSignature": None, + } + self.assertDictEqual( + response.json(), + expected, + ) + + # Add a confirmation + safe_operation_confirmation = factories.SafeOperationConfirmationFactory( + safe_operation=safe_operation + ) + response = self.client.get( + reverse( + "v1:account_abstraction:safe-operation", args=(safe_operation.hash,) + ) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected["preparedSignature"] = safe_operation_confirmation.signature.hex() + expected["confirmations"] = [ + { + "created": datetime_to_str(safe_operation_confirmation.created), + "modified": datetime_to_str(safe_operation_confirmation.modified), + "owner": safe_operation_confirmation.owner, + "signature": safe_operation_confirmation.signature.hex(), + "signatureType": "EOA", + } + ] + self.assertDictEqual(response.json(), expected) + + def test_safe_operations_view(self): + safe_address = Account.create().address + + response = self.client.get( + reverse( + "v1:account_abstraction:safe-operations", + args=(safe_address,), + ) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual( + response.json(), {"count": 0, "next": None, "previous": None, "results": []} + ) + safe_operation = factories.SafeOperationFactory( + user_operation__sender=safe_address + ) + response = self.client.get( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected = { + 
"created": datetime_to_str(safe_operation.created), + "modified": datetime_to_str(safe_operation.modified), + "safeOperationHash": safe_operation.hash, + "userOperation": { + "sender": safe_operation.user_operation.sender, + "nonce": safe_operation.user_operation.nonce, + "userOperationHash": safe_operation.user_operation.hash, + "ethereumTxHash": safe_operation.user_operation.ethereum_tx_id, + "initCode": "0x", + "callData": "0x", + "callDataGasLimit": safe_operation.user_operation.call_data_gas_limit, + "verificationGasLimit": safe_operation.user_operation.verification_gas_limit, + "preVerificationGas": safe_operation.user_operation.pre_verification_gas, + "maxFeePerGas": safe_operation.user_operation.max_fee_per_gas, + "maxPriorityFeePerGas": safe_operation.user_operation.max_priority_fee_per_gas, + "paymaster": NULL_ADDRESS, + "paymasterData": "0x", + "signature": "0x", + "entryPoint": safe_operation.user_operation.entry_point, + }, + "validAfter": datetime_to_str(safe_operation.valid_after), + "validUntil": datetime_to_str(safe_operation.valid_until), + "moduleAddress": safe_operation.module_address, + "confirmations": [], + "preparedSignature": None, + } + self.assertDictEqual( + response.json(), + {"count": 1, "next": None, "previous": None, "results": [expected]}, + ) + + # Add a confirmation + safe_operation_confirmation = factories.SafeOperationConfirmationFactory( + safe_operation=safe_operation + ) + response = self.client.get( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected["preparedSignature"] = safe_operation_confirmation.signature.hex() + expected["confirmations"] = [ + { + "created": datetime_to_str(safe_operation_confirmation.created), + "modified": datetime_to_str(safe_operation_confirmation.modified), + "owner": safe_operation_confirmation.owner, + "signature": safe_operation_confirmation.signature.hex(), + "signatureType": "EOA", + } + ] + self.assertDictEqual( + response.json(), + {"count": 1, "next": None, "previous": None, "results": [expected]}, + ) + + @mock.patch.object( + SafeOperationSerializer, + "_get_owners", + autospec=True, + ) + @mock.patch.object( + EthereumClient, + "get_chain_id", + autospec=True, + return_value=safe_4337_chain_id_mock, + ) + def test_safe_operation_create_view( + self, get_chain_id_mock: MagicMock, get_owners_mock: MagicMock + ): + account = Account.create() + safe_address = safe_4337_address + user_operation_hash = safe_4337_user_operation_hash_mock + + user_operation = UserOperationClass.from_bundler_response( + user_operation_hash.hex(), user_operation_mock["result"] + ) + + safe_operation = SafeOperationClass.from_user_operation(user_operation) + safe_operation_hash = safe_4337_safe_operation_hash_mock + + self.assertEqual( + safe_operation_hash, + safe_operation.get_safe_operation_hash( + safe_4337_chain_id_mock, safe_4337_module_address_mock + ), + ) + + signature = account.signHash(safe_operation_hash)["signature"].hex() + get_owners_mock.return_value = [] + data = { + "nonce": safe_operation.nonce, + "init_code": user_operation.init_code.hex(), + "call_data": user_operation.call_data.hex(), + "call_data_gas_limit": user_operation.call_gas_limit, + "verification_gas_limit": user_operation.verification_gas_limit, + "pre_verification_gas": user_operation.pre_verification_gas, + "max_fee_per_gas": user_operation.max_fee_per_gas, + "max_priority_fee_per_gas": user_operation.max_priority_fee_per_gas, + "paymaster_and_data": ( + 
user_operation.paymaster_and_data + if user_operation.paymaster_and_data + else None + ), + "signature": signature, + "entry_point": user_operation.entry_point, + # Safe Operation fields, + "valid_after": ( + datetime_to_str(safe_operation.valid_after_as_datetime) + if safe_operation.valid_after + else None + ), + "valid_until": ( + datetime_to_str(safe_operation.valid_until_as_datetime) + if safe_operation.valid_until + else None + ), + "module_address": safe_4337_module_address_mock, + } + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string=f"Signer={account.address} is not an owner. Current owners=[]. Safe-operation-hash={safe_operation_hash.hex()}", + code="invalid", + ) + ] + }, + ) + + get_owners_mock.return_value = [account.address] + # Test not valid signature + with mock.patch.object(SafeSignatureEOA, "is_valid", return_value=False): + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string=f'Signature={data["signature"]} for owner={account.address} is not valid', + code="invalid", + ) + ] + }, + ) + + # Fake that Safe contract was already deployed, so `init_code` should not be provided + with mock.patch.object(EthereumClient, "is_contract", return_value=True): + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "init_code": [ + ErrorDetail( + string="`init_code` must be empty as the contract was already initialized", + code="invalid", + ) + ] + }, + ) + + with mock.patch.object( + ProxyFactoryV141, "calculate_proxy_address", return_value=NULL_ADDRESS + ): + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "init_code": [ + ErrorDetail( + string=f"Provided safe-address={safe_address} does not match calculated-safe-address={NULL_ADDRESS}", + code="invalid", + ) + ] + }, + ) + + # Fake that contract was not deployed and init_code was not provided + with mock.patch.object( + EthereumClient, "is_contract", return_value=False + ) as is_contract_mock: + data_without_init_code = dict(data) + data_without_init_code["init_code"] = None + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data_without_init_code, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "init_code": [ + ErrorDetail( + string="`init_code` was not provided and contract was not initialized", + code="invalid", + ) + ] + }, + ) + is_contract_mock.assert_called_once_with(safe_address) + + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + 
self.assertEqual(models.UserOperation.objects.count(), 1) + self.assertEqual(models.SafeOperation.objects.count(), 1) + + # Receipt will only be created when Operation is indexed + self.assertEqual(models.UserOperationReceipt.objects.count(), 0) + self.assertEqual( + models.UserOperation.objects.get().hash, user_operation_hash.hex() + ) + self.assertEqual( + models.SafeOperation.objects.get().hash, safe_operation_hash.hex() + ) + + # Try to create the same transaction + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string=f"SafeOperation with hash={safe_operation_hash.hex()} already exists", + code="invalid", + ) + ] + }, + ) + + # Insert a SafeOperation with higher nonce, nonce should be too low now + factories.SafeOperationFactory( + user_operation__nonce=safe_operation.nonce, + user_operation__sender=safe_address, + ) + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "nonce": [ + ErrorDetail( + string=f'Nonce={data["nonce"]} too low for safe=0xB0B5c0578Aa134b0496a6C0e51A7aae47C522861', + code="invalid", + ) + ] + }, + ) + + @mock.patch.object( + SafeOperationSerializer, + "_get_owners", + autospec=True, + ) + @mock.patch.object( + EthereumClient, + "get_chain_id", + autospec=True, + return_value=safe_4337_chain_id_mock, + ) + def test_safe_operation_valid_until_create_view( + self, get_chain_id_mock: MagicMock, get_owners_mock: MagicMock + ): + """ + Make sure `valid_until` checks are working + """ + + account = Account.create() + get_owners_mock.return_value = [account.address] + safe_address = safe_4337_address + user_operation_hash = safe_4337_user_operation_hash_mock + + user_operation = UserOperationClass.from_bundler_response( + user_operation_hash.hex(), user_operation_mock["result"] + ) + + safe_operation = SafeOperationClass.from_user_operation(user_operation) + safe_operation_hash = safe_4337_safe_operation_hash_mock + + self.assertEqual( + safe_operation_hash, + safe_operation.get_safe_operation_hash( + safe_4337_chain_id_mock, safe_4337_module_address_mock + ), + ) + + signature = account.signHash(safe_operation_hash)["signature"].hex() + data = { + "nonce": safe_operation.nonce, + "init_code": user_operation.init_code.hex(), + "call_data": user_operation.call_data.hex(), + "call_data_gas_limit": user_operation.call_gas_limit, + "verification_gas_limit": user_operation.verification_gas_limit, + "pre_verification_gas": user_operation.pre_verification_gas, + "max_fee_per_gas": user_operation.max_fee_per_gas, + "max_priority_fee_per_gas": user_operation.max_priority_fee_per_gas, + "paymaster_and_data": ( + user_operation.paymaster_and_data + if user_operation.paymaster_and_data + else None + ), + "signature": signature, + "entry_point": user_operation.entry_point, + # Safe Operation fields, + "valid_after": ( + datetime_to_str(safe_operation.valid_after_as_datetime) + if safe_operation.valid_after + else None + ), + "valid_until": datetime_to_str(timezone.now()), + "module_address": safe_4337_module_address_mock, + } + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + 
format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "valid_until": [ + ErrorDetail( + string="`valid_until` cannot be previous to the current timestamp", + code="invalid", + ) + ] + }, + ) + + # Set valid_until in the future + valid_until = timezone.now() + datetime.timedelta(minutes=90) + data["valid_until"] = datetime_to_str(valid_until) + new_safe_operation = dataclasses.replace( + safe_operation, valid_until=int(valid_until.timestamp()) + ) + safe_operation_hash = new_safe_operation.get_safe_operation_hash( + safe_4337_chain_id_mock, safe_4337_module_address_mock + ) + data["signature"] = account.signHash(safe_operation_hash)["signature"].hex() + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + @mock.patch.object( + SafeOperationSerializer, + "_get_owners", + autospec=True, + ) + @mock.patch.object( + EthereumClient, + "get_chain_id", + autospec=True, + return_value=safe_4337_chain_id_mock, + ) + def test_safe_operation_paymaster_and_data_create_view( + self, get_chain_id_mock: MagicMock, get_owners_mock: MagicMock + ): + """ + Make sure `valid_until` checks are working + """ + + account = Account.create() + get_owners_mock.return_value = [account.address] + safe_address = safe_4337_address + user_operation_hash = safe_4337_user_operation_hash_mock + + paymaster_address = Account.create().address + paymaster_and_data = HexBytes(paymaster_address) + user_operation = dataclasses.replace( + UserOperationClass.from_bundler_response( + user_operation_hash.hex(), user_operation_mock["result"] + ), + paymaster_and_data=paymaster_and_data, + ) + + safe_operation = SafeOperationClass.from_user_operation(user_operation) + safe_operation_hash = safe_operation.get_safe_operation_hash( + safe_4337_chain_id_mock, safe_4337_module_address_mock + ) + + signature = account.signHash(safe_operation_hash)["signature"].hex() + data = { + "nonce": safe_operation.nonce, + "init_code": user_operation.init_code.hex(), + "call_data": user_operation.call_data.hex(), + "call_data_gas_limit": user_operation.call_gas_limit, + "verification_gas_limit": user_operation.verification_gas_limit, + "pre_verification_gas": user_operation.pre_verification_gas, + "max_fee_per_gas": user_operation.max_fee_per_gas, + "max_priority_fee_per_gas": user_operation.max_priority_fee_per_gas, + "paymaster_and_data": "0x00", + "signature": signature, + "entry_point": user_operation.entry_point, + # Safe Operation fields, + "valid_after": ( + datetime_to_str(safe_operation.valid_after_as_datetime) + if safe_operation.valid_after + else None + ), + "valid_until": ( + datetime_to_str(safe_operation.valid_after_as_datetime) + if safe_operation.valid_after + else None + ), + "module_address": safe_4337_module_address_mock, + } + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "paymaster_and_data": [ + ErrorDetail( + string="`paymaster_and_data` length should be at least 20 bytes", + code="invalid", + ) + ] + }, + ) + + # Set valid paymaster_and_data + data["paymaster_and_data"] = paymaster_and_data.hex() + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", 
args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "paymaster_and_data": [ + ErrorDetail( + string=f"paymaster={paymaster_address} was not found in blockchain", + code="invalid", + ) + ] + }, + ) + + with mock.patch.object( + EthereumClient, + "is_contract", + side_effect=[False, True, True], + ) as is_contract_mock: + response = self.client.post( + reverse("v1:account_abstraction:safe-operations", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertListEqual( + is_contract_mock.call_args_list, + [ + mock.call(safe_address), + mock.call(fast_to_checksum_address(user_operation.init_code[:20])), + mock.call(paymaster_address), + ], + ) + + def test_user_operation_view(self): + random_user_operation_hash = ( + "0x8aca9664752dbae36135fd0956c956fc4a370feeac67485b49bcd4b99608ae41" + ) + response = self.client.get( + reverse( + "v1:account_abstraction:user-operation", + args=(random_user_operation_hash,), + ) + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual( + response.json(), {"detail": "No UserOperation matches the given query."} + ) + safe_address = Account.create().address + safe_operation = factories.SafeOperationFactory( + user_operation__sender=safe_address + ) + response = self.client.get( + reverse( + "v1:account_abstraction:user-operation", + args=(safe_operation.user_operation.hash,), + ) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected = { + "sender": safe_operation.user_operation.sender, + "nonce": safe_operation.user_operation.nonce, + "userOperationHash": safe_operation.user_operation.hash, + "ethereumTxHash": safe_operation.user_operation.ethereum_tx_id, + "initCode": "0x", + "callData": "0x", + "callDataGasLimit": safe_operation.user_operation.call_data_gas_limit, + "verificationGasLimit": safe_operation.user_operation.verification_gas_limit, + "preVerificationGas": safe_operation.user_operation.pre_verification_gas, + "maxFeePerGas": safe_operation.user_operation.max_fee_per_gas, + "maxPriorityFeePerGas": safe_operation.user_operation.max_priority_fee_per_gas, + "paymaster": NULL_ADDRESS, + "paymasterData": "0x", + "signature": "0x", + "entryPoint": safe_operation.user_operation.entry_point, + "safeOperation": { + "created": datetime_to_str(safe_operation.created), + "modified": datetime_to_str(safe_operation.modified), + "safeOperationHash": safe_operation.hash, + "validAfter": datetime_to_str(safe_operation.valid_after), + "validUntil": datetime_to_str(safe_operation.valid_until), + "moduleAddress": safe_operation.module_address, + "confirmations": [], + "preparedSignature": None, + }, + } + self.assertDictEqual( + response.json(), + expected, + ) + + def test_user_operations_view(self): + safe_address = Account.create().address + + response = self.client.get( + reverse( + "v1:account_abstraction:user-operations", + args=(safe_address,), + ) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual( + response.json(), {"count": 0, "next": None, "previous": None, "results": []} + ) + safe_operation = factories.SafeOperationFactory( + user_operation__sender=safe_address + ) + response = self.client.get( + reverse("v1:account_abstraction:user-operations", args=(safe_address,)) + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected = { + "sender": 
safe_operation.user_operation.sender, + "nonce": safe_operation.user_operation.nonce, + "userOperationHash": safe_operation.user_operation.hash, + "ethereumTxHash": safe_operation.user_operation.ethereum_tx_id, + "initCode": "0x", + "callData": "0x", + "callDataGasLimit": safe_operation.user_operation.call_data_gas_limit, + "verificationGasLimit": safe_operation.user_operation.verification_gas_limit, + "preVerificationGas": safe_operation.user_operation.pre_verification_gas, + "maxFeePerGas": safe_operation.user_operation.max_fee_per_gas, + "maxPriorityFeePerGas": safe_operation.user_operation.max_priority_fee_per_gas, + "paymaster": NULL_ADDRESS, + "paymasterData": "0x", + "signature": "0x", + "entryPoint": safe_operation.user_operation.entry_point, + "safeOperation": { + "created": datetime_to_str(safe_operation.created), + "modified": datetime_to_str(safe_operation.modified), + "safeOperationHash": safe_operation.hash, + "validAfter": datetime_to_str(safe_operation.valid_after), + "validUntil": datetime_to_str(safe_operation.valid_until), + "moduleAddress": safe_operation.module_address, + "confirmations": [], + "preparedSignature": None, + }, + } + self.assertDictEqual( + response.json(), + {"count": 1, "next": None, "previous": None, "results": [expected]}, + ) diff --git a/safe_transaction_service/account_abstraction/urls.py b/safe_transaction_service/account_abstraction/urls.py new file mode 100644 index 00000000..3cb1231a --- /dev/null +++ b/safe_transaction_service/account_abstraction/urls.py @@ -0,0 +1,28 @@ +from django.urls import path + +from . import views + +app_name = "account_abstraction" + +urlpatterns = [ + path( + "safe-operations/<str:safe_operation_hash>/", + views.SafeOperationView.as_view(), + name="safe-operation", + ), + path( + "safes/<str:address>/safe-operations/", + views.SafeOperationsView.as_view(), + name="safe-operations", + ), + path( + "user-operations/<str:user_operation_hash>/", + views.UserOperationView.as_view(), + name="user-operation", + ), + path( + "safes/<str:address>/user-operations/", + views.UserOperationsView.as_view(), + name="user-operations", + ), +] diff --git a/safe_transaction_service/account_abstraction/utils.py b/safe_transaction_service/account_abstraction/utils.py new file mode 100644 index 00000000..3a8e2590 --- /dev/null +++ b/safe_transaction_service/account_abstraction/utils.py @@ -0,0 +1,45 @@ +import logging +from functools import cache +from typing import Optional + +from django.conf import settings + +from eth_typing import ChecksumAddress +from hexbytes import HexBytes +from web3.types import LogReceipt + +from gnosis.eth.account_abstraction import BundlerClient +from gnosis.eth.utils import fast_to_checksum_address + +logger = logging.getLogger(__name__) + + +@cache +def get_bundler_client() -> Optional[BundlerClient]: + """ + :return: Initialized `ERC4337 RPC Bundler Client` if configured, `None` otherwise + """ + if settings.ETHEREUM_4337_BUNDLER_URL: + return BundlerClient(settings.ETHEREUM_4337_BUNDLER_URL) + logger.warning("ETHEREUM_4337_BUNDLER_URL not set, cannot configure bundler client") + return None + + +def get_user_operation_sender_from_user_operation_log( + log: LogReceipt, +) -> ChecksumAddress: + """ + UserOperationEvent ( + indexed bytes32 userOpHash, + indexed address sender, + indexed address paymaster, + uint256 nonce, + bool success, + uint256 actualGasCost, + uint256 actualGasUsed + ) + :param log: `UserOperationEvent` log + :return: Checksum address of user operation `sender` + """ + + return fast_to_checksum_address(HexBytes(log["topics"][2])[-20:]) diff --git 
a/safe_transaction_service/account_abstraction/views.py b/safe_transaction_service/account_abstraction/views.py new file mode 100644 index 00000000..5229beff --- /dev/null +++ b/safe_transaction_service/account_abstraction/views.py @@ -0,0 +1,143 @@ +import django_filters +from drf_yasg.utils import swagger_auto_schema +from rest_framework import status +from rest_framework.filters import OrderingFilter +from rest_framework.generics import ListAPIView, ListCreateAPIView, RetrieveAPIView +from rest_framework.response import Response + +from gnosis.eth.utils import fast_is_checksum_address + +from . import pagination, serializers +from .models import SafeOperation, UserOperation + + +class SafeOperationView(RetrieveAPIView): + lookup_field = "hash" + lookup_url_kwarg = "safe_operation_hash" + queryset = SafeOperation.objects.prefetch_related("confirmations").select_related( + "user_operation" + ) + serializer_class = serializers.SafeOperationWithUserOperationResponseSerializer + + +class SafeOperationsView(ListCreateAPIView): + filter_backends = [ + django_filters.rest_framework.DjangoFilterBackend, + OrderingFilter, + ] + ordering = ["-user_operation__nonce"] + ordering_fields = ["user_operation__nonce"] + pagination_class = pagination.DefaultPagination + + def get_queryset(self): + safe = self.kwargs["address"] + return ( + SafeOperation.objects.filter(user_operation__sender=safe) + .prefetch_related("confirmations") + .select_related("user_operation") + ) + + def get_serializer_context(self): + context = super().get_serializer_context() + if getattr(self, "swagger_fake_view", False): + return context + + context["safe_address"] = self.kwargs["address"] + return context + + def get_serializer_class(self): + if self.request.method == "GET": + return serializers.SafeOperationWithUserOperationResponseSerializer + elif self.request.method == "POST": + return serializers.SafeOperationSerializer + + def get(self, request, address, *args, **kwargs): + if not fast_is_checksum_address(address): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [address], + }, + ) + return super().get(request, address, *args, **kwargs) + + @swagger_auto_schema( + request_body=serializers.SafeOperationSerializer, + responses={201: "Created"}, + ) + def post(self, request, address, *args, **kwargs): + """ + Create a new 4337 ``SafeOperation`` for a Safe. 
+ + :param request: + :param address: + :param args: + :param kwargs: + :return: + """ + + if not fast_is_checksum_address(address): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [address], + }, + ) + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + self.perform_create(serializer) + return Response(status=status.HTTP_201_CREATED) + + +class UserOperationView(RetrieveAPIView): + lookup_field = "hash" + lookup_url_kwarg = "user_operation_hash" + queryset = ( + UserOperation.objects.all() + .select_related("receipt", "safe_operation") + .prefetch_related("safe_operation__confirmations") + ) + serializer_class = serializers.UserOperationWithSafeOperationResponseSerializer + + +class UserOperationsView(ListAPIView): + filter_backends = [ + django_filters.rest_framework.DjangoFilterBackend, + OrderingFilter, + ] + ordering = ["-nonce"] + ordering_fields = ["nonce"] + pagination_class = pagination.DefaultPagination + serializer_class = serializers.UserOperationWithSafeOperationResponseSerializer + + def get_queryset(self): + safe = self.kwargs["address"] + return ( + UserOperation.objects.filter(sender=safe) + .select_related("receipt", "safe_operation") + .prefetch_related("safe_operation__confirmations") + ) + + def get_serializer_context(self): + context = super().get_serializer_context() + if getattr(self, "swagger_fake_view", False): + return context + + context["safe_address"] = self.kwargs["address"] + return context + + def get(self, request, address, *args, **kwargs): + if not fast_is_checksum_address(address): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [address], + }, + ) + return super().get(request, address, *args, **kwargs) diff --git a/safe_transaction_service/contracts/admin.py b/safe_transaction_service/contracts/admin.py index 8a27f94e..6612bc48 100644 --- a/safe_transaction_service/contracts/admin.py +++ b/safe_transaction_service/contracts/admin.py @@ -1,15 +1,16 @@ from django.contrib import admin -from gnosis.eth.django.admin import BinarySearchAdmin - -from safe_transaction_service.utils.admin import HasLogoFilterAdmin +from safe_transaction_service.utils.admin import ( + AdvancedAdminSearchMixin, + HasLogoFilterAdmin, +) from .models import Contract, ContractAbi from .tasks import create_or_update_contract_with_metadata_task @admin.register(ContractAbi) -class ContractAbiAdmin(BinarySearchAdmin): +class ContractAbiAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ("pk", "relevance", "description", "abi_functions") list_filter = ("relevance",) ordering = ["relevance"] @@ -40,7 +41,7 @@ def queryset(self, request, queryset): @admin.register(Contract) -class ContractAdmin(BinarySearchAdmin): +class ContractAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): actions = ["find_abi"] list_display = ( "address", @@ -57,7 +58,7 @@ class ContractAdmin(BinarySearchAdmin): ordering = ["address"] raw_id_fields = ("contract_abi",) search_fields = [ - "=address", + "==address", "name", "contract_abi__abi", "contract_abi__description", diff --git a/safe_transaction_service/contracts/management/commands/update_safe_contracts_logo.py b/safe_transaction_service/contracts/management/commands/update_safe_contracts_logo.py new file mode 100644 index 00000000..d0cb43a2 --- /dev/null +++ 
b/safe_transaction_service/contracts/management/commands/update_safe_contracts_logo.py @@ -0,0 +1,106 @@ +from django.core.files import File +from django.core.management import BaseCommand, CommandError + +from gnosis.eth import EthereumClientProvider +from gnosis.safe.safe_deployments import safe_deployments + +from config.settings.base import STATICFILES_DIRS +from safe_transaction_service.contracts.models import Contract + +TRUSTED_FOR_DELEGATE_CALL = ["MultiSendCallOnly"] + + +def generate_safe_contract_display_name(contract_name: str, version: str) -> str: + """ + Generates the display name for a Safe contract. + Prepends "Safe:" if the contract name does not already contain the word "Safe", and appends the contract version at the end. + + :param contract_name: + :param version: + :return: display_name + """ + # Remove the "Gnosis" word from the contract name + contract_name = contract_name.replace("Gnosis", "") + if "safe" not in contract_name.lower(): + return f"Safe: {contract_name} {version}" + else: + return f"{contract_name} {version}" + + +class Command(BaseCommand): + help = "Update or create Safe contracts with the provided logo" + + def add_arguments(self, parser): + parser.add_argument( + "--safe-version", type=str, help="Contract version", required=False + ) + parser.add_argument( + "--force-update-contract-names", + help="Update all the Safe contract names and display names", + action="store_true", + default=False, + ) + parser.add_argument( + "--logo-path", + type=str, + help="Path of the new logo", + required=False, + default=f"{STATICFILES_DIRS[0]}/safe/safe_contract_logo.png", + ) + + def handle(self, *args, **options): + """ + Command to create or update Safe contracts with the provided logo. + + :param args: + :param options: Safe version and logo path + :return: + """ + safe_version = options["safe_version"] + force_update_contract_names = options["force_update_contract_names"] + logo_path = options["logo_path"] + ethereum_client = EthereumClientProvider() + chain_id = ethereum_client.get_chain_id() + logo_file = File(open(logo_path, "rb")) + if not safe_version: + versions = list(safe_deployments.keys()) + elif safe_version in safe_deployments: + versions = [safe_version] + else: + raise CommandError( + f"Wrong Safe version {safe_version}, supported versions {safe_deployments.keys()}" + ) + + if force_update_contract_names: + # Update every Safe contract name and display name + queryset = Contract.objects.update_or_create + else: + # Only update contracts with empty values + queryset = Contract.objects.get_or_create + + for version in versions: + for contract_name, addresses in safe_deployments[version].items(): + if (contract_address := addresses.get(str(chain_id))) is not None: + display_name = generate_safe_contract_display_name( + contract_name, version + ) + contract, created = queryset( + address=contract_address, + defaults={ + "name": contract_name, + "display_name": display_name, + "trusted_for_delegate_call": contract_name + in TRUSTED_FOR_DELEGATE_CALL, + }, + ) + + if not created: + # Remove previous logo file + contract.logo.delete(save=True) + # Update the name and display name only for contracts with empty names + if not force_update_contract_names and contract.name == "": + contract.display_name = display_name + contract.name = contract_name + + contract.logo.save(f"{contract.address}.png", logo_file) + contract.save() diff --git a/safe_transaction_service/contracts/migrations/0001_initial.py b/safe_transaction_service/contracts/migrations/0001_initial.py index 5cd90a6f..1ffc65e2 100644 --- 
a/safe_transaction_service/contracts/migrations/0001_initial.py +++ b/safe_transaction_service/contracts/migrations/0001_initial.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/safe_transaction_service/contracts/migrations/0002_auto_20210119_1136.py b/safe_transaction_service/contracts/migrations/0002_auto_20210119_1136.py index 22145266..c1856d83 100644 --- a/safe_transaction_service/contracts/migrations/0002_auto_20210119_1136.py +++ b/safe_transaction_service/contracts/migrations/0002_auto_20210119_1136.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0001_initial"), ] diff --git a/safe_transaction_service/contracts/migrations/0003_auto_20210122_1352.py b/safe_transaction_service/contracts/migrations/0003_auto_20210122_1352.py index f8f0800f..5770989e 100644 --- a/safe_transaction_service/contracts/migrations/0003_auto_20210122_1352.py +++ b/safe_transaction_service/contracts/migrations/0003_auto_20210122_1352.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0002_auto_20210119_1136"), ] diff --git a/safe_transaction_service/contracts/migrations/0004_auto_20210125_0925.py b/safe_transaction_service/contracts/migrations/0004_auto_20210125_0925.py index 8ae786c6..2892649c 100644 --- a/safe_transaction_service/contracts/migrations/0004_auto_20210125_0925.py +++ b/safe_transaction_service/contracts/migrations/0004_auto_20210125_0925.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0003_auto_20210122_1352"), ] diff --git a/safe_transaction_service/contracts/migrations/0005_alter_contractabi_id.py b/safe_transaction_service/contracts/migrations/0005_alter_contractabi_id.py index 5d8e96c8..503f5e8b 100644 --- a/safe_transaction_service/contracts/migrations/0005_alter_contractabi_id.py +++ b/safe_transaction_service/contracts/migrations/0005_alter_contractabi_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0004_auto_20210125_0925"), ] diff --git a/safe_transaction_service/contracts/migrations/0006_contractabi_abi_hash.py b/safe_transaction_service/contracts/migrations/0006_contractabi_abi_hash.py index 4480934f..4d521ad1 100644 --- a/safe_transaction_service/contracts/migrations/0006_contractabi_abi_hash.py +++ b/safe_transaction_service/contracts/migrations/0006_contractabi_abi_hash.py @@ -22,7 +22,6 @@ def add_hash_for_contract_abis(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("contracts", "0005_alter_contractabi_id"), ] diff --git a/safe_transaction_service/contracts/migrations/0007_contract_trusted_for_delegate_call.py b/safe_transaction_service/contracts/migrations/0007_contract_trusted_for_delegate_call.py index 6dc03257..621c2683 100644 --- a/safe_transaction_service/contracts/migrations/0007_contract_trusted_for_delegate_call.py +++ b/safe_transaction_service/contracts/migrations/0007_contract_trusted_for_delegate_call.py @@ -19,7 +19,6 @@ def add_trusted_contracts(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("contracts", "0006_contractabi_abi_hash"), ] diff --git a/safe_transaction_service/contracts/migrations/0008_ethereum_address_field_v2.py b/safe_transaction_service/contracts/migrations/0008_ethereum_address_field_v2.py index 521d45ee..dddf7d37 100644 --- a/safe_transaction_service/contracts/migrations/0008_ethereum_address_field_v2.py +++ 
b/safe_transaction_service/contracts/migrations/0008_ethereum_address_field_v2.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0007_contract_trusted_for_delegate_call"), ] diff --git a/safe_transaction_service/contracts/migrations/0009_alter_contractabi_abi_hash.py b/safe_transaction_service/contracts/migrations/0009_alter_contractabi_abi_hash.py index 50d5ec08..8f889dd3 100644 --- a/safe_transaction_service/contracts/migrations/0009_alter_contractabi_abi_hash.py +++ b/safe_transaction_service/contracts/migrations/0009_alter_contractabi_abi_hash.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0008_ethereum_address_field_v2"), ] diff --git a/safe_transaction_service/contracts/migrations/0010_alter_contract_logo.py b/safe_transaction_service/contracts/migrations/0010_alter_contract_logo.py index 18018725..212770a6 100644 --- a/safe_transaction_service/contracts/migrations/0010_alter_contract_logo.py +++ b/safe_transaction_service/contracts/migrations/0010_alter_contract_logo.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("contracts", "0009_alter_contractabi_abi_hash"), ] diff --git a/safe_transaction_service/contracts/models.py b/safe_transaction_service/contracts/models.py index 03ea4c75..27c7b56a 100644 --- a/safe_transaction_service/contracts/models.py +++ b/safe_transaction_service/contracts/models.py @@ -17,15 +17,8 @@ from pilkit.processors import Resize from web3._utils.normalizers import normalize_abi -from gnosis.eth.clients import ( - BlockscoutClient, - BlockScoutConfigurationProblem, - EtherscanClient, - EtherscanClientConfigurationProblem, - Sourcify, -) +from gnosis.eth.clients import ContractMetadata from gnosis.eth.django.models import EthereumAddressV2Field, Keccak256Field -from gnosis.eth.ethereum_client import EthereumClientProvider, EthereumNetwork from gnosis.eth.utils import fast_keccak logger = getLogger(__name__) @@ -97,18 +90,19 @@ def save(self, *args, **kwargs) -> None: class ContractManager(models.Manager): - def create_from_address( - self, address: str, network: Optional[EthereumNetwork] = None + def create_from_metadata( + self, address: str, metadata: Optional[ContractMetadata] ) -> "Contract": """ - Create contract and try to fetch information from APIs + Create contract from provided ``metadata`` :param address: - :param network: - :return: Contract instance populated with all the information found + :param metadata: + :return: Contract instance populated with all the information provided by ``metadata`` """ contract = super().create(address=address) - contract.sync_abi_from_api(network=network) + if metadata: + contract.update_from_metadata(metadata) return contract def fix_missing_logos(self) -> int: @@ -185,56 +179,27 @@ def __str__(self): logo = " with logo" if self.logo else " without logo" return f"Contract {self.address} - {self.name} - with abi {has_abi}{logo}" - def sync_abi_from_api(self, network: Optional[EthereumNetwork] = None) -> bool: + def update_from_metadata(self, contract_metadata: ContractMetadata) -> bool: """ - Sync ABI from Sourcify, then from Etherscan and Blockscout if available + Update contract metadata - :param network: Can be provided to save requests to the node + :param contract_metadata: Contract Metadata :return: True if updated, False otherwise """ - ethereum_client = EthereumClientProvider() - network = network or ethereum_client.get_network() - sourcify = Sourcify(network) + if not contract_metadata: + 
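# Nothing to update if no metadata could be fetched from any source +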
return False - try: - etherscan_client = EtherscanClient( - network, api_key=settings.ETHERSCAN_API_KEY - ) - except EtherscanClientConfigurationProblem: - logger.info( - "Etherscan client is not available for current network %s", network - ) - etherscan_client = None - - try: - blockscout_client = BlockscoutClient(network) - except BlockScoutConfigurationProblem: - logger.info( - "Blockscout client is not available for current network %s", network - ) - blockscout_client = None - - contract_abi: Optional[ContractAbi] = None - for client in (sourcify, etherscan_client, blockscout_client): - if not client: - continue - try: - contract_metadata = client.get_contract_metadata(self.address) - if contract_metadata: - name = contract_metadata.name or "" - contract_abi, _ = ContractAbi.objects.get_or_create( - abi=contract_metadata.abi, defaults={"description": name} - ) - if name: - if not contract_abi.description: - contract_abi.description = name - contract_abi.save(update_fields=["description"]) - if not self.name: - self.name = name - self.contract_abi = contract_abi - self.save(update_fields=["name", "contract_abi"]) - break - except IOError: - pass + name = contract_metadata.name or "" + contract_abi, _ = ContractAbi.objects.get_or_create( + abi=contract_metadata.abi, defaults={"description": name} + ) + if name: + if not contract_abi.description: + contract_abi.description = name + contract_abi.save(update_fields=["description"]) + if not self.name: + self.name = name + self.contract_abi = contract_abi + self.save(update_fields=["name", "contract_abi"]) return bool(contract_abi) diff --git a/safe_transaction_service/contracts/services/__init__.py b/safe_transaction_service/contracts/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/contracts/services/contract_metadata_service.py b/safe_transaction_service/contracts/services/contract_metadata_service.py new file mode 100644 index 00000000..8452f1f0 --- /dev/null +++ b/safe_transaction_service/contracts/services/contract_metadata_service.py @@ -0,0 +1,102 @@ +import logging +from functools import cache +from typing import Optional + +from django.conf import settings + +from eth_typing import ChecksumAddress + +from gnosis.eth import EthereumClient, EthereumClientProvider +from gnosis.eth.clients import ( + BlockscoutClient, + BlockScoutConfigurationProblem, + ContractMetadata, + EtherscanClient, + EtherscanClientConfigurationProblem, + SourcifyClient, + SourcifyClientConfigurationProblem, +) + +logger = logging.getLogger(__name__) + + +@cache +def get_contract_metadata_service(): + return ContractMetadataService(EthereumClientProvider(), settings.ETHERSCAN_API_KEY) + + +class ContractMetadataService: + def __init__( + self, ethereum_client: EthereumClient, etherscan_api_key: Optional[str] = None + ): + self.ethereum_client = ethereum_client + self.ethereum_network = ethereum_client.get_network() + self.etherscan_api_key = etherscan_api_key + self.etherscan_client = self._get_etherscan_client() + self.blockscout_client = self._get_blockscout_client() + self.sourcify_client = self._get_sourcify_client() + self.enabled_clients = [ + client + for client in ( + self.sourcify_client, + self.etherscan_client, + self.blockscout_client, + ) + if client + ] + + def _get_etherscan_client(self) -> Optional[EtherscanClient]: + try: + return EtherscanClient( + self.ethereum_network, api_key=self.etherscan_api_key + ) + except EtherscanClientConfigurationProblem: + logger.info( + "Etherscan client is 
not available for current network %s", + self.ethereum_network, + ) + return None + + def _get_blockscout_client(self) -> Optional[BlockscoutClient]: + try: + return BlockscoutClient(self.ethereum_network) + except BlockScoutConfigurationProblem: + logger.info( + "Blockscout client is not available for current network %s", + self.ethereum_network, + ) + return None + + def _get_sourcify_client(self) -> Optional[SourcifyClient]: + try: + return SourcifyClient(self.ethereum_network) + except SourcifyClientConfigurationProblem: + logger.info( + "Sourcify client is not available for current network %s", + self.ethereum_network, + ) + return None + + def get_contract_metadata( + self, contract_address: ChecksumAddress + ) -> Optional[ContractMetadata]: + """ + Get contract metadata from every enabled client + + :param contract_address: Contract address + :return: Contract Metadata if found from any client, otherwise None + """ + for client in self.enabled_clients: + try: + contract_metadata = client.get_contract_metadata(contract_address) + if contract_metadata: + return contract_metadata + except IOError: + logger.debug( + "Cannot get metadata for contract=%s on network=%s using client=%s", + contract_address, + self.ethereum_network.name, + client.__class__.__name__, + ) + + return None diff --git a/safe_transaction_service/contracts/tasks.py b/safe_transaction_service/contracts/tasks.py index 3d53ffeb..b6de02af 100644 --- a/safe_transaction_service/contracts/tasks.py +++ b/safe_transaction_service/contracts/tasks.py @@ -2,7 +2,6 @@ from enum import Enum from itertools import chain -from django.db import IntegrityError, transaction from django.utils import timezone from celery import app @@ -16,10 +15,10 @@ ModuleTransaction, MultisigTransaction, ) -from safe_transaction_service.utils.ethereum import get_ethereum_network from safe_transaction_service.utils.utils import close_gevent_db_connection_decorator from .models import Contract +from .services.contract_metadata_service import get_contract_metadata_service logger = get_task_logger(__name__) @@ -120,25 +119,24 @@ def create_or_update_contract_with_metadata_task( address: ChecksumAddress, ) -> ContractAction: """ - Creates or updates a contract using 3rd party information (contract name, ABI...) + Creates or updates a contract using 3rd party metadata (contract name, ABI...) 
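+ Metadata is fetched once from the contract metadata service and used either to update the existing contract or to create a new one.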
:param address: Contract address :return: ContractAction """ logger.info("Searching metadata for contract %s", address) - ethereum_network = get_ethereum_network() + contract_metadata_service = get_contract_metadata_service() + contract_metadata = contract_metadata_service.get_contract_metadata(address) + try: - with transaction.atomic(): - contract = Contract.objects.create_from_address( - address, network=ethereum_network - ) - action = ContractAction.CREATED - except IntegrityError: contract = Contract.objects.get(address=address) - if contract.sync_abi_from_api(): + if contract_metadata and contract.update_from_metadata(contract_metadata): action = ContractAction.UPDATED else: action = ContractAction.NOT_MODIFIED + except Contract.DoesNotExist: + contract = Contract.objects.create_from_metadata(address, contract_metadata) + action = ContractAction.CREATED logger.info( "%s contract with address=%s name=%s abi-found=%s", diff --git a/safe_transaction_service/contracts/tests/mocks/__init__.py b/safe_transaction_service/contracts/tests/mocks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/contracts/tests/mocks/contract_metadata_mocks.py b/safe_transaction_service/contracts/tests/mocks/contract_metadata_mocks.py new file mode 100644 index 00000000..f672fda7 --- /dev/null +++ b/safe_transaction_service/contracts/tests/mocks/contract_metadata_mocks.py @@ -0,0 +1,138 @@ +from gnosis.eth.clients import ContractMetadata + +etherscan_metadata_mock = ContractMetadata( + "Etherscan Uxio Contract", + [ + { + "anonymous": False, + "inputs": [ + { + "indexed": False, + "internalType": "address", + "name": "etherscanParam", + "type": "address", + } + ], + "name": "AddedOwner", + "type": "event", + }, + { + "constant": False, + "inputs": [ + { + "internalType": "address", + "name": "_masterCopy", + "type": "address", + } + ], + "name": "changeMasterCopy", + "outputs": [], + "payable": False, + "stateMutability": "nonpayable", + "type": "function", + }, + { + "constant": False, + "inputs": [ + {"internalType": "uint256", "name": "_threshold", "type": "uint256"} + ], + "name": "changeThreshold", + "outputs": [], + "payable": False, + "stateMutability": "nonpayable", + "type": "function", + }, + ], + False, +) +sourcify_metadata_mock = ContractMetadata( + "Sourcify Uxio Contract", + [ + { + "anonymous": False, + "inputs": [ + { + "indexed": False, + "internalType": "address", + "name": "sourcifyParam", + "type": "address", + } + ], + "name": "AddedOwner", + "type": "event", + }, + { + "constant": False, + "inputs": [ + { + "internalType": "address", + "name": "_masterCopy", + "type": "address", + } + ], + "name": "changeMasterCopy", + "outputs": [], + "payable": False, + "stateMutability": "nonpayable", + "type": "function", + }, + { + "constant": False, + "inputs": [ + {"internalType": "uint256", "name": "_threshold", "type": "uint256"} + ], + "name": "changeThreshold", + "outputs": [], + "payable": False, + "stateMutability": "nonpayable", + "type": "function", + }, + ], + True, +) + +blockscout_metadata_mock = ContractMetadata( + "Blockscout Moises Contract", + [ + { + "anonymous": False, + "inputs": [ + { + "indexed": False, + "internalType": "address", + "name": "blockscoutParam", + "type": "address", + } + ], + "name": "AddedOwner", + "type": "event", + }, + { + "constant": False, + "inputs": [ + { + "internalType": "address", + "name": "_masterCopy", + "type": "address", + } + ], + "name": "changeMasterCopy", + "outputs": [], + "payable": False, + 
"stateMutability": "nonpayable", + "type": "function", + }, + { + "constant": False, + "inputs": [ + {"internalType": "uint256", "name": "_threshold", "type": "uint256"} + ], + "name": "changeThreshold", + "outputs": [], + "payable": False, + "stateMutability": "nonpayable", + "type": "function", + }, + ], + False, +) diff --git a/safe_transaction_service/contracts/tests/mocks/tx_decoder_mocks.py b/safe_transaction_service/contracts/tests/mocks/tx_decoder_mocks.py new file mode 100644 index 00000000..8dcd030c --- /dev/null +++ b/safe_transaction_service/contracts/tests/mocks/tx_decoder_mocks.py @@ -0,0 +1,208 @@ +from hexbytes import HexBytes + +exec_transaction_data_mock = HexBytes( + "0x6a761202000000000000000000000000b522a9f781924ed250a11c54105e51840b138add00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000bd4a50000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000066000000000000000000000000000000000000000000000000000000000000004e48d80ff0a0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000048f003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d70000000000000000000000000000000000000000000000
00000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e848000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000825cce27c16c9431409a311e1bfc7fb00cf28f223f309af6917bea47a1f787cb84117521c8dd216993ab576ddbf2850a65ed434577ae9153c666d96e9138ddcc901c000000000000000000000000ae5fb390e5c4fa1962e39e98dbfb0ed8055ed7a9000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000" +) +exec_transaction_decoded_mock = { + "method": "execTransaction", + "parameters": [ + { + "name": "to", + "type": "address", + "value": "0xB522a9f781924eD250A11C54105E51840B138AdD", + }, + {"name": "value", "type": "uint256", "value": "0"}, + { + "name": "data", + "type": "bytes", + "value": "0x8d80ff0a0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000048f003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000000
0000000001e84800000000000000000000000000000000000", + "value_decoded": { + "method": "multiSend", + "parameters": [ + { + "name": "transactions", + "type": "bytes", + "value": "0x003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e8480", + "value_decoded": [ + { + "operation": 0, + "to": "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B", + "value": "0", + "data": "0xc29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9", + "data_decoded": { + "method": "enterMarkets", + "parameters": [ + { + "name": "cTokens", + "type": "address[]", + "value": [ + "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9", + ], + } + ], + }, + }, + { + "operation": 0, + "to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "value": "0", + "data": "0x095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e75630000000000000000000000000000000000000000000000000000000000989680", + "data_decoded": { + "method": "approve", + "parameters": [ + { + "name": "spender", + "type": "address", + "value": 
"0x39AA39c021dfbaE8faC545936693aC917d5E7563", + }, + { + "name": "value", + "type": "uint256", + "value": "10000000", + }, + ], + }, + }, + { + "operation": 0, + "to": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", + "value": "0", + "data": "0xa0712d680000000000000000000000000000000000000000000000000000000000989680", + "data_decoded": { + "method": "mint", + "parameters": [ + { + "name": "mintAmount", + "type": "uint256", + "value": "10000000", + } + ], + }, + }, + { + "operation": 0, + "to": "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9", + "value": "0", + "data": "0xc5ebeaec00000000000000000000000000000000000000000000000000000000001e8480", + "data_decoded": { + "method": "borrow", + "parameters": [ + { + "name": "borrowAmount", + "type": "uint256", + "value": "2000000", + } + ], + }, + }, + { + "operation": 0, + "to": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "value": "0", + "data": "0x095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480", + "data_decoded": { + "method": "approve", + "parameters": [ + { + "name": "spender", + "type": "address", + "value": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", + }, + { + "name": "value", + "type": "uint256", + "value": "2000000", + }, + ], + }, + }, + { + "operation": 0, + "to": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", + "value": "0", + "data": "0x26c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480", + "data_decoded": { + "method": "placeOrder", + "parameters": [ + { + "name": "buyToken", + "type": "uint16", + "value": "4", + }, + { + "name": "sellToken", + "type": "uint16", + "value": "2", + }, + { + "name": "validUntil", + "type": "uint32", + "value": "5400000", + }, + { + "name": "buyAmount", + "type": "uint128", + "value": "1990000", + }, + { + "name": "sellAmount", + "type": "uint128", + "value": "2000000", + }, + ], + }, + }, + { + "operation": 0, + "to": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", + "value": "0", + "data": "0x47e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e8480", + "data_decoded": { + "method": "deposit", + "parameters": [ + { + "name": "token", + "type": "address", + "value": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + }, + { + "name": "amount", + "type": "uint256", + "value": "2000000", + }, + ], + }, + }, + ], + } + ], + }, + }, + {"name": "operation", "type": "uint8", "value": "1"}, + {"name": "safeTxGas", "type": "uint256", "value": "775333"}, + {"name": "baseGas", "type": "uint256", "value": "0"}, + {"name": "gasPrice", "type": "uint256", "value": "0"}, + { + "name": "gasToken", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + { + "name": "refundReceiver", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + { + "name": "signatures", + "type": "bytes", + "value": "0x5cce27c16c9431409a311e1bfc7fb00cf28f223f309af6917bea47a1f787cb84117521c8dd216993ab576ddbf2850a65ed434577ae9153c666d96e9138ddcc901c000000000000000000000000ae5fb390e5c4fa1962e39e98dbfb0ed8055ed7a9000000000000000000000000000000000000000000000000000000000000000001", + }, + ], +} + +# Detected on Sepolia production +# 
eth_abi.exceptions.InsufficientDataBytes: Tried to read 640 bytes, only got 636 bytes +insufficient_data_bytes_mock = HexBytes( + "468721a700000000000000000000000038869bf66a61cf6bdb996a6ae40d5853fd43b52600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000002648d80ff0a0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000021600131a5486c00006dcbe91ebcc56572593901a9dc90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba509700c43df9444bd5aad89bdf3d4e8ea4d546e0444f670000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba509700c7218327a819abd4ea01ba63811349f550a5617d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba50970058a002d589b385d11ccad95e72fd3bcd22f954b60000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba509700c49149b49354d8dece2d81716d3221eae09d18060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba50970048590d4406c2c7432a018d80b992871a8009eefc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000479ba509700000000000000000000000000000000000000000000000000000000000000000000" +) diff --git a/safe_transaction_service/contracts/tests/services/__init__.py b/safe_transaction_service/contracts/tests/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/safe_transaction_service/contracts/tests/services/test_contract_metadata_service.py b/safe_transaction_service/contracts/tests/services/test_contract_metadata_service.py new file mode 100644 index 00000000..c60b367c --- /dev/null +++ b/safe_transaction_service/contracts/tests/services/test_contract_metadata_service.py @@ -0,0 +1,77 @@ +from unittest import mock +from unittest.mock import MagicMock + +from django.test import TestCase + +from eth_account import Account + +from gnosis.eth.clients import BlockscoutClient, EtherscanClient, SourcifyClient +from gnosis.eth.ethereum_client import EthereumClient, EthereumNetwork + +from ...services.contract_metadata_service import get_contract_metadata_service +from ..mocks.contract_metadata_mocks import ( + blockscout_metadata_mock, + etherscan_metadata_mock, + sourcify_metadata_mock, +) + + +class TestContractAbi(TestCase): + def setUp(self): + super().setUp() + with mock.patch.object( + EthereumClient, "get_network", return_value=EthereumNetwork.GNOSIS + ): + # Setup service using Gnosis chain network so Sourcify, Etherscan and Blockscout clients are available + get_contract_metadata_service.cache_clear() + self.contract_metadata_service = get_contract_metadata_service() + + def tearDown(self): + super().tearDown() + get_contract_metadata_service.cache_clear() + + def test_singleton(self): + self.assertEqual( + get_contract_metadata_service(), self.contract_metadata_service + ) + self.assertEqual( + get_contract_metadata_service(), get_contract_metadata_service() + ) + + @mock.patch.object(EtherscanClient, "get_contract_metadata", autospec=True) + 
@mock.patch.object(BlockscoutClient, "get_contract_metadata", autospec=True) + @mock.patch.object( + SourcifyClient, "is_chain_supported", autospec=True, return_value=True + ) + @mock.patch.object(SourcifyClient, "get_contract_metadata", autospec=True) + def test_get_contract_metadata( + self, + sourcify_get_contract_metadata_mock: MagicMock, + sourcify_is_chain_supported: MagicMock, + blockscout_get_contract_metadata_mock: MagicMock, + etherscan_get_contract_metadata_mock: MagicMock, + ): + etherscan_get_contract_metadata_mock.return_value = etherscan_metadata_mock + sourcify_get_contract_metadata_mock.return_value = sourcify_metadata_mock + blockscout_get_contract_metadata_mock.return_value = blockscout_metadata_mock + + random_address = Account.create().address + self.assertEqual( + self.contract_metadata_service.get_contract_metadata(random_address), + sourcify_metadata_mock, + ) + sourcify_get_contract_metadata_mock.return_value = None + self.assertEqual( + self.contract_metadata_service.get_contract_metadata(random_address), + etherscan_metadata_mock, + ) + etherscan_get_contract_metadata_mock.side_effect = IOError + self.assertEqual( + self.contract_metadata_service.get_contract_metadata(random_address), + blockscout_metadata_mock, + ) + + blockscout_get_contract_metadata_mock.side_effect = IOError + self.assertIsNone( + self.contract_metadata_service.get_contract_metadata(random_address) + ) diff --git a/safe_transaction_service/contracts/tests/test_commands.py b/safe_transaction_service/contracts/tests/test_commands.py index c6569869..948cd3e0 100644 --- a/safe_transaction_service/contracts/tests/test_commands.py +++ b/safe_transaction_service/contracts/tests/test_commands.py @@ -1,8 +1,14 @@ from io import StringIO +from unittest.mock import MagicMock, patch from django.core.management import call_command from django.test import TestCase +from gnosis.eth import EthereumClient + +from safe_transaction_service.contracts.models import Contract +from safe_transaction_service.contracts.tests.factories import ContractFactory + class TestCommands(TestCase): def test_index_contracts_with_metadata(self): @@ -21,3 +27,68 @@ def test_index_contracts_with_metadata(self): "Calling `reindex_contracts_without_metadata_task` task", buf.getvalue() ) self.assertIn("Processing finished", buf.getvalue()) + + @patch.object(EthereumClient, "get_chain_id", autospec=True, return_value=137) + def test_update_safe_contracts_logo(self, mock_chain_id: MagicMock): + command = "update_safe_contracts_logo" + buf = StringIO() + random_contract = ContractFactory() + previous_random_contract_logo = random_contract.logo.read() + multisend_address = "0x40A2aCCbd92BCA938b02010E17A5b8929b49130D" + multisend_contract = ContractFactory( + address=multisend_address, name="GnosisMultisend" + ) + multisend_contract_logo = multisend_contract.logo.read() + + call_command(command, stdout=buf) + current_multisend_contract = Contract.objects.get(address=multisend_address) + # Previous created contracts logo should be updated + self.assertNotEqual( + current_multisend_contract.logo.read(), multisend_contract_logo + ) + + # Previous created contracts name and display name should keep unchanged + self.assertEqual(multisend_contract.name, current_multisend_contract.name) + self.assertEqual( + multisend_contract.display_name, current_multisend_contract.display_name + ) + + # No safe contract logos should keep unchanged + current_no_safe_contract_logo: bytes = Contract.objects.get( + address=random_contract.address + ).logo.read() + 
self.assertEqual(current_no_safe_contract_logo, previous_random_contract_logo) + + # Missing safe addresses should be added + self.assertEqual(Contract.objects.count(), 19) + + # Contract name and display name should be correctly generated + safe_l2_130_address = "0x3E5c63644E683549055b9Be8653de26E0B4CD36E" + contract = Contract.objects.get(address=safe_l2_130_address) + self.assertEqual(contract.name, "GnosisSafeL2") + self.assertEqual(contract.display_name, "SafeL2 1.3.0") + self.assertFalse(contract.trusted_for_delegate_call) + + safe_multisend_130_address = "0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761" + contract = Contract.objects.get(address=safe_multisend_130_address) + self.assertEqual(contract.name, "MultiSend") + self.assertEqual(contract.display_name, "Safe: MultiSend 1.3.0") + + # Force to update contract names should update the name and display name of the contract + call_command( + command, + "--force-update-contract-names", + stdout=buf, + ) + contract = Contract.objects.get(address=multisend_address) + self.assertEqual(contract.name, "MultiSendCallOnly") + self.assertEqual(contract.display_name, "Safe: MultiSendCallOnly 1.3.0") + # MultiSendCallOnly should be trusted for delegate calls + self.assertTrue(contract.trusted_for_delegate_call) + + multisend_141_address = "0x9641d764fc13c8B624c04430C7356C1C7C8102e2" + contract = Contract.objects.get(address=multisend_141_address) + self.assertEqual(contract.name, "MultiSendCallOnly") + self.assertEqual(contract.display_name, "Safe: MultiSendCallOnly 1.4.1") + # MultiSendCallOnly should be trusted for delegate calls + self.assertTrue(contract.trusted_for_delegate_call) diff --git a/safe_transaction_service/contracts/tests/test_models.py b/safe_transaction_service/contracts/tests/test_models.py index 71cf0768..6d61e4a5 100644 --- a/safe_transaction_service/contracts/tests/test_models.py +++ b/safe_transaction_service/contracts/tests/test_models.py @@ -1,6 +1,5 @@ import json from unittest import mock -from unittest.mock import MagicMock from django.core.exceptions import ValidationError from django.db import IntegrityError @@ -10,20 +9,15 @@ from eth_account import Account -from gnosis.eth.clients import ( - BlockscoutClient, - ContractMetadata, - EtherscanClient, - Sourcify, -) from gnosis.eth.ethereum_client import EthereumNetwork -from gnosis.eth.tests.clients.mocks import ( - etherscan_source_code_mock, - sourcify_safe_metadata, -) +from gnosis.eth.tests.clients.mocks import sourcify_safe_metadata from ..models import Contract, ContractAbi, validate_abi from .factories import ContractAbiFactory, ContractFactory +from .mocks.contract_metadata_mocks import ( + etherscan_metadata_mock, + sourcify_metadata_mock, +) class TestContractAbi(TestCase): @@ -56,54 +50,39 @@ def test_contract_abi_save(self): class TestContract(TestCase): - @mock.patch.object( - Sourcify, "_do_request", autospec=True, return_value=sourcify_safe_metadata - ) - @mock.patch.object( - EtherscanClient, - "_do_request", - autospec=True, - return_value=etherscan_source_code_mock, - ) - def test_contract_create_from_address( - self, etherscan_request_mock: MagicMock, sourcify_request_mock: MagicMock - ): + def test_contract_create_from_metadata(self): safe_contract_address = "0x6851D6fDFAfD08c0295C392436245E5bc78B0185" - network = EthereumNetwork.MAINNET - contract = Contract.objects.create_from_address( - safe_contract_address, network=network + contract = Contract.objects.create_from_metadata( + safe_contract_address, sourcify_metadata_mock ) - 
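# The management command exercised in test_update_safe_contracts_logo above can
# also be driven programmatically, exactly as the test does. Only the command name
# and the --force-update-contract-names flag come from the test; wrapping the call
# in a helper like this is purely illustrative.
from io import StringIO

from django.core.management import call_command


def refresh_safe_contract_metadata(force_contract_names: bool = False) -> str:
    """Run update_safe_contracts_logo and return its captured output."""
    out = StringIO()
    args = ["--force-update-contract-names"] if force_contract_names else []
    call_command("update_safe_contracts_logo", *args, stdout=out)
    return out.getvalue()

# Shell equivalent (standard Django entry point):
#   python manage.py update_safe_contracts_logo --force-update-contract-names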
self.assertEqual(contract.name, "GnosisSafe") + self.assertEqual(contract.name, "Sourcify Uxio Contract") self.assertTrue(contract.contract_abi.abi) - self.assertEqual(len(contract.contract_abi.abi_functions()), 31) + self.assertEqual(len(contract.contract_abi.abi_functions()), 2) with self.assertRaises(IntegrityError): with atomic(): - Contract.objects.create_from_address( - safe_contract_address, network=network + Contract.objects.create_from_metadata( + safe_contract_address, sourcify_metadata_mock ) - sourcify_request_mock.return_value = None - - # Use etherscan API with self.assertRaises(IntegrityError): with atomic(): - Contract.objects.create_from_address( - safe_contract_address, network=network + Contract.objects.create_from_metadata( + safe_contract_address, etherscan_metadata_mock ) contract.delete() - contract = Contract.objects.create_from_address( - safe_contract_address, network=network + contract = Contract.objects.create_from_metadata( + safe_contract_address, etherscan_metadata_mock ) - self.assertEqual(contract.name, "GnosisSafe") + self.assertEqual(contract.name, "Etherscan Uxio Contract") self.assertTrue(contract.contract_abi.abi) - self.assertEqual(len(contract.contract_abi.abi_functions()), 31) + self.assertEqual(len(contract.contract_abi.abi_functions()), 2) - etherscan_request_mock.return_value = None + contract_metadata = None new_safe_contract_address = Account.create().address - contract_without_metadata = Contract.objects.create_from_address( - new_safe_contract_address, network=network + contract_without_metadata = Contract.objects.create_from_metadata( + new_safe_contract_address, contract_metadata ) self.assertEqual(contract_without_metadata.name, "") self.assertIsNone(contract_without_metadata.contract_abi) @@ -129,55 +108,7 @@ def test_fix_missing_logos(self): self.assertEqual(Contract.objects.with_logo().count(), 1) self.assertEqual(Contract.objects.without_logo().count(), 0) - @mock.patch.object(EtherscanClient, "get_contract_metadata", autospec=True) - @mock.patch.object( - BlockscoutClient, "get_contract_metadata", autospec=True, side_effect=IOError - ) - @mock.patch.object(Sourcify, "get_contract_metadata", autospec=True) - def test_sync_abi_from_api( - self, - sourcify_get_contract_metadata_mock: MagicMock, - blockscout_client_mock: MagicMock, - etherscan_get_contract_abi_mock: MagicMock, - ): - etherscan_get_contract_abi_mock.return_value = ContractMetadata( - "Etherscan Uxio Contract", - [ - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "etherscanParam", - "type": "address", - } - ], - "name": "AddedOwner", - "type": "event", - } - ], - False, - ) - sourcify_get_contract_metadata_mock.return_value = ContractMetadata( - "Sourcify Uxio Contract", - [ - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "sourcifyParam", - "type": "address", - } - ], - "name": "AddedOwner", - "type": "event", - } - ], - False, - ) + def test_update_from_metadata(self): contract_name = "Hello" contract = Contract.objects.create( address="0xaE32496491b53841efb51829d6f886387708F99B", @@ -187,25 +118,21 @@ def test_sync_abi_from_api( network = EthereumNetwork.MAINNET self.assertIsNone(contract.contract_abi) self.assertEqual(ContractAbi.objects.count(), 0) - self.assertTrue(contract.sync_abi_from_api(network=network)) + self.assertTrue(contract.update_from_metadata(sourcify_metadata_mock)) # Remove contract_abi description and sync again to check that's filled 
contract.contract_abi.description = "" contract.contract_abi.save() - self.assertTrue(contract.sync_abi_from_api(network=network)) + self.assertTrue(contract.update_from_metadata(sourcify_metadata_mock)) self.assertIsNotNone(contract.contract_abi) self.assertEqual(contract.name, contract_name) contract_abi = contract.contract_abi self.assertEqual( contract_abi.description, - sourcify_get_contract_metadata_mock.return_value.name, - ) - self.assertEqual( - contract_abi.abi, sourcify_get_contract_metadata_mock.return_value.abi + sourcify_metadata_mock.name, ) - sourcify_get_contract_metadata_mock.side_effect = ( - IOError # Now etherscan should be used - ) - self.assertTrue(contract.sync_abi_from_api(network=network)) + self.assertEqual(contract_abi.abi, sourcify_metadata_mock.abi) + + self.assertTrue(contract.update_from_metadata(etherscan_metadata_mock)) self.assertEqual(ContractAbi.objects.count(), 2) # A new ABI was inserted self.assertNotEqual( contract.contract_abi, contract_abi @@ -213,11 +140,10 @@ def test_sync_abi_from_api( contract_abi.refresh_from_db() self.assertEqual( contract_abi.description, - sourcify_get_contract_metadata_mock.return_value.name, + sourcify_metadata_mock.name, ) # Description should not change - etherscan_get_contract_abi_mock.side_effect = IOError - self.assertFalse(contract.sync_abi_from_api(network=network)) + self.assertFalse(contract.update_from_metadata(None)) def test_without_metadata(self): ContractFactory(name="aloha", contract_abi=None) diff --git a/safe_transaction_service/contracts/tests/test_tasks.py b/safe_transaction_service/contracts/tests/test_tasks.py index 2da18428..97f7d1e1 100644 --- a/safe_transaction_service/contracts/tests/test_tasks.py +++ b/safe_transaction_service/contracts/tests/test_tasks.py @@ -1,5 +1,6 @@ import datetime from unittest import mock +from unittest.mock import MagicMock from django.test import TestCase from django.utils import timezone @@ -7,12 +8,10 @@ from eth_account import Account from hexbytes import HexBytes -from gnosis.eth.clients import Sourcify -from gnosis.eth.tests.clients.mocks import sourcify_safe_metadata - from safe_transaction_service.history.tests.factories import MultisigTransactionFactory from ..models import Contract +from ..services.contract_metadata_service import ContractMetadataService from ..tasks import ( ContractAction, create_missing_contracts_with_metadata_task, @@ -20,10 +19,14 @@ create_or_update_contract_with_metadata_task, reindex_contracts_without_metadata_task, ) +from .mocks.contract_metadata_mocks import sourcify_metadata_mock class TestTasks(TestCase): - def test_contract_tasks(self): + @mock.patch.object( + ContractMetadataService, "get_contract_metadata", return_value=None + ) + def test_contract_tasks(self, contract_metadata_service_mock: MagicMock): self.assertEqual(create_missing_contracts_with_metadata_task.delay().result, 0) [ MultisigTransactionFactory( @@ -38,31 +41,26 @@ def test_contract_tasks(self): ) # Contract ABIs were not found self.assertEqual(create_missing_contracts_with_metadata_task.delay().result, 0) - with mock.patch.object( - Sourcify, "_do_request", autospec=True, return_value=sourcify_safe_metadata - ): - multisig_tx = MultisigTransactionFactory( - to=Account.create().address, data=b"12", trusted=True - ) - contract_metadata = Sourcify().get_contract_metadata(multisig_tx.to) - self.assertEqual( - create_missing_contracts_with_metadata_task.delay().result, 1 - ) - self.assertEqual( - Contract.objects.without_metadata().count(), 2 - ) # Previously 
inserted contracts were not processed - contract = Contract.objects.select_related("contract_abi").get( - address=multisig_tx.to - ) - self.assertEqual(contract.name, contract_metadata.name) - self.assertEqual(contract.contract_abi.abi, contract_metadata.abi) - contract_abi_id = contract.contract_abi_id - - # Reindex all the contracts, they should have the same abi - self.assertEqual(reindex_contracts_without_metadata_task.delay().result, 2) - self.assertEqual( - Contract.objects.filter(contract_abi_id=contract_abi_id).count(), 3 - ) + contract_metadata_service_mock.return_value = sourcify_metadata_mock + multisig_tx = MultisigTransactionFactory( + to=Account.create().address, data=b"12", trusted=True + ) + self.assertEqual(create_missing_contracts_with_metadata_task.delay().result, 1) + self.assertEqual( + Contract.objects.without_metadata().count(), 2 + ) # Previously inserted contracts were not processed + contract = Contract.objects.select_related("contract_abi").get( + address=multisig_tx.to + ) + self.assertEqual(contract.name, sourcify_metadata_mock.name) + self.assertEqual(contract.contract_abi.abi, sourcify_metadata_mock.abi) + contract_abi_id = contract.contract_abi_id + + # Reindex all the contracts, they should have the same abi + self.assertEqual(reindex_contracts_without_metadata_task.delay().result, 2) + self.assertEqual( + Contract.objects.filter(contract_abi_id=contract_abi_id).count(), 3 + ) def test_create_missing_multisend_contracts_with_metadata_task(self): self.assertEqual( @@ -124,28 +122,26 @@ def test_create_missing_multisend_contracts_with_metadata_task(self): create_missing_multisend_contracts_with_metadata_task.delay().result, 0 ) - def test_create_or_update_contract_with_metadata_task(self): - with mock.patch.object( - Sourcify, "_do_request", autospec=True, return_value=sourcify_safe_metadata - ) as sourcify_mock: - random_address = Account.create().address - - self.assertFalse(Contract.objects.filter(address=random_address).exists()) - contract_action = create_or_update_contract_with_metadata_task( - random_address - ) - self.assertEqual(contract_action, ContractAction.CREATED) - self.assertTrue(Contract.objects.filter(address=random_address).exists()) - - # Try with a contract already created - contract_action = create_or_update_contract_with_metadata_task( - random_address - ) - self.assertEqual(contract_action, ContractAction.UPDATED) - self.assertTrue(Contract.objects.filter(address=random_address).exists()) - - sourcify_mock.side_effect = IOError - contract_action = create_or_update_contract_with_metadata_task( - random_address - ) - self.assertEqual(contract_action, ContractAction.NOT_MODIFIED) + @mock.patch.object( + ContractMetadataService, + "get_contract_metadata", + return_value=sourcify_metadata_mock, + ) + def test_create_or_update_contract_with_metadata_task( + self, contract_metadata_service_mock: MagicMock + ): + random_address = Account.create().address + + self.assertFalse(Contract.objects.filter(address=random_address).exists()) + contract_action = create_or_update_contract_with_metadata_task(random_address) + self.assertEqual(contract_action, ContractAction.CREATED) + self.assertTrue(Contract.objects.filter(address=random_address).exists()) + + # Try with a contract already created + contract_action = create_or_update_contract_with_metadata_task(random_address) + self.assertEqual(contract_action, ContractAction.UPDATED) + self.assertTrue(Contract.objects.filter(address=random_address).exists()) + + 
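# Together with test_models, the task tests above outline the new metadata-first
# flow: ContractMetadataService.get_contract_metadata() queries the explorers,
# Contract.objects.create_from_metadata() creates a contract from the result and
# contract.update_from_metadata() refreshes an existing one (it returns False when
# no metadata is available). A rough sketch of how those pieces could compose,
# using only the methods exercised in these tests; the actual task code may differ.
from safe_transaction_service.contracts.models import Contract
from safe_transaction_service.contracts.services.contract_metadata_service import (
    get_contract_metadata_service,
)


def create_or_update_contract(address: str) -> Contract:
    metadata = get_contract_metadata_service().get_contract_metadata(address)
    try:
        contract = Contract.objects.get(address=address)
    except Contract.DoesNotExist:
        # New contract: metadata may be None, in which case name and ABI stay empty
        return Contract.objects.create_from_metadata(address, metadata)
    # Existing contract: update_from_metadata returns False when metadata is None
    contract.update_from_metadata(metadata)
    return contract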
contract_metadata_service_mock.return_value = None + contract_action = create_or_update_contract_with_metadata_task(random_address) + self.assertEqual(contract_action, ContractAction.NOT_MODIFIED) diff --git a/safe_transaction_service/contracts/tests/test_tx_decoder.py b/safe_transaction_service/contracts/tests/test_tx_decoder.py index 1784744e..f25baf46 100644 --- a/safe_transaction_service/contracts/tests/test_tx_decoder.py +++ b/safe_transaction_service/contracts/tests/test_tx_decoder.py @@ -18,11 +18,17 @@ DbTxDecoder, SafeTxDecoder, TxDecoder, + UnexpectedProblemDecoding, get_db_tx_decoder, get_safe_tx_decoder, get_tx_decoder, is_db_tx_decoder_loaded, ) +from .mocks.tx_decoder_mocks import ( + exec_transaction_data_mock, + exec_transaction_decoded_mock, + insufficient_data_bytes_mock, +) logger = logging.getLogger(__name__) @@ -294,211 +300,19 @@ def test_decode_multisend_not_valid(self): ) def test_decode_safe_exec_transaction(self): - data = HexBytes( - "0x6a761202000000000000000000000000b522a9f781924ed250a11c54105e51840b138add00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000bd4a50000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000066000000000000000000000000000000000000000000000000000000000000004e48d80ff0a0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000048f003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d394000000000000000000000
0000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e848000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000825cce27c16c9431409a311e1bfc7fb00cf28f223f309af6917bea47a1f787cb84117521c8dd216993ab576ddbf2850a65ed434577ae9153c666d96e9138ddcc901c000000000000000000000000ae5fb390e5c4fa1962e39e98dbfb0ed8055ed7a9000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000" - ) + data = exec_transaction_data_mock tx_decoder = get_tx_decoder() self.assertEqual( - tx_decoder.get_data_decoded(data), - { - "method": "execTransaction", - "parameters": [ - { - "name": "to", - "type": "address", - "value": "0xB522a9f781924eD250A11C54105E51840B138AdD", - }, - {"name": "value", "type": "uint256", "value": "0"}, - { - "name": "data", - "type": "bytes", - "value": "0x8d80ff0a0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000048f003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c0000000000000000000000000000000000000000000000000
00000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e84800000000000000000000000000000000000", - "value_decoded": { - "method": "multiSend", - "parameters": [ - { - "name": "transactions", - "type": "bytes", - "value": "0x003d9819210a31b4961b30ef54be2aed79b9c9cd3b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084c29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000009896800039aa39c021dfbae8fac545936693ac917d5e756300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024a0712d68000000000000000000000000000000000000000000000000000000000098968000f650c3d88d12db855b8bf7d11be6c55a4e07dcc900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024c5ebeaec00000000000000000000000000000000000000000000000000000000001e848000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a426c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480006f400810b62df8e13fded51be75ff5393eaa841f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004447e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e8480", - "value_decoded": [ - { - "operation": 0, - "to": "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B", - "value": "0", - "data": "0xc29982380000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000039aa39c021dfbae8fac545936693ac917d5e7563000000000000000000000000f650c3d88d12db855b8bf7d11be6c55a4e07dcc9", - "data_decoded": { - "method": "enterMarkets", - "parameters": [ - { - "name": "cTokens", - "type": "address[]", - "value": [ - "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9", - ], - } - ], - }, - }, - { - "operation": 0, - 
"to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - "value": "0", - "data": "0x095ea7b300000000000000000000000039aa39c021dfbae8fac545936693ac917d5e75630000000000000000000000000000000000000000000000000000000000989680", - "data_decoded": { - "method": "approve", - "parameters": [ - { - "name": "spender", - "type": "address", - "value": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - }, - { - "name": "value", - "type": "uint256", - "value": "10000000", - }, - ], - }, - }, - { - "operation": 0, - "to": "0x39AA39c021dfbaE8faC545936693aC917d5E7563", - "value": "0", - "data": "0xa0712d680000000000000000000000000000000000000000000000000000000000989680", - "data_decoded": { - "method": "mint", - "parameters": [ - { - "name": "mintAmount", - "type": "uint256", - "value": "10000000", - } - ], - }, - }, - { - "operation": 0, - "to": "0xf650C3d88D12dB855b8bf7D11Be6C55A4e07dCC9", - "value": "0", - "data": "0xc5ebeaec00000000000000000000000000000000000000000000000000000000001e8480", - "data_decoded": { - "method": "borrow", - "parameters": [ - { - "name": "borrowAmount", - "type": "uint256", - "value": "2000000", - } - ], - }, - }, - { - "operation": 0, - "to": "0xdAC17F958D2ee523a2206206994597C13D831ec7", - "value": "0", - "data": "0x095ea7b30000000000000000000000006f400810b62df8e13fded51be75ff5393eaa841f00000000000000000000000000000000000000000000000000000000001e8480", - "data_decoded": { - "method": "approve", - "parameters": [ - { - "name": "spender", - "type": "address", - "value": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", - }, - { - "name": "value", - "type": "uint256", - "value": "2000000", - }, - ], - }, - }, - { - "operation": 0, - "to": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", - "value": "0", - "data": "0x26c3d3940000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000005265c000000000000000000000000000000000000000000000000000000000001e5d7000000000000000000000000000000000000000000000000000000000001e8480", - "data_decoded": { - "method": "placeOrder", - "parameters": [ - { - "name": "buyToken", - "type": "uint16", - "value": "4", - }, - { - "name": "sellToken", - "type": "uint16", - "value": "2", - }, - { - "name": "validUntil", - "type": "uint32", - "value": "5400000", - }, - { - "name": "buyAmount", - "type": "uint128", - "value": "1990000", - }, - { - "name": "sellAmount", - "type": "uint128", - "value": "2000000", - }, - ], - }, - }, - { - "operation": 0, - "to": "0x6F400810b62df8E13fded51bE75fF5393eaa841F", - "value": "0", - "data": "0x47e7ef24000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000001e8480", - "data_decoded": { - "method": "deposit", - "parameters": [ - { - "name": "token", - "type": "address", - "value": "0xdAC17F958D2ee523a2206206994597C13D831ec7", - }, - { - "name": "amount", - "type": "uint256", - "value": "2000000", - }, - ], - }, - }, - ], - } - ], - }, - }, - {"name": "operation", "type": "uint8", "value": "1"}, - {"name": "safeTxGas", "type": "uint256", "value": "775333"}, - {"name": "baseGas", "type": "uint256", "value": "0"}, - {"name": "gasPrice", "type": "uint256", "value": "0"}, - { - "name": "gasToken", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - { - "name": "refundReceiver", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - { - "name": "signatures", - "type": "bytes", - "value": 
"0x5cce27c16c9431409a311e1bfc7fb00cf28f223f309af6917bea47a1f787cb84117521c8dd216993ab576ddbf2850a65ed434577ae9153c666d96e9138ddcc901c000000000000000000000000ae5fb390e5c4fa1962e39e98dbfb0ed8055ed7a9000000000000000000000000000000000000000000000000000000000000000001", - }, - ], - }, + tx_decoder.get_data_decoded(data), exec_transaction_decoded_mock ) + def test_unexpected_problem_decoding(self): + data = insufficient_data_bytes_mock + tx_decoder = get_tx_decoder() + + with self.assertRaises(UnexpectedProblemDecoding): + tx_decoder.decode_transaction(data) + def test_supported_fn_selectors(self): for tx_decoder in (TxDecoder(), get_tx_decoder(), get_safe_tx_decoder()): self.assertIn( diff --git a/safe_transaction_service/contracts/tx_decoder.py b/safe_transaction_service/contracts/tx_decoder.py index 0b7cebc7..2fb9d86f 100644 --- a/safe_transaction_service/contracts/tx_decoder.py +++ b/safe_transaction_service/contracts/tx_decoder.py @@ -2,6 +2,7 @@ import operator from functools import cache, cached_property from logging import getLogger +from threading import Lock from typing import ( Any, Dict, @@ -38,6 +39,7 @@ get_safe_V1_0_0_contract, get_safe_V1_1_1_contract, get_safe_V1_3_0_contract, + get_safe_V1_4_1_contract, get_uniswap_exchange_contract, ) from gnosis.safe.multi_send import MultiSend @@ -117,6 +119,9 @@ class MultisendDecoded(TypedDict): data_decoded: Optional[DataDecoded] +mutex = Lock() + + @cache def get_db_tx_decoder() -> "DbTxDecoder": """ @@ -126,16 +131,19 @@ def get_db_tx_decoder() -> "DbTxDecoder": the ``DbTxDecoder`` multiple times, and depending on the number of Contracts in the database it could take a lot. """ + with mutex: + if is_db_tx_decoder_loaded(): + return get_db_tx_decoder() - def _get_db_tx_decoder() -> "DbTxDecoder": - return DbTxDecoder() + def _get_db_tx_decoder() -> "DbTxDecoder": + return DbTxDecoder() - if running_on_gevent(): - # It's a very intensive CPU task, so to prevent blocking - # http://www.gevent.org/api/gevent.threadpool.html - pool = gevent.get_hub().threadpool - return pool.spawn(_get_db_tx_decoder).get() - return _get_db_tx_decoder() + if running_on_gevent(): + # It's a very intensive CPU task, so to prevent blocking + # http://www.gevent.org/api/gevent.threadpool.html + pool = gevent.get_hub().threadpool + return pool.spawn(_get_db_tx_decoder).get() + return _get_db_tx_decoder() def is_db_tx_decoder_loaded() -> bool: @@ -163,9 +171,9 @@ class SafeTxDecoder: def __init__(self): logger.info("%s: Loading contract ABIs for decoding", self.__class__.__name__) - self.fn_selectors_with_abis: Dict[ - bytes, ABIFunction - ] = self._generate_selectors_with_abis_from_abis(self.get_supported_abis()) + self.fn_selectors_with_abis: Dict[bytes, ABIFunction] = ( + self._generate_selectors_with_abis_from_abis(self.get_supported_abis()) + ) logger.info( "%s: Contract ABIs for decoding were loaded", self.__class__.__name__ ) @@ -334,6 +342,7 @@ def get_supported_abis(self) -> Iterable[ABIFunction]: get_safe_V1_0_0_contract(self.dummy_w3).abi, get_safe_V1_1_1_contract(self.dummy_w3).abi, get_safe_V1_3_0_contract(self.dummy_w3).abi, + get_safe_V1_4_1_contract(self.dummy_w3).abi, ] # Order is important. If signature is the same (e.g. 
renaming of `baseGas`) last elements in the list diff --git a/safe_transaction_service/events/services/__init__.py b/safe_transaction_service/events/services/__init__.py index e69de29b..bf6f4932 100644 --- a/safe_transaction_service/events/services/__init__.py +++ b/safe_transaction_service/events/services/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa F401 +from .queue_service import MockedQueueService, QueueService diff --git a/safe_transaction_service/events/services/queue_service.py b/safe_transaction_service/events/services/queue_service.py index c53a85a8..6dc8b3fb 100644 --- a/safe_transaction_service/events/services/queue_service.py +++ b/safe_transaction_service/events/services/queue_service.py @@ -1,277 +1,174 @@ import json import logging +from functools import cache from typing import Any, Dict, List, Optional from django.conf import settings import pika.exceptions from pika import BlockingConnection, URLParameters -from pika.adapters.gevent_connection import GeventConnection from pika.channel import Channel from pika.exchange_type import ExchangeType logger = logging.getLogger(__name__) -class QueueServiceProvider: - def __new__(cls): - if not hasattr(cls, "instance"): - if settings.EVENTS_QUEUE_URL: - if settings.EVENTS_QUEUE_ASYNC_CONNECTION: - cls.instance = AsyncQueueService() - else: - cls.instance = SyncQueueService() - else: - # Mock send_event to not configured host us is not mandatory configure a queue for events - cls.instance = MockedQueueService() - logger.warning("MockedQueueService is used") - return cls.instance - - @classmethod - def del_singleton(cls): - if hasattr(cls, "instance"): - del cls.instance - - -class QueueService: +class BrokerConnection: def __init__(self): self.exchange_name: str = settings.EVENTS_QUEUE_EXCHANGE_NAME - self._channel: Channel = None - self._connection: GeventConnection = None - self.unsent_events: List = [] - self._connection_parameters: URLParameters = URLParameters( - settings.EVENTS_QUEUE_URL - ) + self.channel: Optional[Channel] = None + self.connection_parameters = URLParameters(settings.EVENTS_QUEUE_URL) + self.connection: Optional[BlockingConnection] = self.connect() - def send_event( - self, payload: Dict[str, Any], fail_retry: Optional[bool] = True - ) -> bool: + def connect(self) -> Optional[BlockingConnection]: """ - Send an event to rabbitMq exchange + This method connects to RabbitMq using BlockingConnection. - :param payload: Dict with the payload of the event - :param fail_retry: if True the unsent event because any error will be retried. 
+ :return: BlockingConnection """ - if self._channel is None or not self._channel.is_open: - logger.warning("Connection is still not initialized") - if fail_retry: - self.unsent_events.append(payload) - return False - try: - event = json.dumps(payload) - self._channel.basic_publish( - exchange=self.exchange_name, routing_key="", body=event + logger.debug("Opening connection to RabbitMQ") + self.connection = BlockingConnection(self.connection_parameters) + self.channel = self.connection.channel() + self.channel.confirm_delivery() + # Declare exchange + self.channel.exchange_declare( + exchange=self.exchange_name, + exchange_type=ExchangeType.fanout, + durable=True, ) - return True - except pika.exceptions.ConnectionClosedByBroker: - logger.warning("Event can not be sent due to there is no channel opened") - if fail_retry: - self.unsent_events.append(payload) - return False + logger.debug("Opened connection to RabbitMQ") + return self.connection + except pika.exceptions.AMQPError: + logger.error("Cannot open connection to RabbitMQ") + return None - def send_unsent_events(self) -> int: + def publish(self, message: str, retry: Optional[bool] = True) -> bool: """ - If connection is ready send the unsent messages list due connection broken - - :return: number of messages sent + :param message: + :param retry: + :return: `True` if message was published, `False` otherwise """ - sent_events = 0 - if self._channel.is_open and len(self.unsent_events) > 0: - logger.info("Sending %i not sent messages", len(self.unsent_events)) - for unsent_message in list(self.unsent_events): - if self.send_event(unsent_message, fail_retry=False): - self.unsent_events.remove(unsent_message) - sent_events += 1 - else: - break - - return sent_events + try: + self.channel.basic_publish( + exchange=self.exchange_name, routing_key="", body=message + ) + return True + except pika.exceptions.AMQPError: + if retry: + logger.info("The connection has been terminated, trying again.") + # One more chance + self.connect() + return self.publish(message, retry=False) + return False - def remove_unsent_events(self): - self.unsent_events = [] +@cache +def get_queue_service(): + if settings.EVENTS_QUEUE_URL: + return QueueService() + else: + # Mock send_event to not configured host us is not mandatory configure a queue for events + logger.warning("MockedQueueService is used") + return MockedQueueService() -class AsyncQueueService(QueueService): - # Singleton class definition +class QueueService: def __init__(self): - super().__init__() - self.connect() - - def connect(self) -> GeventConnection: - """ - This method connects to RabbitMq. - When the connection is established, the on_connection_open method - will be invoked by pika. - - :return: GeventConnection - """ - return GeventConnection( - self._connection_parameters, - on_open_callback=self.on_connection_open, - on_open_error_callback=self.on_connection_open_error, - on_close_callback=self.on_connection_closed, - ) - - def on_connection_open(self, connection: GeventConnection): - """ - This method is called by pika once the connection to RabbitMQ has - been established. It passes the handle to the connection object. 
- - :param GeventConnection connection: The connection - """ - - logger.info("Connection opened with %s", self._connection_parameters.host) - self._connection = connection - self.open_channel() + self._connection_pool: List[BrokerConnection] = [] + self._total_connections: int = 0 + self.unsent_events: List = [] - def on_connection_open_error(self, connection: GeventConnection, err: Exception): + def get_connection(self) -> Optional[BrokerConnection]: """ - This method is called by pika if the connection to RabbitMQ - can't be established. Connection object is paased if were necessary - Always retry the reconnection every 5 seconds. - - :param GeventConnection: The connection - :param Exception err: The error + :return: A `BrokerConnection` from the connection pool if there is one available, otherwise + returns a new BrokerConnection """ - logger.error( - "Connection open failed with %s, retrying in 5 seconds: %s", - self._connection_parameters.host, - err, - ) - connection.ioloop.call_later(5, self.connect) + if ( + settings.EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT + and self._total_connections >= settings.EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT + ): + logger.warning( + "Number of active connections reached the pool limit: %d", + self._total_connections, + ) + return None - def on_connection_closed(self, connection: GeventConnection, reason: Exception): - """ - This method is invoked by pika when the connection to RabbitMQ is - closed unexpectedly. Since it is unexpected, we will reconnect to - RabbitMQ if it disconnects. + if self._connection_pool: + broker_connection = self._connection_pool.pop() + else: + broker_connection = BrokerConnection() - :param GeventConnection: The closed connection obj - :param Exception reason: exception representing reason for loss of - connection. - """ - self._channel = None - logger.error( - "Connection closed with %s, reopening in 5 seconds: %s", - self._connection_parameters.host, - reason, - ) - connection.ioloop.call_later(5, self.connect) + self._total_connections += 1 + return broker_connection - def open_channel(self): - """ - This method will open a new channel with RabbitMQ by issuing the - Channel.Open RPC command. When RabbitMQ confirms the channel is open - by sending the Channel.OpenOK RPC reply, the on_channel_open method - will be invoked. + def release_connection(self, broker_connection: Optional[BrokerConnection]): """ - logger.info("Opening a new channel") - self._connection.channel(on_open_callback=self.on_channel_open) + Return the `BrokerConnection` to the pool - def on_channel_open(self, channel: Channel): + :param broker_connection: + :return: """ - This method is invoked by pika when the channel has been opened. - The channel object is passed in so we can make use of it. + self._total_connections -= 1 + # Don't add broken connections to the pool + if broker_connection: + self._connection_pool.insert(0, broker_connection) - :param pika.channel.Channel channel: The channel object + def send_event(self, payload: Dict[str, Any]) -> int: """ - logger.info("Channel with number %i opened", channel.channel_number) - self._channel = channel - self._channel.add_on_close_callback(self.on_channel_closed) - self.setup_exchange() + Publish event using the `BrokerConnection` - def on_channel_closed(self, channel: Channel, reason: Exception): + :param payload: Number of events published """ - Invoked by pika when RabbitMQ unexpectedly closes the channel. - Channels are usually closed if you attempt to do something that - violates the protocol. 
- In this method we retry to open a new channel with rabbitMQ if the connection is still open. + event = json.dumps(payload) + if not (broker_connection := self.get_connection()): + # No available connections in the pool, store event to send it later + self.unsent_events.append(event) + return 0 - :param Channel channel: The closed channel - :param Exception reason: why the channel was closed - """ - logger.warning("Channel %i was closed: %s", channel.channel_number, reason) - self._channel = None - if self._connection and self._connection.is_open: - # If channel was closed and connection is still active we try to reopen the channel - logger.error( - "Connection is opened retry to open channel in 5 seconds: %s", - self._connection_parameters.host, - reason, - ) - self._connection.ioloop.call_later(5, self.open_channel()) + if broker_connection.publish(event): + logger.debug("Event correctly sent: %s", event) + self.release_connection(broker_connection) + return self.send_unsent_events() + 1 - def setup_exchange(self): - """ - Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC - command. When it is complete, the on_exchange_declareok method will - be invoked by pika. - """ - logger.info("Declaring exchange %s", self.exchange_name) - - self._channel.exchange_declare( - exchange=self.exchange_name, - exchange_type=ExchangeType.fanout, - durable=True, - callback=self.on_exchange_declareok, - ) - - def on_exchange_declareok(self, _unused_frame): - """Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC - command. - Send unsent messages that cannot be sent as due connection errors. + logger.warning("Unable to send the event due to a connection error") + logger.debug("Adding %s to unsent messages", payload) + self.unsent_events.append(event) + # As the message cannot be sent, we don't want to send the problematic connection back to the pool, only reduce the number of total connections + self.release_connection(None) + return 0 - :param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame + def send_unsent_events(self) -> int: """ + If connection is ready send the unsent messages list - logger.info("Exchange declared: %s", self.exchange_name) - self.send_unsent_events() - - -class SyncQueueService(QueueService): - """ - Synchronous connection with test purpose as we cannot test using gevent connection - """ - - def __init__(self): - super().__init__() - self.connect() - - def connect(self) -> BlockingConnection: + :return: number of messages sent """ - This method connects to RabbitMq using Blockingconnection. 
- Store in _connection the BlocingConnection object and creates a new channel + if not self.unsent_events: + return 0 - :return: BlockingConnection - """ - try: - self._connection = BlockingConnection(self._connection_parameters) - self._channel = self.open_channel() - self.setup_exchange() - return self._connection - except pika.exceptions.AMQPConnectionError: - logger.error("Cannot open connection, retrying") + if not (broker_connection := self.get_connection()): + # Connection not available in the pool + return 0 - def open_channel(self) -> Channel: - """ - Open a new channel + # Avoid race conditions + unsent_events = self.unsent_events + self.unsent_events = [] - :return: channel opened - """ - return self._connection.channel() + total_sent_events = 0 + logger.info("Sending previously unsent messages: %i", len(unsent_events)) + for unsent_message in unsent_events: + if broker_connection.publish(unsent_message): + total_sent_events += 1 + else: + self.unsent_events.append(unsent_message) - def setup_exchange(self): - """ - Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC - command. - """ - logger.info("Declaring exchange %s", self.exchange_name) + self.release_connection(broker_connection) + logger.info("Correctly sent messages: %i", total_sent_events) + return total_sent_events - self._channel.exchange_declare( - exchange=self.exchange_name, exchange_type=ExchangeType.fanout, durable=True - ) + def clear_unsent_events(self): + self.unsent_events.clear() class MockedQueueService: diff --git a/safe_transaction_service/events/tasks.py b/safe_transaction_service/events/tasks.py deleted file mode 100644 index 8b2391c8..00000000 --- a/safe_transaction_service/events/tasks.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Any, Dict - -from celery import app - -from safe_transaction_service.events.services.queue_service import QueueServiceProvider - - -@app.shared_task() -def send_event_to_queue_task(payload: Dict[str, Any]) -> bool: - if payload: - queue_service = QueueServiceProvider() - return queue_service.send_event(payload) - - return False diff --git a/safe_transaction_service/events/tests/test_queue_service.py b/safe_transaction_service/events/tests/test_queue_service.py index 518d1173..5607c302 100644 --- a/safe_transaction_service/events/tests/test_queue_service.py +++ b/safe_transaction_service/events/tests/test_queue_service.py @@ -6,23 +6,29 @@ from pika.channel import Channel from pika.exceptions import ConnectionClosedByBroker -from safe_transaction_service.events.services.queue_service import QueueServiceProvider +from ..services.queue_service import BrokerConnection, QueueService, get_queue_service class TestQueueService(TestCase): def setUp(self): - self.queue_service = QueueServiceProvider() + broker_connection = BrokerConnection() # Create queue for test self.queue = "test_queue" - self.queue_service._channel.queue_declare(self.queue) - self.queue_service._channel.queue_bind( - self.queue, self.queue_service.exchange_name + + broker_connection.channel.queue_declare(self.queue) + broker_connection.channel.queue_bind( + self.queue, broker_connection.exchange_name ) + # Clean queue to avoid old messages + broker_connection.channel.queue_purge(self.queue) def test_send_unsent_messages(self): - queue_service = QueueServiceProvider() + queue_service = get_queue_service() + # Clean previous pool connections + queue_service._connection_pool = [] messages_to_send = 10 - queue_service.remove_unsent_events() + queue_service.clear_unsent_events() + 
self.assertEqual(len(queue_service._connection_pool), 0) with mock.patch.object( Channel, "basic_publish", @@ -30,26 +36,82 @@ def test_send_unsent_messages(self): ): for i in range(messages_to_send): payload = f"not sent {i}" - self.assertFalse(queue_service.send_event(payload)) - # Shouldn't add this message to unsent_messages list - self.assertFalse(queue_service.send_event(payload, fail_retry=False)) + queue_service.send_event(payload) - self.assertEquals(len(queue_service.unsent_events), messages_to_send) - self.assertEquals(queue_service.send_unsent_events(), 0) + self.assertEqual(len(queue_service.unsent_events), messages_to_send) + self.assertEqual(queue_service.send_unsent_events(), 0) - # After reconnection should send messages - self.assertEquals(queue_service.send_unsent_events(), messages_to_send) - self.assertEquals(len(queue_service.unsent_events), 0) + # After reconnection should send event and previous messages (10+1) + self.assertEqual(queue_service.send_event("not sent 11"), messages_to_send + 1) + # Everything should be sent by send_event + self.assertEqual(queue_service.send_unsent_events(), 0) + self.assertEqual(len(queue_service.unsent_events), 0) + # Just one connection should be requested + self.assertEqual(len(queue_service._connection_pool), 1) + broker_connection = queue_service.get_connection() + # First event published should be the last 1 + _, _, body = broker_connection.channel.basic_get(self.queue, auto_ack=True) + self.assertEqual(json.loads(body), "not sent 11") + # Check if all unsent_events were sent for i in range(messages_to_send): payload = f"not sent {i}" - _, _, body = queue_service._channel.basic_get(self.queue, auto_ack=True) - self.assertEquals(json.loads(body), payload) + _, _, body = broker_connection.channel.basic_get(self.queue, auto_ack=True) + self.assertEqual(json.loads(body), payload) + + def test_send_with_pool_limit(self): + queue_service = QueueService() + payload = "Pool limit test" + # Unused connection, just to reach the limit + connection_1 = queue_service.get_connection() + self.assertEqual(len(queue_service.unsent_events), 0) + self.assertEqual(queue_service.send_event(payload), 1) + with self.settings(EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT=1): + self.assertEqual(queue_service._total_connections, 1) + self.assertEqual(len(queue_service.unsent_events), 0) + self.assertEqual(queue_service.send_event(payload), 0) + self.assertEqual(len(queue_service.unsent_events), 1) + queue_service.release_connection(connection_1) + self.assertEqual(len(queue_service.unsent_events), 1) + self.assertEqual(queue_service.send_event(payload), 2) + self.assertEqual(len(queue_service.unsent_events), 0) def test_send_event_to_queue(self): payload = {"event": "test_event", "type": "event type"} - - self.assertTrue(self.queue_service.send_event(payload)) - + queue_service = QueueService() + # Clean previous connection pool + queue_service._connection_pool = [] + self.assertEqual(len(queue_service._connection_pool), 0) + queue_service.send_event(payload) + self.assertEqual(len(queue_service._connection_pool), 1) + broker_connection = queue_service.get_connection() # Check if message was written to the queue - _, _, body = self.queue_service._channel.basic_get(self.queue, auto_ack=True) - self.assertEquals(json.loads(body), payload) + _, _, body = broker_connection.channel.basic_get(self.queue, auto_ack=True) + self.assertEqual(json.loads(body), payload) + + def test_get_connection(self): + queue_service = QueueService() + # Clean previous connection pool + 
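# Usage sketch for the reworked queue layer tested above, assuming EVENTS_QUEUE_URL
# (and optionally EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT) is configured.
# get_queue_service() is cached and falls back to MockedQueueService when no broker
# URL is set; QueueService.send_event() returns how many events were published,
# counting previously unsent events that are flushed once a broker connection is
# available again. The notify() wrapper below is hypothetical.
from typing import Any, Dict

from safe_transaction_service.events.services.queue_service import get_queue_service


def notify(payload: Dict[str, Any]) -> int:
    """Publish an event; 0 means it was stored in unsent_events for a later retry."""
    queue_service = get_queue_service()
    return queue_service.send_event(payload)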
queue_service._connection_pool = [] + self.assertEqual(len(queue_service._connection_pool), 0) + self.assertEqual(queue_service._total_connections, 0) + connection_1 = queue_service.get_connection() + self.assertEqual(len(queue_service._connection_pool), 0) + self.assertEqual(queue_service._total_connections, 1) + connection_2 = queue_service.get_connection() + self.assertEqual(len(queue_service._connection_pool), 0) + self.assertEqual(queue_service._total_connections, 2) + queue_service.release_connection(connection_1) + self.assertEqual(len(queue_service._connection_pool), 1) + self.assertEqual(queue_service._total_connections, 1) + queue_service.release_connection(connection_2) + self.assertEqual(len(queue_service._connection_pool), 2) + self.assertEqual(queue_service._total_connections, 0) + with self.settings(EVENTS_QUEUE_POOL_CONNECTIONS_LIMIT=1): + connection_1 = queue_service.get_connection() + self.assertEqual(len(queue_service._connection_pool), 1) + self.assertEqual(queue_service._total_connections, 1) + # We should reach the connection limit of the pool + connection_1 = queue_service.get_connection() + self.assertEqual(len(queue_service._connection_pool), 1) + self.assertEqual(queue_service._total_connections, 1) + self.assertIsNone(connection_1) diff --git a/safe_transaction_service/events/tests/test_tasks.py b/safe_transaction_service/events/tests/test_tasks.py deleted file mode 100644 index 639b7d80..00000000 --- a/safe_transaction_service/events/tests/test_tasks.py +++ /dev/null @@ -1,38 +0,0 @@ -import json -from unittest import mock - -from django.test import TestCase - -from pika.channel import Channel -from pika.exceptions import ConnectionClosedByBroker - -from safe_transaction_service.events.tasks import send_event_to_queue_task -from safe_transaction_service.events.tests.test_queue_service import TestQueueService - - -class TestTasks(TestQueueService, TestCase): - def test_send_event_to_queue_task(self): - self.assertFalse(send_event_to_queue_task(None)) - payload = {"event": "test_event_task", "type": "event task type"} - with mock.patch.object( - Channel, "basic_publish", return_value=None - ) as mock_publish: - self.assertTrue(send_event_to_queue_task(payload)) - mock_publish.assert_called_once_with( - exchange=self.queue_service.exchange_name, - routing_key="", - body=json.dumps(payload), - properties=None, - mandatory=False, - ) - - self.assertTrue(send_event_to_queue_task(payload)) - _, _, body = self.queue_service._channel.basic_get(self.queue, auto_ack=True) - self.assertEquals(json.loads(body), payload) - - with mock.patch.object( - Channel, - "basic_publish", - side_effect=ConnectionClosedByBroker(320, "Connection closed"), - ): - self.assertFalse(send_event_to_queue_task(payload)) diff --git a/safe_transaction_service/history/admin.py b/safe_transaction_service/history/admin.py index eb76cd58..43e01e1e 100644 --- a/safe_transaction_service/history/admin.py +++ b/safe_transaction_service/history/admin.py @@ -11,7 +11,7 @@ from rest_framework.authtoken.admin import TokenAdmin from gnosis.eth import EthereumClientProvider -from gnosis.eth.django.admin import BinarySearchAdmin +from gnosis.eth.django.admin import AdvancedAdminSearchMixin from gnosis.safe import SafeTx from .models import ( @@ -90,6 +90,9 @@ class SafeContractDelegateInline(admin.TabularInline): # Admin models ------------------------------ @admin.register(IndexingStatus) class IndexingStatusAdmin(admin.ModelAdmin): + class Meta: + verbose_name_plural = "Indexing Status" + list_display = ( 
"indexing_type", "block_number", @@ -107,7 +110,7 @@ class ChainAdmin(admin.ModelAdmin): @admin.register(EthereumBlock) -class EthereumBlockAdmin(admin.ModelAdmin): +class EthereumBlockAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "timestamp" inlines = (EthereumTxInline,) list_display = ( @@ -120,13 +123,13 @@ class EthereumBlockAdmin(admin.ModelAdmin): ) list_filter = ("confirmed",) search_fields = [ - "number", - "=block_hash", + "==number", + "==block_hash", ] ordering = ["-number"] -class TokenTransferAdmin(BinarySearchAdmin): +class TokenTransferAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "timestamp" list_display = ( "timestamp", @@ -140,7 +143,7 @@ class TokenTransferAdmin(BinarySearchAdmin): ) list_select_related = ("ethereum_tx",) ordering = ["-timestamp"] - search_fields = ["=_from", "=to", "=address", "=ethereum_tx__tx_hash"] + search_fields = ["==_from", "==to", "==address", "==ethereum_tx__tx_hash"] raw_id_fields = ("ethereum_tx",) @@ -169,7 +172,7 @@ def to_erc20(self, request, queryset): @admin.register(EthereumTx) -class EthereumTxAdmin(BinarySearchAdmin): +class EthereumTxAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): inlines = ( ERC20TransferInline, ERC721TransferInline, @@ -179,13 +182,13 @@ class EthereumTxAdmin(BinarySearchAdmin): ) list_display = ("block_id", "tx_hash", "nonce", "_from", "to") list_filter = ("status", "type") - search_fields = ["=tx_hash", "=_from", "=to"] + search_fields = ["==block_id", "==tx_hash", "==_from", "==to"] ordering = ["-block_id"] raw_id_fields = ("block",) @admin.register(InternalTx) -class InternalTxAdmin(BinarySearchAdmin): +class InternalTxAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "timestamp" inlines = (InternalTxDecodedInline,) list_display = ( @@ -207,16 +210,16 @@ class InternalTxAdmin(BinarySearchAdmin): ] raw_id_fields = ("ethereum_tx",) search_fields = [ - "block_number", - "=_from", - "=to", - "=ethereum_tx__tx_hash", - "=contract_address", + "==block_number", + "==_from", + "==to", + "==ethereum_tx__tx_hash", + "==contract_address", ] class InternalTxDecodedOfficialListFilter(admin.SimpleListFilter): - title = "Gnosis official Safes" + title = "Official Safes" parameter_name = "official_safes" def lookups(self, request, model_admin): @@ -237,7 +240,7 @@ def queryset(self, request, queryset): @admin.register(InternalTxDecoded) -class InternalTxDecodedAdmin(BinarySearchAdmin): +class InternalTxDecodedAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): actions = ["process_again"] list_display = ( "block_number", @@ -257,11 +260,11 @@ class InternalTxDecodedAdmin(BinarySearchAdmin): ] raw_id_fields = ("internal_tx",) search_fields = [ - "=function_name", - "=internal_tx__to", - "=internal_tx___from", - "=internal_tx__ethereum_tx__tx_hash", - "=internal_tx__block_number", + "==function_name", + "==internal_tx__to", + "==internal_tx___from", + "==internal_tx__ethereum_tx__tx_hash", + "==internal_tx__block_number", ] @admin.action(description="Process internal tx again") @@ -287,7 +290,7 @@ def queryset(self, request, queryset): @admin.register(MultisigConfirmation) -class MultisigConfirmationAdmin(BinarySearchAdmin): +class MultisigConfirmationAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ( "block_number", "multisig_transaction_hash", @@ -301,10 +304,10 @@ class MultisigConfirmationAdmin(BinarySearchAdmin): ordering = ["-created"] raw_id_fields = ("ethereum_tx", "multisig_transaction") search_fields = [ - "=multisig_transaction__safe", - 
"=ethereum_tx__tx_hash", - "=multisig_transaction_hash", - "=owner", + "==multisig_transaction__safe", + "==ethereum_tx__tx_hash", + "==multisig_transaction_hash", + "==owner", ] @admin.display() @@ -356,7 +359,7 @@ class MultisigTransactionAdminForm(forms.ModelForm): @admin.register(MultisigTransaction) -class MultisigTransactionAdmin(BinarySearchAdmin): +class MultisigTransactionAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "created" form = MultisigTransactionAdminForm inlines = (MultisigConfirmationInline,) @@ -382,7 +385,7 @@ class MultisigTransactionAdmin(BinarySearchAdmin): ordering = ["-created"] raw_id_fields = ("ethereum_tx",) readonly_fields = ("safe_tx_hash",) - search_fields = ["=ethereum_tx__tx_hash", "=safe", "=to", "=safe_tx_hash"] + search_fields = ["==ethereum_tx__tx_hash", "==safe", "==to", "==safe_tx_hash"] @admin.display(boolean=True) def executed(self, obj: MultisigTransaction): @@ -423,7 +426,7 @@ def save_model( @admin.register(ModuleTransaction) -class ModuleTransactionAdmin(BinarySearchAdmin): +class ModuleTransactionAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "created" list_display = ( "created", @@ -440,7 +443,7 @@ class ModuleTransactionAdmin(BinarySearchAdmin): list_select_related = ("internal_tx",) ordering = ["-created"] raw_id_fields = ("internal_tx",) - search_fields = ["=safe", "=module", "=to"] + search_fields = ["==safe", "==module", "==to"] def data_hex(self, o: ModuleTransaction): return HexBytes(o.data.tobytes()).hex() if o.data else None @@ -449,11 +452,10 @@ def tx_hash(self, o: ModuleTransaction): return o.internal_tx.ethereum_tx_id -class MonitoredAddressAdmin(BinarySearchAdmin): +class MonitoredAddressAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): actions = ["reindex", "reindex_last_day", "reindex_last_week", "reindex_last_month"] list_display = ("address", "initial_block_number", "tx_block_number") - readonly_fields = ["initial_block_number"] - search_fields = ["=address"] + search_fields = ["==address"] @admin.action(description="Reindex from initial block") def reindex(self, request, queryset): @@ -526,7 +528,7 @@ def queryset(self, request, queryset): @admin.register(SafeContract) -class SafeContractAdmin(BinarySearchAdmin): +class SafeContractAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): inlines = (SafeContractDelegateInline,) list_display = ( "created_block_number", @@ -537,16 +539,20 @@ class SafeContractAdmin(BinarySearchAdmin): list_select_related = ("ethereum_tx",) ordering = ["-ethereum_tx__block_id"] raw_id_fields = ("ethereum_tx",) - search_fields = ["=address", "=ethereum_tx__tx_hash"] + search_fields = [ + "==address", + "==ethereum_tx__tx_hash", + "==ethereum_tx__block_id", + ] @admin.register(SafeContractDelegate) -class SafeContractDelegateAdmin(BinarySearchAdmin): +class SafeContractDelegateAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ("safe_contract", "read", "write", "delegate", "delegator") list_filter = ("read", "write") ordering = ["safe_contract_id"] raw_id_fields = ("safe_contract",) - search_fields = ["=safe_contract__address", "=delegate", "=delegator"] + search_fields = ["==safe_contract__address", "==delegate", "==delegator"] class SafeStatusModulesListFilter(admin.SimpleListFilter): @@ -568,7 +574,7 @@ def queryset(self, request, queryset): @admin.register(SafeLastStatus) -class SafeLastStatusAdmin(BinarySearchAdmin): +class SafeLastStatusAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): actions = ["remove_and_index"] fields = ( 
"internal_tx", @@ -607,9 +613,9 @@ class SafeLastStatusAdmin(BinarySearchAdmin): ordering = ["-internal_tx__ethereum_tx__block_id", "-internal_tx_id"] raw_id_fields = ("internal_tx",) search_fields = [ - "=address", + "==address", "owners__icontains", - "=internal_tx__ethereum_tx__tx_hash", + "==internal_tx__ethereum_tx__tx_hash", "enabled_modules__icontains", ] @@ -634,7 +640,7 @@ class SafeStatusAdmin(SafeLastStatusAdmin): @admin.register(WebHook) -class WebHookAdmin(BinarySearchAdmin): +class WebHookAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ( "pk", "url", @@ -658,4 +664,4 @@ class WebHookAdmin(BinarySearchAdmin): "new_outgoing_transaction", ) ordering = ["-pk"] - search_fields = ["=address", "url"] + search_fields = ["==address", "==url"] diff --git a/safe_transaction_service/history/apps.py b/safe_transaction_service/history/apps.py index cd00efb0..73d67256 100644 --- a/safe_transaction_service/history/apps.py +++ b/safe_transaction_service/history/apps.py @@ -1,5 +1,3 @@ -import sys - from django.apps import AppConfig @@ -9,14 +7,3 @@ class HistoryConfig(AppConfig): def ready(self): from . import signals # noqa - - for argument in sys.argv: - if "gunicorn" in argument: # pragma: no cover - # Just run this on production - # TODO Find a better way - from safe_transaction_service.contracts.tx_decoder import ( - get_db_tx_decoder, - ) - - get_db_tx_decoder() # Build tx decoder cache - break diff --git a/safe_transaction_service/history/clients/__init__.py b/safe_transaction_service/history/clients/__init__.py deleted file mode 100644 index 71dc2fee..00000000 --- a/safe_transaction_service/history/clients/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# flake8: noqa F401 -from .ens_client import EnsClient diff --git a/safe_transaction_service/history/clients/ens_client.py b/safe_transaction_service/history/clients/ens_client.py deleted file mode 100644 index f6a97a08..00000000 --- a/safe_transaction_service/history/clients/ens_client.py +++ /dev/null @@ -1,148 +0,0 @@ -from functools import lru_cache -from typing import Any, Dict, List, Optional, Union - -import requests -from cache_memoize import cache_memoize -from hexbytes import HexBytes - - -class EnsClient: - def __init__(self, network_id: int): - base_url = "https://api.thegraph.com/subgraphs/name/ensdomains/" - if network_id == 3: # Ropsten - url = base_url + "ensropsten" - elif network_id == 4: # Rinkeby - url = base_url + "ensrinkeby" - elif network_id == 5: # Goerli - url = base_url + "ensgoerli" - else: # Fallback to mainnet - url = base_url + "ens" - self.url: str = url - self.request_timeout = 5 # Seconds - self.request_session = requests.Session() - - def is_available(self): - """ - :return: True if service is available, False if it's down - """ - try: - return self.request_session.get(self.url, timeout=self.request_timeout).ok - except IOError: - return False - - @staticmethod - def domain_hash_to_hex_str(domain_hash: Union[str, bytes, int]) -> str: - """ - :param domain_hash: - :return: Domain hash as an hex string of 66 chars (counting with 0x), padding with zeros if needed - """ - if not domain_hash: - domain_hash = b"" - return "0x" + HexBytes(domain_hash).hex()[2:].rjust(64, "0") - - @lru_cache - @cache_memoize(60 * 60 * 24, prefix="ens-_query_by_domain_hash") # 1 day - def _query_by_domain_hash(self, domain_hash_str: str) -> Optional[str]: - query = """ - { - domains(where: {labelhash: "domain_hash"}) { - labelName - } - } - """.replace( - "domain_hash", domain_hash_str - ) - try: - r = 
self.request_session.post( - self.url, json={"query": query}, timeout=self.request_timeout - ) - except IOError: - return None - - if not r.ok: - return None - - """ - Example: - { - "data": { - "domains": [ - { - "labelName": "safe-multisig" - } - ] - } - } - """ - data = r.json() - if data: - domains = data.get("data", {}).get("domains") - if domains: - return domains[0].get("labelName") - - def query_by_domain_hash( - self, domain_hash: Union[str, bytes, int] - ) -> Optional[str]: - """ - Get domain label from domain_hash (keccak of domain name without the TLD, don't confuse with namehash) - used for ENS ERC721 token_id. Use another method for caching purposes (use same parameter type) - - :param domain_hash: keccak of domain name without the TLD, don't confuse with namehash. E.g. For - batman.eth it would be just keccak('batman') - :return: domain label if found - """ - domain_hash_str = self.domain_hash_to_hex_str(domain_hash) - return self._query_by_domain_hash(domain_hash_str) - - def query_by_account(self, account: str) -> Optional[List[Dict[str, Any]]]: - """ - :param account: ethereum account to search for ENS registered addresses - :return: None if there's a problem or not found, otherwise example of dictionary returned: - { - "registrations": [ - { - "domain": { - "isMigrated": true, - "labelName": "gilfoyle", - "labelhash": "0xadfd886b420023026d5c0b1be0ffb5f18bb2f37143dff545aeaea0d23a4ba910", - "name": "gilfoyle.eth", - "parent": { - "name": "eth" - } - }, - "expiryDate": "1905460880" - } - ] - } - """ - query = """query getRegistrations { - account(id: "account_id") { - registrations { - expiryDate - domain { - labelName - labelhash - name - isMigrated - parent { - name - } - } - } - } - }""".replace( - "account_id", account.lower() - ) - try: - r = self.request_session.post( - self.url, json={"query": query}, timeout=self.request_timeout - ) - except IOError: - return None - - if not r.ok: - return None - else: - data = r.json() - if data: - return data.get("data", {}).get("account") diff --git a/safe_transaction_service/history/exceptions.py b/safe_transaction_service/history/exceptions.py index abb96dd5..06b88758 100644 --- a/safe_transaction_service/history/exceptions.py +++ b/safe_transaction_service/history/exceptions.py @@ -8,7 +8,6 @@ def custom_exception_handler(exc, context): - if isinstance(exc, NodeConnectionException): response = Response(status=status.HTTP_503_SERVICE_UNAVAILABLE) diff --git a/safe_transaction_service/history/filters.py b/safe_transaction_service/history/filters.py index 11ac899c..92097870 100644 --- a/safe_transaction_service/history/filters.py +++ b/safe_transaction_service/history/filters.py @@ -6,7 +6,6 @@ from gnosis.eth.django.filters import EthereumAddressFilter, Keccak256Filter from gnosis.eth.django.models import ( - EthereumAddressField, EthereumAddressV2Field, Keccak256Field, Uint256Field, @@ -17,7 +16,6 @@ filter_overrides = { Uint256Field: {"filter_class": django_filters.NumberFilter}, Keccak256Field: {"filter_class": Keccak256Filter}, - EthereumAddressField: {"filter_class": EthereumAddressFilter}, EthereumAddressV2Field: {"filter_class": EthereumAddressFilter}, } @@ -103,20 +101,30 @@ class MultisigTransactionFilter(filters.FilterSet): ) transaction_hash = Keccak256Filter(field_name="ethereum_tx_id") - def filter_confirmations(self, queryset, name: str, value: bool): + def __init__(self, data=None, *args, **kwargs): + if data is not None: + data = data.copy() + data.setdefault("trusted", True) + + super().__init__(data, *args, 
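# Effect of the MultisigTransactionFilter.__init__ override being introduced here: when
# the client does not send `trusted`, the filter defaults it to True, so only trusted
# multisig transactions are listed; passing `trusted=false` explicitly returns everything
# (filter_trusted leaves the queryset untouched in that case). A minimal sketch of the
# setdefault behaviour on the QueryDict Django hands to the FilterSet:
from django.http import QueryDict

data = QueryDict("executed=true", encoding="utf-8").copy()   # client did not send `trusted`
data.setdefault("trusted", True)
assert data["trusted"] is True                               # default kicks in

data = QueryDict("trusted=false", encoding="utf-8").copy()   # client opted out explicitly
data.setdefault("trusted", True)
assert data["trusted"] == "false"                            # explicit value wins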
**kwargs) + + def filter_confirmations(self, queryset, _name: str, value: bool): if value: return queryset.with_confirmations() else: return queryset.without_confirmations() - def filter_executed(self, queryset, name: str, value: bool): + def filter_executed(self, queryset, _name: str, value: bool): if value: return queryset.executed() else: return queryset.not_executed() - def filter_trusted(self, queryset, name: str, value: bool): - return queryset.filter(trusted=value) + def filter_trusted(self, queryset, _name: str, value: bool): + if value: + return queryset.trusted() + else: + return queryset class Meta: model = MultisigTransaction diff --git a/safe_transaction_service/history/helpers.py b/safe_transaction_service/history/helpers.py index a1709722..faae0da1 100644 --- a/safe_transaction_service/history/helpers.py +++ b/safe_transaction_service/history/helpers.py @@ -2,14 +2,16 @@ import time from typing import List -from eth_typing import ChecksumAddress +from eth_typing import ChecksumAddress, Hash32, HexStr from eth_utils import keccak +from gnosis.eth.eip712 import eip712_encode_hash + from safe_transaction_service.history.models import TransferDict from safe_transaction_service.tokens.models import Token -class DelegateSignatureHelper: +class TemporarySignatureHelper: @classmethod def calculate_totp( cls, totp_tx: int = 3600, totp_t0: int = 0, previous: bool = False @@ -28,30 +30,99 @@ def calculate_totp( return int((time.time() - totp_t0) // totp_tx) + +class DeleteMultisigTxSignatureHelper(TemporarySignatureHelper): @classmethod def calculate_hash( cls, - address: ChecksumAddress, - eth_sign: bool = False, + safe_address: ChecksumAddress, + safe_tx_hash: HexStr, + chain_id: int, previous_totp: bool = False, - ) -> bytes: + ) -> Hash32: + """ + Builds a EIP712 object and calculates its hash + + :param safe_address: + :param safe_tx_hash: + :param chain_id: + :param previous_totp: + :return: Hash for the EIP712 generated object from the provided parameters + """ totp = cls.calculate_totp(previous=previous_totp) - message = address + str(totp) - if eth_sign: - return keccak( - text="\x19Ethereum Signed Message:\n" + str(len(message)) + message - ) - else: - return keccak(text=message) + payload = { + "types": { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + {"name": "verifyingContract", "type": "address"}, + ], + "DeleteRequest": [ + {"name": "safeTxHash", "type": "bytes32"}, + {"name": "totp", "type": "uint256"}, + ], + }, + "primaryType": "DeleteRequest", + "domain": { + "name": "Safe Transaction Service", + "version": "1.0", + "chainId": chain_id, + "verifyingContract": safe_address, + }, + "message": { + "safeTxHash": safe_tx_hash, + "totp": totp, + }, + } + + return eip712_encode_hash(payload) + + +class DelegateSignatureHelperV2(TemporarySignatureHelper): @classmethod - def calculate_all_possible_hashes(cls, delegate: ChecksumAddress) -> List[bytes]: - return [ - cls.calculate_hash(delegate), - cls.calculate_hash(delegate, eth_sign=True), - cls.calculate_hash(delegate, previous_totp=True), - cls.calculate_hash(delegate, eth_sign=True, previous_totp=True), - ] + def calculate_hash( + cls, + delegate_address: ChecksumAddress, + chain_id: int, + previous_totp: bool = False, + ) -> Hash32: + """ + Builds a EIP712 object and calculates its hash + + :param delegate_address: + :param chain_id: + :param previous_totp: + :return: Hash for the EIP712 generated object from the provided 
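# Usage sketch for DeleteMultisigTxSignatureHelper defined above. A client wanting to
# remove a queued multisig transaction computes this EIP-712 hash (the embedded TOTP
# rotates every `totp_tx` seconds, 3600 by default) and signs it; `previous_totp=True`
# reproduces the hash for the TOTP window that just expired, which the service can also
# check to tolerate clock skew. Addresses and hashes below are placeholders.
from safe_transaction_service.history.helpers import DeleteMultisigTxSignatureHelper

safe_address = "0x" + "11" * 20  # placeholder Safe address
safe_tx_hash = "0x" + "22" * 32  # hash of the multisig transaction to delete

current_hash = DeleteMultisigTxSignatureHelper.calculate_hash(
    safe_address, safe_tx_hash, chain_id=1
)
previous_hash = DeleteMultisigTxSignatureHelper.calculate_hash(
    safe_address, safe_tx_hash, chain_id=1, previous_totp=True
)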
parameters + """ + totp = cls.calculate_totp(previous=previous_totp) + + payload = { + "types": { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + ], + "Delegate": [ + {"name": "delegateAddress", "type": "bytes32"}, + {"name": "totp", "type": "uint256"}, + ], + }, + "primaryType": "Delegate", + "domain": { + "name": "Safe Transaction Service", + "version": "1.0", + "chainId": chain_id, + }, + "message": { + "delegateAddress": delegate_address, + "totp": totp, + }, + } + + return eip712_encode_hash(payload) def is_valid_unique_transfer_id(unique_transfer_id: str) -> bool: @@ -90,3 +161,38 @@ def add_tokens_to_transfers(transfers: TransferDict) -> TransferDict: for transfer in transfers: transfer["token"] = tokens.get(transfer["token_address"]) return transfers + + +# Deprecated --------------------------------------------------------------- + + +class DelegateSignatureHelper(TemporarySignatureHelper): + """ + .. deprecated:: 4.38.0 + Deprecated in favour of DelegateSignatureHelperV2 + """ + + @classmethod + def calculate_hash( + cls, + address: ChecksumAddress, + eth_sign: bool = False, + previous_totp: bool = False, + ) -> bytes: + totp = cls.calculate_totp(previous=previous_totp) + message = address + str(totp) + if eth_sign: + return keccak( + text="\x19Ethereum Signed Message:\n" + str(len(message)) + message + ) + else: + return keccak(text=message) + + @classmethod + def calculate_all_possible_hashes(cls, delegate: ChecksumAddress) -> List[bytes]: + return [ + cls.calculate_hash(delegate), + cls.calculate_hash(delegate, eth_sign=True), + cls.calculate_hash(delegate, previous_totp=True), + cls.calculate_hash(delegate, eth_sign=True, previous_totp=True), + ] diff --git a/safe_transaction_service/history/indexers/abis/gnosis.py b/safe_transaction_service/history/indexers/abis/gnosis.py deleted file mode 100644 index a404bf37..00000000 --- a/safe_transaction_service/history/indexers/abis/gnosis.py +++ /dev/null @@ -1,820 +0,0 @@ -gnosis_safe_l2_v1_3_0_abi = [ - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "owner", - "type": "address", - } - ], - "name": "AddedOwner", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "bytes32", - "name": "approvedHash", - "type": "bytes32", - }, - { - "indexed": True, - "internalType": "address", - "name": "owner", - "type": "address", - }, - ], - "name": "ApproveHash", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "handler", - "type": "address", - } - ], - "name": "ChangedFallbackHandler", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "guard", - "type": "address", - } - ], - "name": "ChangedGuard", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "uint256", - "name": "threshold", - "type": "uint256", - } - ], - "name": "ChangedThreshold", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "module", - "type": "address", - } - ], - "name": "DisabledModule", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "module", - "type": "address", - } - ], - "name": "EnabledModule", - "type": "event", 
- }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "bytes32", - "name": "txHash", - "type": "bytes32", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "payment", - "type": "uint256", - }, - ], - "name": "ExecutionFailure", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "address", - "name": "module", - "type": "address", - } - ], - "name": "ExecutionFromModuleFailure", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "address", - "name": "module", - "type": "address", - } - ], - "name": "ExecutionFromModuleSuccess", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "bytes32", - "name": "txHash", - "type": "bytes32", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "payment", - "type": "uint256", - }, - ], - "name": "ExecutionSuccess", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "owner", - "type": "address", - } - ], - "name": "RemovedOwner", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "module", - "type": "address", - }, - { - "indexed": False, - "internalType": "address", - "name": "to", - "type": "address", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "value", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "bytes", - "name": "data", - "type": "bytes", - }, - { - "indexed": False, - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - ], - "name": "SafeModuleTransaction", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "address", - "name": "to", - "type": "address", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "value", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "bytes", - "name": "data", - "type": "bytes", - }, - { - "indexed": False, - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "safeTxGas", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "baseGas", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "gasPrice", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "address", - "name": "gasToken", - "type": "address", - }, - { - "indexed": False, - "internalType": "address payable", - "name": "refundReceiver", - "type": "address", - }, - { - "indexed": False, - "internalType": "bytes", - "name": "signatures", - "type": "bytes", - }, - { - "indexed": False, - "internalType": "bytes", - "name": "additionalInfo", - "type": "bytes", - }, - ], - "name": "SafeMultiSigTransaction", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "address", - "name": "sender", - "type": "address", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "value", - "type": "uint256", - }, - ], - "name": "SafeReceived", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "address", - "name": "initiator", - "type": "address", - }, - { - "indexed": False, - "internalType": "address[]", - "name": "owners", - "type": 
"address[]", - }, - { - "indexed": False, - "internalType": "uint256", - "name": "threshold", - "type": "uint256", - }, - { - "indexed": False, - "internalType": "address", - "name": "initializer", - "type": "address", - }, - { - "indexed": False, - "internalType": "address", - "name": "fallbackHandler", - "type": "address", - }, - ], - "name": "SafeSetup", - "type": "event", - }, - { - "anonymous": False, - "inputs": [ - { - "indexed": True, - "internalType": "bytes32", - "name": "msgHash", - "type": "bytes32", - } - ], - "name": "SignMsg", - "type": "event", - }, - {"stateMutability": "nonpayable", "type": "fallback"}, - { - "inputs": [], - "name": "VERSION", - "outputs": [{"internalType": "string", "name": "", "type": "string"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "owner", "type": "address"}, - {"internalType": "uint256", "name": "_threshold", "type": "uint256"}, - ], - "name": "addOwnerWithThreshold", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "bytes32", "name": "hashToApprove", "type": "bytes32"} - ], - "name": "approveHash", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "", "type": "address"}, - {"internalType": "bytes32", "name": "", "type": "bytes32"}, - ], - "name": "approvedHashes", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "uint256", "name": "_threshold", "type": "uint256"} - ], - "name": "changeThreshold", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "bytes32", "name": "dataHash", "type": "bytes32"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - {"internalType": "bytes", "name": "signatures", "type": "bytes"}, - { - "internalType": "uint256", - "name": "requiredSignatures", - "type": "uint256", - }, - ], - "name": "checkNSignatures", - "outputs": [], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "bytes32", "name": "dataHash", "type": "bytes32"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - {"internalType": "bytes", "name": "signatures", "type": "bytes"}, - ], - "name": "checkSignatures", - "outputs": [], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "prevModule", "type": "address"}, - {"internalType": "address", "name": "module", "type": "address"}, - ], - "name": "disableModule", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [], - "name": "domainSeparator", - "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [{"internalType": "address", "name": "module", "type": "address"}], - "name": "enableModule", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - {"internalType": "uint256", "name": "safeTxGas", "type": "uint256"}, - 
{"internalType": "uint256", "name": "baseGas", "type": "uint256"}, - {"internalType": "uint256", "name": "gasPrice", "type": "uint256"}, - {"internalType": "address", "name": "gasToken", "type": "address"}, - {"internalType": "address", "name": "refundReceiver", "type": "address"}, - {"internalType": "uint256", "name": "_nonce", "type": "uint256"}, - ], - "name": "encodeTransactionData", - "outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - {"internalType": "uint256", "name": "safeTxGas", "type": "uint256"}, - {"internalType": "uint256", "name": "baseGas", "type": "uint256"}, - {"internalType": "uint256", "name": "gasPrice", "type": "uint256"}, - {"internalType": "address", "name": "gasToken", "type": "address"}, - { - "internalType": "address payable", - "name": "refundReceiver", - "type": "address", - }, - {"internalType": "bytes", "name": "signatures", "type": "bytes"}, - ], - "name": "execTransaction", - "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], - "stateMutability": "payable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - ], - "name": "execTransactionFromModule", - "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - ], - "name": "execTransactionFromModuleReturnData", - "outputs": [ - {"internalType": "bool", "name": "success", "type": "bool"}, - {"internalType": "bytes", "name": "returnData", "type": "bytes"}, - ], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [], - "name": "getChainId", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "start", "type": "address"}, - {"internalType": "uint256", "name": "pageSize", "type": "uint256"}, - ], - "name": "getModulesPaginated", - "outputs": [ - {"internalType": "address[]", "name": "array", "type": "address[]"}, - {"internalType": "address", "name": "next", "type": "address"}, - ], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [], - "name": "getOwners", - "outputs": [{"internalType": "address[]", "name": "", "type": "address[]"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "uint256", "name": "offset", "type": "uint256"}, - {"internalType": "uint256", "name": "length", "type": "uint256"}, - ], - "name": "getStorageAt", - "outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}], - "stateMutability": "view", - "type": "function", - }, - 
{ - "inputs": [], - "name": "getThreshold", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - {"internalType": "uint256", "name": "safeTxGas", "type": "uint256"}, - {"internalType": "uint256", "name": "baseGas", "type": "uint256"}, - {"internalType": "uint256", "name": "gasPrice", "type": "uint256"}, - {"internalType": "address", "name": "gasToken", "type": "address"}, - {"internalType": "address", "name": "refundReceiver", "type": "address"}, - {"internalType": "uint256", "name": "_nonce", "type": "uint256"}, - ], - "name": "getTransactionHash", - "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [{"internalType": "address", "name": "module", "type": "address"}], - "name": "isModuleEnabled", - "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [{"internalType": "address", "name": "owner", "type": "address"}], - "name": "isOwner", - "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [], - "name": "nonce", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "prevOwner", "type": "address"}, - {"internalType": "address", "name": "owner", "type": "address"}, - {"internalType": "uint256", "name": "_threshold", "type": "uint256"}, - ], - "name": "removeOwner", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "uint256", "name": "value", "type": "uint256"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - { - "internalType": "enum Enum.Operation", - "name": "operation", - "type": "uint8", - }, - ], - "name": "requiredTxGas", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [{"internalType": "address", "name": "handler", "type": "address"}], - "name": "setFallbackHandler", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [{"internalType": "address", "name": "guard", "type": "address"}], - "name": "setGuard", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address[]", "name": "_owners", "type": "address[]"}, - {"internalType": "uint256", "name": "_threshold", "type": "uint256"}, - {"internalType": "address", "name": "to", "type": "address"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - {"internalType": "address", "name": "fallbackHandler", "type": "address"}, - {"internalType": "address", "name": "paymentToken", "type": "address"}, - {"internalType": "uint256", "name": "payment", "type": "uint256"}, - { - "internalType": "address payable", - "name": "paymentReceiver", - "type": "address", - }, - ], - "name": "setup", - "outputs": [], - 
"stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], - "name": "signedMessages", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "targetContract", "type": "address"}, - {"internalType": "bytes", "name": "calldataPayload", "type": "bytes"}, - ], - "name": "simulateAndRevert", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "prevOwner", "type": "address"}, - {"internalType": "address", "name": "oldOwner", "type": "address"}, - {"internalType": "address", "name": "newOwner", "type": "address"}, - ], - "name": "swapOwner", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function", - }, - {"stateMutability": "payable", "type": "receive"}, -] - -proxy_factory_v1_3_0_abi = [ - { - "anonymous": False, - "inputs": [ - { - "indexed": False, - "internalType": "contract GnosisSafeProxy", - "name": "proxy", - "type": "address", - }, - { - "indexed": False, - "internalType": "address", - "name": "singleton", - "type": "address", - }, - ], - "name": "ProxyCreation", - "type": "event", - }, - { - "inputs": [ - {"internalType": "address", "name": "_singleton", "type": "address"}, - {"internalType": "bytes", "name": "initializer", "type": "bytes"}, - {"internalType": "uint256", "name": "saltNonce", "type": "uint256"}, - ], - "name": "calculateCreateProxyWithNonceAddress", - "outputs": [ - { - "internalType": "contract GnosisSafeProxy", - "name": "proxy", - "type": "address", - } - ], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "singleton", "type": "address"}, - {"internalType": "bytes", "name": "data", "type": "bytes"}, - ], - "name": "createProxy", - "outputs": [ - { - "internalType": "contract GnosisSafeProxy", - "name": "proxy", - "type": "address", - } - ], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "_singleton", "type": "address"}, - {"internalType": "bytes", "name": "initializer", "type": "bytes"}, - {"internalType": "uint256", "name": "saltNonce", "type": "uint256"}, - { - "internalType": "contract IProxyCreationCallback", - "name": "callback", - "type": "address", - }, - ], - "name": "createProxyWithCallback", - "outputs": [ - { - "internalType": "contract GnosisSafeProxy", - "name": "proxy", - "type": "address", - } - ], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [ - {"internalType": "address", "name": "_singleton", "type": "address"}, - {"internalType": "bytes", "name": "initializer", "type": "bytes"}, - {"internalType": "uint256", "name": "saltNonce", "type": "uint256"}, - ], - "name": "createProxyWithNonce", - "outputs": [ - { - "internalType": "contract GnosisSafeProxy", - "name": "proxy", - "type": "address", - } - ], - "stateMutability": "nonpayable", - "type": "function", - }, - { - "inputs": [], - "name": "proxyCreationCode", - "outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}], - "stateMutability": "pure", - "type": "function", - }, - { - "inputs": [], - "name": "proxyRuntimeCode", - "outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}], - "stateMutability": "pure", - "type": "function", - }, -] diff --git 
a/safe_transaction_service/history/indexers/events_indexer.py b/safe_transaction_service/history/indexers/events_indexer.py index 7e5a2403..0ee02792 100644 --- a/safe_transaction_service/history/indexers/events_indexer.py +++ b/safe_transaction_service/history/indexers/events_indexer.py @@ -6,6 +6,7 @@ from django.conf import settings import gevent +from eth_abi.exceptions import DecodingError from eth_typing import ChecksumAddress from eth_utils import event_abi_to_log_topic from gevent import pool @@ -136,7 +137,7 @@ def _find_elements_using_topics( to_block_number: int, ) -> List[LogReceipt]: """ - It will get Safe events using all the Gnosis Safe topics for filtering. + It will get Safe events using all the Safe topics for filtering. :param addresses: :param from_block_number: @@ -220,9 +221,10 @@ def decode_element(self, log_receipt: LogReceipt) -> Optional[EventData]: """ for event_to_listen in self.events_to_listen[log_receipt["topics"][0].hex()]: # Try to decode using all the existing ABIs + # One topic can have multiple matching ABIs due to `indexed` elements changing how to decode it try: return event_to_listen.process_log(log_receipt) - except LogTopicError: + except (LogTopicError, DecodingError): continue logger.error( diff --git a/safe_transaction_service/history/indexers/internal_tx_indexer.py b/safe_transaction_service/history/indexers/internal_tx_indexer.py index f75f79a6..c47c0e65 100644 --- a/safe_transaction_service/history/indexers/internal_tx_indexer.py +++ b/safe_transaction_service/history/indexers/internal_tx_indexer.py @@ -13,6 +13,7 @@ from safe_transaction_service.contracts.tx_decoder import ( CannotDecode, + UnexpectedProblemDecoding, get_safe_tx_decoder, ) from safe_transaction_service.utils.utils import chunks @@ -231,17 +232,18 @@ def _get_internal_txs_to_decode( .iterator() ): try: - function_name, arguments = self.tx_decoder.decode_transaction( - bytes(internal_tx.data) - ) + data = bytes(internal_tx.data) + function_name, arguments = self.tx_decoder.decode_transaction(data) yield InternalTxDecoded( internal_tx=internal_tx, function_name=function_name, arguments=arguments, processed=False, ) - except CannotDecode: - pass + except CannotDecode as exc: + logger.debug("Cannot decode %s: %s", data.hex(), exc) + except UnexpectedProblemDecoding as exc: + logger.warning("Unexpected problem decoding %s: %s", data.hex(), exc) def trace_transactions( self, tx_hashes: Sequence[HexStr], batch_size: int diff --git a/safe_transaction_service/history/indexers/proxy_factory_indexer.py b/safe_transaction_service/history/indexers/proxy_factory_indexer.py index 3446c34d..d7c1dcbd 100644 --- a/safe_transaction_service/history/indexers/proxy_factory_indexer.py +++ b/safe_transaction_service/history/indexers/proxy_factory_indexer.py @@ -8,8 +8,9 @@ from gnosis.eth import EthereumClient from gnosis.eth.constants import NULL_ADDRESS from gnosis.eth.contracts import ( - get_proxy_factory_contract, get_proxy_factory_V1_1_1_contract, + get_proxy_factory_V1_3_0_contract, + get_proxy_factory_V1_4_1_contract, ) from ..models import ProxyFactory, SafeContract @@ -40,13 +41,22 @@ def del_singleton(cls): class ProxyFactoryIndexer(EventsIndexer): @cached_property def contract_events(self) -> List[ContractEvent]: - old_proxy_factory_contract = get_proxy_factory_V1_1_1_contract( + proxy_factory_v1_1_1_contract = get_proxy_factory_V1_1_1_contract( + self.ethereum_client.w3 + ) + proxy_factory_v1_3_0_contract = get_proxy_factory_V1_3_0_contract( + self.ethereum_client.w3 + ) + 
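# Why DecodingError is now caught alongside LogTopicError when trying candidate ABIs:
# marking a parameter `indexed` does not change the event's topic0 (the signature string
# ignores `indexed`), but it does move the value from the data section into a topic, so
# the same topic0 can require different decoders. The ProxyCreation variants this indexer
# registers (v1.3.0 non-indexed proxy vs v1.4.1 indexed proxy) are exactly this case.
from eth_utils import event_signature_to_log_topic

# v1.3.0: event ProxyCreation(GnosisSafeProxy proxy, address singleton)
# v1.4.1: event ProxyCreation(SafeProxy indexed proxy, address singleton)
topic_v1_3_0 = event_signature_to_log_topic("ProxyCreation(address,address)")
topic_v1_4_1 = event_signature_to_log_topic("ProxyCreation(address,address)")
assert topic_v1_3_0 == topic_v1_4_1  # same topic0, different log layouts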
proxy_factory_v_1_4_1_contract = get_proxy_factory_V1_4_1_contract( self.ethereum_client.w3 ) - proxy_factory_contract = get_proxy_factory_contract(self.ethereum_client.w3) return [ - old_proxy_factory_contract.events.ProxyCreation(), - proxy_factory_contract.events.ProxyCreation(), + # event ProxyCreation(Proxy proxy) + proxy_factory_v1_1_1_contract.events.ProxyCreation(), + # event ProxyCreation(GnosisSafeProxy proxy, address singleton) + proxy_factory_v1_3_0_contract.events.ProxyCreation(), + # event ProxyCreation(SafeProxy indexed proxy, address singleton) + proxy_factory_v_1_4_1_contract.events.ProxyCreation(), ] @property diff --git a/safe_transaction_service/history/indexers/safe_events_indexer.py b/safe_transaction_service/history/indexers/safe_events_indexer.py index d51731e5..8573814a 100644 --- a/safe_transaction_service/history/indexers/safe_events_indexer.py +++ b/safe_transaction_service/history/indexers/safe_events_indexer.py @@ -5,6 +5,7 @@ from django.db import IntegrityError, transaction from eth_abi import decode as decode_abi +from eth_abi.exceptions import DecodingError from eth_typing import ChecksumAddress from hexbytes import HexBytes from web3.contract.contract import ContractEvent @@ -12,7 +13,13 @@ from gnosis.eth import EthereumClient from gnosis.eth.constants import NULL_ADDRESS -from gnosis.eth.contracts import get_safe_V1_1_1_contract +from gnosis.eth.contracts import ( + get_proxy_factory_V1_3_0_contract, + get_proxy_factory_V1_4_1_contract, + get_safe_V1_1_1_contract, + get_safe_V1_3_0_contract, + get_safe_V1_4_1_contract, +) from ..models import ( EthereumBlock, @@ -22,7 +29,6 @@ InternalTxType, SafeMasterCopy, ) -from .abis.gnosis import gnosis_safe_l2_v1_3_0_abi, proxy_factory_v1_3_0_abi from .events_indexer import EventsIndexer logger = getLogger(__name__) @@ -102,10 +108,12 @@ def contract_events(self) -> List[ContractEvent]: ); event ExecutionFailure( - bytes32 txHash, uint256 payment + bytes32 txHash, + uint256 payment ); event ExecutionSuccess( - bytes32 txHash, uint256 payment + bytes32 txHash, + uint256 payment ); event EnabledModule(address module); @@ -129,46 +137,127 @@ def contract_events(self) -> List[ContractEvent]: # ProxyFactory event ProxyCreation(GnosisSafeProxy proxy, address singleton); - Safe v1.4.0 L2 Events + Safe v1.4.1 L2 Events ------------------ - TODO: Add them on on deployment + event SafeMultiSigTransaction( + address to, + uint256 value, + bytes data, + Enum.Operation operation, + uint256 safeTxGas, + uint256 baseGas, + uint256 gasPrice, + address gasToken, + address payable refundReceiver, + bytes signatures, + // We combine nonce, sender and threshold into one to avoid stack too deep + // Dev note: additionalInfo should not contain `bytes`, as this complicates decoding + bytes additionalInfo + ); + + event SafeModuleTransaction( + address module, + address to, + uint256 value, + bytes data, + Enum.Operation operation, + ); + + event SafeSetup( + address indexed initiator, + address[] owners, + uint256 threshold, + address initializer, + address fallbackHandler + ); + + event ApproveHash( + bytes32 indexed approvedHash, + address indexed owner + ); + + event SignMsg( + bytes32 indexed msgHash + ); + + event ExecutionFailure( + bytes32 indexed txHash, + uint256 payment + ); + + event ExecutionSuccess( + bytes32 indexed txHash, + uint256 payment + ); + + event EnabledModule(address indexed module); + event DisabledModule(address indexed module); + event ExecutionFromModuleSuccess(address indexed module); + event 
ExecutionFromModuleFailure(address indexed module); + + event AddedOwner(address indexed owner); + event RemovedOwner(address indexed owner); + event ChangedThreshold(uint256 threshold); + + # Incoming Ether + event SafeReceived( + address indexed sender, + uint256 value + ); + + event ChangedFallbackHandler(address indexed handler); + event ChangedGuard(address indexed guard); + + # ProxyFactory + event ProxyCreation(GnosisSafeProxy indexed proxy, address singleton); :return: List of supported `ContractEvent` """ - l2_contract = self.ethereum_client.w3.eth.contract( - abi=gnosis_safe_l2_v1_3_0_abi + proxy_factory_v1_4_1_contract = get_proxy_factory_V1_4_1_contract( + self.ethereum_client.w3 ) - proxy_factory_contract = self.ethereum_client.w3.eth.contract( - abi=proxy_factory_v1_3_0_abi + proxy_factory_v1_3_0_contract = get_proxy_factory_V1_3_0_contract( + self.ethereum_client.w3 ) - old_contract = get_safe_V1_1_1_contract(self.ethereum_client.w3) + safe_l2_v1_4_1_contract = get_safe_V1_4_1_contract(self.ethereum_client.w3) + safe_l2_v1_3_0_contract = get_safe_V1_3_0_contract(self.ethereum_client.w3) + safe_v1_1_1_contract = get_safe_V1_1_1_contract(self.ethereum_client.w3) return [ - l2_contract.events.SafeMultiSigTransaction(), - l2_contract.events.SafeModuleTransaction(), - l2_contract.events.SafeSetup(), - l2_contract.events.ApproveHash(), - l2_contract.events.SignMsg(), - l2_contract.events.ExecutionFailure(), - l2_contract.events.ExecutionSuccess(), + safe_l2_v1_3_0_contract.events.SafeMultiSigTransaction(), + safe_l2_v1_3_0_contract.events.SafeModuleTransaction(), + safe_l2_v1_3_0_contract.events.SafeSetup(), + safe_l2_v1_3_0_contract.events.ApproveHash(), + safe_l2_v1_3_0_contract.events.SignMsg(), + safe_l2_v1_4_1_contract.events.ExecutionFailure(), + safe_l2_v1_3_0_contract.events.ExecutionFailure(), + safe_l2_v1_4_1_contract.events.ExecutionSuccess(), + safe_l2_v1_3_0_contract.events.ExecutionSuccess(), # Modules - l2_contract.events.EnabledModule(), - l2_contract.events.DisabledModule(), - l2_contract.events.ExecutionFromModuleSuccess(), - l2_contract.events.ExecutionFromModuleFailure(), + safe_l2_v1_4_1_contract.events.EnabledModule(), + safe_l2_v1_3_0_contract.events.EnabledModule(), + safe_l2_v1_4_1_contract.events.DisabledModule(), + safe_l2_v1_3_0_contract.events.DisabledModule(), + safe_l2_v1_3_0_contract.events.ExecutionFromModuleSuccess(), + safe_l2_v1_3_0_contract.events.ExecutionFromModuleFailure(), # Owners - l2_contract.events.AddedOwner(), - l2_contract.events.RemovedOwner(), - l2_contract.events.ChangedThreshold(), + safe_l2_v1_4_1_contract.events.AddedOwner(), + safe_l2_v1_3_0_contract.events.AddedOwner(), + safe_l2_v1_4_1_contract.events.RemovedOwner(), + safe_l2_v1_3_0_contract.events.RemovedOwner(), + safe_l2_v1_3_0_contract.events.ChangedThreshold(), # Incoming Ether - l2_contract.events.SafeReceived(), + safe_l2_v1_3_0_contract.events.SafeReceived(), # Changed FallbackHandler - l2_contract.events.ChangedFallbackHandler(), + safe_l2_v1_4_1_contract.events.ChangedFallbackHandler(), + safe_l2_v1_3_0_contract.events.ChangedFallbackHandler(), # Changed Guard - l2_contract.events.ChangedGuard(), + safe_l2_v1_4_1_contract.events.ChangedGuard(), + safe_l2_v1_3_0_contract.events.ChangedGuard(), # Change Master Copy - old_contract.events.ChangedMasterCopy(), + safe_v1_1_1_contract.events.ChangedMasterCopy(), # Proxy creation - proxy_factory_contract.events.ProxyCreation(), + proxy_factory_v1_4_1_contract.events.ProxyCreation(), + 
proxy_factory_v1_3_0_contract.events.ProxyCreation(), ] @property @@ -207,10 +296,18 @@ def _process_decoded_element( log_index = decoded_element["logIndex"] trace_address = str(log_index) args = dict(decoded_element["args"]) - ethereum_tx_hash = decoded_element["transactionHash"] + ethereum_tx_hash = HexBytes(decoded_element["transactionHash"]) + ethereum_tx_hash_hex = ethereum_tx_hash.hex() ethereum_block = EthereumBlock.objects.values("number", "timestamp").get( txs=ethereum_tx_hash ) + logger.debug( + "[%s] %s - tx-hash=%s - Processing event %s", + safe_address, + event_name, + ethereum_tx_hash_hex, + decoded_element, + ) internal_tx = InternalTx( ethereum_tx_id=ethereum_tx_hash, @@ -276,10 +373,22 @@ def _process_decoded_element( data = HexBytes(args["data"]) args["data"] = data.hex() args["signatures"] = HexBytes(args["signatures"]).hex() - args["nonce"], args["sender"], args["threshold"] = decode_abi( - ["uint256", "address", "uint256"], - internal_tx_decoded.arguments.pop("additionalInfo"), + additional_info = HexBytes( + internal_tx_decoded.arguments.pop("additionalInfo") ) + try: + args["nonce"], args["sender"], args["threshold"] = decode_abi( + ["uint256", "address", "uint256"], + additional_info, + ) + except DecodingError: + logger.error( + "[%s] %s - tx-hash=%s - Cannot decode SafeMultiSigTransaction with additionalInfo=%s", + safe_address, + event_name, + ethereum_tx_hash_hex, + additional_info.hex(), + ) if args["value"] and not data: # Simulate ether transfer child_internal_tx = InternalTx( ethereum_tx_id=ethereum_tx_hash, @@ -357,4 +466,11 @@ def _process_decoded_element( ) return None + logger.debug( + "[%s] %s - tx-hash=%s - Processed event", + safe_address, + event_name, + ethereum_tx_hash_hex, + ) + return internal_tx diff --git a/safe_transaction_service/history/indexers/tx_processor.py b/safe_transaction_service/history/indexers/tx_processor.py index e05b833d..5ad8760e 100644 --- a/safe_transaction_service/history/indexers/tx_processor.py +++ b/safe_transaction_service/history/indexers/tx_processor.py @@ -1,5 +1,9 @@ +""" +Contains classes for processing indexed data and store Safe related models in database +""" + +import logging from abc import ABC, abstractmethod -from logging import getLogger from typing import Dict, Iterator, List, Optional, Sequence, Union from django.db import transaction @@ -12,10 +16,18 @@ from gnosis.eth import EthereumClient, EthereumClientProvider from gnosis.eth.constants import NULL_ADDRESS -from gnosis.eth.contracts import get_safe_V1_0_0_contract, get_safe_V1_3_0_contract +from gnosis.eth.contracts import ( + get_safe_V1_0_0_contract, + get_safe_V1_3_0_contract, + get_safe_V1_4_1_contract, +) from gnosis.safe import SafeTx from gnosis.safe.safe_signature import SafeSignature, SafeSignatureApprovedHash +from safe_transaction_service.account_abstraction.services import ( + AaProcessorService, + get_aa_processor_service, +) from safe_transaction_service.safe_messages import models as safe_message_models from ..models import ( @@ -26,12 +38,13 @@ MultisigConfirmation, MultisigTransaction, SafeContract, + SafeContractDelegate, SafeLastStatus, SafeMasterCopy, SafeStatus, ) -logger = getLogger(__name__) +logger = logging.getLogger(__name__) class TxProcessorException(Exception): @@ -42,6 +55,10 @@ class OwnerCannotBeRemoved(TxProcessorException): pass +class UserOperationFailed(TxProcessorException): + pass + + class SafeTxProcessorProvider: def __new__(cls): if not hasattr(cls, "instance"): @@ -53,7 +70,12 @@ def __new__(cls): if 
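# Round trip for the guarded additionalInfo decoding above, using the same eth_abi calls.
# For SafeMultiSigTransaction events, additionalInfo packs (nonce, sender, threshold);
# malformed bytes raise DecodingError, which is now logged per event instead of breaking
# indexing for the whole block. Values below are placeholders.
from eth_abi import decode as decode_abi
from eth_abi import encode as encode_abi

additional_info = encode_abi(
    ["uint256", "address", "uint256"],
    [7, "0x" + "11" * 20, 2],  # nonce, sender (placeholder address), threshold
)
nonce, sender, threshold = decode_abi(["uint256", "address", "uint256"], additional_info)
assert (nonce, threshold) == (7, 2)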
settings.ETHEREUM_TRACING_NODE_URL else None ) - cls.instance = SafeTxProcessor(ethereum_client, ethereum_tracing_client) + + if not ethereum_tracing_client: + logger.warning("Ethereum tracing client was not configured") + cls.instance = SafeTxProcessor( + ethereum_client, ethereum_tracing_client, get_aa_processor_service() + ) return cls.instance @classmethod @@ -87,23 +109,28 @@ def __init__( self, ethereum_client: EthereumClient, ethereum_tracing_client: Optional[EthereumClient], + aa_processor_service: AaProcessorService, ): """ :param ethereum_client: Used for regular RPC calls :param ethereum_tracing_client: Used for RPC calls requiring trace methods. It's required to get previous traces for a given `InternalTx` if not found on database + :param aa_processor_service: Used for detecting and processing 4337 transactions """ # This safe_tx_failure events allow us to detect a failed safe transaction self.ethereum_client = ethereum_client self.ethereum_tracing_client = ethereum_tracing_client + self.aa_processor_service = aa_processor_service dummy_w3 = Web3() self.safe_tx_failure_events = [ get_safe_V1_0_0_contract(dummy_w3).events.ExecutionFailed(), get_safe_V1_3_0_contract(dummy_w3).events.ExecutionFailure(), + get_safe_V1_4_1_contract(dummy_w3).events.ExecutionFailure(), ] self.safe_tx_module_failure_events = [ - get_safe_V1_3_0_contract(dummy_w3).events.ExecutionFromModuleFailure() + get_safe_V1_3_0_contract(dummy_w3).events.ExecutionFromModuleFailure(), + get_safe_V1_4_1_contract(dummy_w3).events.ExecutionFromModuleFailure(), ] self.safe_tx_failure_events_topics = { @@ -131,40 +158,54 @@ def is_failed( ) -> bool: """ Detects failure events on a Safe Multisig Tx + :param ethereum_tx: :param safe_tx_hash: :return: True if a Multisig Transaction is failed, False otherwise """ - # TODO Refactor this function to `Safe` in gnosis-py, it doesn't belong here - safe_tx_hash = HexBytes(safe_tx_hash).hex() + # TODO Refactor this function to `Safe` in safe-eth-py, it doesn't belong here + safe_tx_hash = HexBytes(safe_tx_hash) for log in ethereum_tx.logs: if ( log["topics"] and log["data"] and HexBytes(log["topics"][0]) in self.safe_tx_failure_events_topics - and log["data"][:66] == safe_tx_hash - ): # 66 is the beginning of the event data, the rest is payment - return True + ): + if ( + len(log["topics"]) == 2 + and HexBytes(log["topics"][1]) == safe_tx_hash + ): + # On v1.4.1 safe_tx_hash is indexed, so it will be topic[1] + # event ExecutionFailure(bytes32 indexed txHash, uint256 payment); + return True + elif HexBytes(log["data"])[:32] == safe_tx_hash: + # On v1.3.0 safe_tx_hash was not indexed, it was stored in the first 32 bytes, the rest is payment + # event ExecutionFailure(bytes32 txHash, uint256 payment); + return True return False def is_module_failed( - self, ethereum_tx: EthereumTx, module_address: str, safe_address: str + self, + ethereum_tx: EthereumTx, + module_address: ChecksumAddress, + safe_address: ChecksumAddress, ) -> bool: """ Detects module failure events on a Safe Module Tx + :param ethereum_tx: :param module_address: :param safe_address: :return: True if a Module Transaction is failed, False otherwise """ - # TODO Refactor this function to `Safe` in gnosis-py, it doesn't belong here + # TODO Refactor this function to `Safe` in safe-eth-py, it doesn't belong here for log in ethereum_tx.logs: if ( len(log["topics"]) == 2 and (log["address"] == safe_address if "address" in log else True) and HexBytes(log["topics"][0]) in self.safe_tx_module_failure_topics and 
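# The two ExecutionFailure layouts distinguished in is_failed() above, shown as plain
# web3-style log dicts with placeholder values. In v1.4.1 txHash is indexed and therefore
# sits in topics[1]; in v1.3.0 it is the first 32 bytes of `data`, followed by the
# payment amount.
from hexbytes import HexBytes

safe_tx_hash = HexBytes("0x" + "ab" * 32)
payment = HexBytes("0x" + "00" * 32)      # uint256 payment, placeholder
event_topic = HexBytes("0x" + "00" * 32)  # topics[0], placeholder

log_v1_4_1 = {"topics": [event_topic, safe_tx_hash], "data": payment}
log_v1_3_0 = {"topics": [event_topic], "data": safe_tx_hash + payment}

assert HexBytes(log_v1_4_1["topics"][1]) == safe_tx_hash
assert HexBytes(log_v1_3_0["data"])[:32] == safe_tx_hash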
HexBytes(log["topics"][1])[-20:] - == HexBytes(module_address) # 20 address size in bytes + == HexBytes(module_address) # 20 bytes is an address size ): return True return False @@ -187,7 +228,7 @@ def get_last_safe_status_for_address( ) or SafeLastStatus.objects.get_or_generate(address) return safe_status except SafeLastStatus.DoesNotExist: - logger.error("SafeLastStatus not found for address=%s", address) + logger.error("[%s] SafeLastStatus not found", address) def is_version_breaking_signatures( self, old_safe_version: str, new_safe_version: str @@ -226,10 +267,10 @@ def swap_owner( contract_address = internal_tx._from if owner not in safe_status.owners: logger.error( - "Error processing trace=%s for contract=%s with tx-hash=%s. Cannot remove owner=%s . " + "[%s] Error processing trace=%s with tx-hash=%s. Cannot remove owner=%s . " "Current owners=%s", - internal_tx.trace_address, contract_address, + internal_tx.trace_address, internal_tx.ethereum_tx_id, owner, safe_status.owners, @@ -240,12 +281,20 @@ def swap_owner( if not new_owner: safe_status.owners.remove(owner) + SafeContractDelegate.objects.remove_delegates_for_owner_in_safe( + safe_status.address, owner + ) else: # Replace owner by new_owner in the same place of the list + old_owners = list(safe_status.owners) safe_status.owners = [ new_owner if current_owner == owner else current_owner for current_owner in safe_status.owners ] + if old_owners != safe_status.owners: + SafeContractDelegate.objects.remove_delegates_for_owner_in_safe( + safe_status.address, owner + ) MultisigConfirmation.objects.remove_unused_confirmations( contract_address, safe_status.nonce, owner ) @@ -307,8 +356,12 @@ def __process_decoded_transaction( :return: True if tx could be processed, False otherwise """ internal_tx = internal_tx_decoded.internal_tx + ethereum_tx = internal_tx.ethereum_tx + contract_address = internal_tx._from + logger.debug( - "Start processing InternalTxDecoded in tx-hash=%s", + "[%s] Start processing InternalTxDecoded in tx-hash=%s", + contract_address, HexBytes(internal_tx_decoded.internal_tx.ethereum_tx_id).hex(), ) @@ -317,19 +370,19 @@ def __process_decoded_transaction( # this kind of functions due to little gas used. 
Some of this transactions get decoded as they were # valid in old versions of the proxies, like changes to `setup` logger.debug( - "Calling a non existing function, will not process it", + "[%s] Calling a non existing function, will not process it", + contract_address, ) return False function_name = internal_tx_decoded.function_name arguments = internal_tx_decoded.arguments - contract_address = internal_tx._from master_copy = internal_tx.to processed_successfully = True if function_name == "setup" and contract_address != NULL_ADDRESS: # Index new Safes - logger.debug("Processing Safe setup") + logger.debug("[%s] Processing Safe setup", contract_address) owners = arguments["_owners"] threshold = arguments["_threshold"] fallback_handler = arguments.get("fallbackHandler", NULL_ADDRESS) @@ -366,7 +419,7 @@ def __process_decoded_transaction( # Usually this happens from Safes coming from a not supported Master Copy # TODO When archive node is available, build SafeStatus from blockchain status logger.debug( - "Cannot process trace as `SafeLastStatus` is not found for Safe=%s", + "[%s] Cannot process trace as `SafeLastStatus` is not found", contract_address, ) processed_successfully = False @@ -375,7 +428,9 @@ def __process_decoded_transaction( "removeOwner", "removeOwnerWithThreshold", ): - logger.debug("Processing owner/threshold modification") + logger.debug( + "[%s] Processing owner/threshold modification", contract_address + ) safe_last_status.threshold = ( arguments["_threshold"] or safe_last_status.threshold ) # Event doesn't have threshold @@ -386,17 +441,17 @@ def __process_decoded_transaction( self.swap_owner(internal_tx, safe_last_status, owner, None) self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "swapOwner": - logger.debug("Processing owner swap") + logger.debug("[%s] Processing owner swap", contract_address) old_owner = arguments["oldOwner"] new_owner = arguments["newOwner"] self.swap_owner(internal_tx, safe_last_status, old_owner, new_owner) self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "changeThreshold": - logger.debug("Processing threshold change") + logger.debug("[%s] Processing threshold change", contract_address) safe_last_status.threshold = arguments["_threshold"] self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "changeMasterCopy": - logger.debug("Processing master copy change") + logger.debug("[%s] Processing master copy change", contract_address) # TODO Ban address if it doesn't have a valid master copy old_safe_version = self.get_safe_version_from_master_copy( safe_last_status.master_copy @@ -416,7 +471,7 @@ def __process_decoded_transaction( MultisigTransaction.objects.queued(contract_address).delete() self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "setFallbackHandler": - logger.debug("Setting FallbackHandler") + logger.debug("[%s] Setting FallbackHandler", contract_address) safe_last_status.fallback_handler = arguments["handler"] self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "setGuard": @@ -424,25 +479,24 @@ def __process_decoded_transaction( arguments["guard"] if arguments["guard"] != NULL_ADDRESS else None ) if safe_last_status.guard: - logger.debug("Setting Guard") + logger.debug("[%s] Setting Guard", contract_address) else: - logger.debug("Unsetting Guard") + logger.debug("[%s] Unsetting Guard", contract_address) self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == 
"enableModule": - logger.debug("Enabling Module") + logger.debug("[%s] Enabling Module", contract_address) safe_last_status.enabled_modules.append(arguments["module"]) self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "disableModule": - logger.debug("Disabling Module") + logger.debug("[%s] Disabling Module", contract_address) safe_last_status.enabled_modules.remove(arguments["module"]) self.store_new_safe_status(safe_last_status, internal_tx) elif function_name in { "execTransactionFromModule", "execTransactionFromModuleReturnData", }: - logger.debug("Executing Tx from Module") + logger.debug("[%s] Executing Tx from Module", contract_address) # TODO Add test with previous traces for processing a module transaction - ethereum_tx = internal_tx.ethereum_tx if "module" in arguments: # L2 Safe with event SafeModuleTransaction indexed using events module_address = arguments["module"] @@ -459,7 +513,8 @@ def __process_decoded_transaction( ) if not previous_trace: message = ( - f"Cannot find previous trace for tx-hash={HexBytes(internal_tx.ethereum_tx_id).hex()} " + f"[{contract_address}] Cannot find previous trace for " + f"tx-hash={HexBytes(internal_tx.ethereum_tx_id).hex()} " f"and trace-address={internal_tx.trace_address}" ) logger.warning(message) @@ -487,11 +542,21 @@ def __process_decoded_transaction( "failed": failed, }, ) + # Detect 4337 UserOperations in this transaction + number_detected_user_operations = ( + self.aa_processor_service.process_aa_transaction( + contract_address, ethereum_tx + ) + ) + logger.debug( + "[%s] Detected %d 4337 transaction(s)", + contract_address, + number_detected_user_operations, + ) elif function_name == "approveHash": - logger.debug("Processing hash approval") + logger.debug("[%s] Processing hash approval", contract_address) multisig_transaction_hash = arguments["hashToApprove"] - ethereum_tx = internal_tx.ethereum_tx if "owner" in arguments: # Event approveHash owner = arguments["owner"] else: @@ -504,7 +569,7 @@ def __process_decoded_transaction( ) if not previous_trace: message = ( - f"Cannot find previous trace for tx-hash={HexBytes(internal_tx.ethereum_tx_id).hex()} and " + f"[{contract_address}] Cannot find previous trace for tx-hash={HexBytes(internal_tx.ethereum_tx_id).hex()} and " f"trace-address={internal_tx.trace_address}" ) logger.warning(message) @@ -530,7 +595,7 @@ def __process_decoded_transaction( multisig_confirmation.ethereum_tx = ethereum_tx multisig_confirmation.save(update_fields=["ethereum_tx"]) elif function_name == "execTransaction": - logger.debug("Processing transaction execution") + logger.debug("[%s] Processing transaction execution", contract_address) # Events for L2 Safes store information about nonce nonce = ( arguments["nonce"] @@ -569,8 +634,6 @@ def __process_decoded_transaction( ) safe_tx_hash = safe_tx.safe_tx_hash - ethereum_tx = internal_tx.ethereum_tx - failed = self.is_failed(ethereum_tx, safe_tx_hash) multisig_tx, _ = MultisigTransaction.objects.get_or_create( safe_tx_hash=safe_tx_hash, @@ -635,14 +698,17 @@ def __process_decoded_transaction( safe_last_status.nonce = nonce + 1 self.store_new_safe_status(safe_last_status, internal_tx) elif function_name == "execTransactionFromModule": - logger.debug("Not processing execTransactionFromModule") + logger.debug( + "[%s] Not processing execTransactionFromModule", contract_address + ) # No side effects or nonce increasing, but trace will be set as processed else: processed_successfully = False logger.warning( - "Cannot process 
InternalTxDecoded function_name=%s and arguments=%s", + "[%s] Cannot process InternalTxDecoded function_name=%s and arguments=%s", + contract_address, function_name, arguments, ) - logger.debug("End processing") + logger.debug("[%s] End processing", contract_address) return processed_successfully diff --git a/safe_transaction_service/history/management/commands/check_chainid_matches.py b/safe_transaction_service/history/management/commands/check_chainid_matches.py index 224bb32c..0d310602 100644 --- a/safe_transaction_service/history/management/commands/check_chainid_matches.py +++ b/safe_transaction_service/history/management/commands/check_chainid_matches.py @@ -1,5 +1,6 @@ from django.core.management.base import BaseCommand, CommandError +from safe_transaction_service.account_abstraction.utils import get_bundler_client from safe_transaction_service.utils.ethereum import get_chain_id from ...models import Chain @@ -16,11 +17,23 @@ def handle(self, *args, **options): except Chain.DoesNotExist: chain = Chain.objects.create(chain_id=chain_id) - if chain.chain_id == chain_id: - self.stdout.write( - self.style.SUCCESS(f"EthereumRPC chainId {chain_id} looks good") - ) - else: + if chain_id != chain.chain_id: raise CommandError( f"EthereumRPC chainId {chain_id} does not match previously used chainId {chain.chain_id}" ) + self.stdout.write( + self.style.SUCCESS(f"EthereumRPC chainId {chain_id} looks good") + ) + + if bundler_client := get_bundler_client(): + bundler_chain_id = bundler_client.get_chain_id() + if bundler_chain_id != chain.chain_id: + raise CommandError( + f"ERC4337 BundlerClient chainId {bundler_chain_id} does not match " + f"EthereumClient chainId {chain.chain_id}" + ) + self.stdout.write( + self.style.SUCCESS( + f"ERC4337 BundlerClient chainId {chain_id} looks good" + ) + ) diff --git a/safe_transaction_service/history/management/commands/check_index_problems.py b/safe_transaction_service/history/management/commands/check_index_problems.py index 33c70510..f54db725 100644 --- a/safe_transaction_service/history/management/commands/check_index_problems.py +++ b/safe_transaction_service/history/management/commands/check_index_problems.py @@ -40,6 +40,12 @@ def add_arguments(self, parser): help="Number of blocks to query each time if reindexing", default=100, ) + parser.add_argument( + "--batch-size", + type=int, + help="Size of batch requests", + default=1000, + ) def get_nonce_fn(self, ethereum_client: EthereumClient): return get_safe_V1_3_0_contract( @@ -51,100 +57,105 @@ def handle(self, *args, **options): reindex = not options["dont_reindex"] force_batch_call = options["force_batch_call"] block_process_limit = options["block_process_limit"] - + batch_size = options["batch_size"] queryset = SafeLastStatus.objects.all() if settings.ETH_L2_NETWORK: # Filter nonce=0 to exclude not initialized or non L2 Safes in a L2 network queryset = queryset.exclude(nonce=0) - count = queryset.count() - batch = 1000 - index_service = IndexServiceProvider() - ethereum_client = index_service.ethereum_client - nonce_fn = self.get_nonce_fn(ethereum_client) - first_issue_block_number = ethereum_client.current_block_number - all_problematic_addresses = set() - - for i in range(0, count, batch): - self.stdout.write(self.style.SUCCESS(f"Processed {i}/{count}")) - safe_statuses = queryset[i : i + batch] - safe_statuses_list = list(safe_statuses) # Force retrieve queryset from DB - - blockchain_nonces = ethereum_client.batch_call_same_function( - nonce_fn, - [safe_status.address for safe_status in 
safe_statuses_list], - raise_exception=False, - force_batch_call=force_batch_call, - ) + if (count := queryset.count()) > 0: + index_service = IndexServiceProvider() + ethereum_client = index_service.ethereum_client + nonce_fn = self.get_nonce_fn(ethereum_client) + first_issue_block_number = ethereum_client.current_block_number + all_problematic_addresses = set() + + for i in range(0, count, batch_size): + self.stdout.write(self.style.SUCCESS(f"Processed {i}/{count}")) + safe_statuses = queryset[i : i + batch_size] + safe_statuses_list = list( + safe_statuses + ) # Force retrieve queryset from DB + + blockchain_nonces = ethereum_client.batch_call_same_function( + nonce_fn, + [safe_status.address for safe_status in safe_statuses_list], + raise_exception=False, + force_batch_call=force_batch_call, + ) - addresses_to_reindex = set() - for safe_status, blockchain_nonce in zip( - safe_statuses_list, blockchain_nonces - ): - address = safe_status.address - nonce = safe_status.nonce - if safe_status.is_corrupted(): - self.stdout.write( - self.style.WARNING( - f"Safe={address} is corrupted, has some old " - f"transactions missing" + addresses_to_reindex = set() + for safe_status, blockchain_nonce in zip( + safe_statuses_list, blockchain_nonces + ): + address = safe_status.address + nonce = safe_status.nonce + if safe_status.is_corrupted(): + self.stdout.write( + self.style.WARNING( + f"Safe={address} is corrupted, has some old " + f"transactions missing" + ) ) - ) - addresses_to_reindex.add(address) + addresses_to_reindex.add(address) - if blockchain_nonce is None: - self.stdout.write( - self.style.WARNING( - f"Safe={address} looks problematic, " - f"cannot retrieve blockchain-nonce" - ) - ) - elif nonce != blockchain_nonce: - self.stdout.write( - self.style.WARNING( - f"Safe={address} stored nonce={nonce} is " - f"different from blockchain-nonce={blockchain_nonce}" + if blockchain_nonce is None: + self.stdout.write( + self.style.WARNING( + f"Safe={address} looks problematic, " + f"cannot retrieve blockchain-nonce" + ) ) - ) - if last_valid_transaction := MultisigTransaction.objects.last_valid_transaction( - address - ): + elif nonce != blockchain_nonce: self.stdout.write( self.style.WARNING( - f"Last valid transaction for Safe={address} has safe-nonce={last_valid_transaction.nonce} " - f"safe-transaction-hash={last_valid_transaction.safe_tx_hash} and " - f"ethereum-tx-hash={last_valid_transaction.ethereum_tx_id}" + f"Safe={address} stored nonce={nonce} is " + f"different from blockchain-nonce={blockchain_nonce}" ) ) - first_issue_block_number = min( - last_valid_transaction.ethereum_tx.block_id, - first_issue_block_number, + if last_valid_transaction := MultisigTransaction.objects.last_valid_transaction( + address + ): + self.stdout.write( + self.style.WARNING( + f"Last valid transaction for Safe={address} has safe-nonce={last_valid_transaction.nonce} " + f"safe-transaction-hash={last_valid_transaction.safe_tx_hash} and " + f"ethereum-tx-hash={last_valid_transaction.ethereum_tx_id}" + ) + ) + first_issue_block_number = min( + last_valid_transaction.ethereum_tx.block_id, + first_issue_block_number, + ) + addresses_to_reindex.add(address) + + if reindex and addresses_to_reindex: + self.stdout.write( + self.style.SUCCESS( + f"Reindexing from-block-number={first_issue_block_number} Safes={addresses_to_reindex}" ) - addresses_to_reindex.add(address) + ) + index_service.reindex_master_copies( + first_issue_block_number, + block_process_limit=block_process_limit, + addresses=list(addresses_to_reindex), 
+ ) - if reindex and addresses_to_reindex: - self.stdout.write( - self.style.SUCCESS( - f"Reindexing from-block-number={first_issue_block_number} Safes={addresses_to_reindex}" + if fix and addresses_to_reindex: + self.stdout.write( + self.style.SUCCESS(f"Fixing Safes={addresses_to_reindex}") ) - ) - index_service.reindex_master_copies( - first_issue_block_number, - block_process_limit=block_process_limit, - addresses=list(addresses_to_reindex), - ) + index_service.reprocess_addresses(addresses_to_reindex) - if fix and addresses_to_reindex: + all_problematic_addresses |= addresses_to_reindex + + if all_problematic_addresses: self.stdout.write( - self.style.SUCCESS(f"Fixing Safes={addresses_to_reindex}") + self.style.SUCCESS( + f"First issue found on {first_issue_block_number} - Problematic Safes {all_problematic_addresses}" + ) ) - index_service.reprocess_addresses(addresses_to_reindex) - - all_problematic_addresses |= addresses_to_reindex - - if all_problematic_addresses: + else: self.stdout.write( - self.style.SUCCESS( - f"First issue found on {first_issue_block_number} - Problematic Safes {all_problematic_addresses}" - ) + self.style.SUCCESS("Database does not have any addresses to be checked") ) diff --git a/safe_transaction_service/history/management/commands/export_multisig_tx_data.py b/safe_transaction_service/history/management/commands/export_multisig_tx_data.py index 6f880e21..a89398b8 100644 --- a/safe_transaction_service/history/management/commands/export_multisig_tx_data.py +++ b/safe_transaction_service/history/management/commands/export_multisig_tx_data.py @@ -60,9 +60,13 @@ def handle(self, *args, **options): m.to, str(m.failed), m.origin, - json.dumps(decoder.get_data_decoded(m.data.tobytes())) - if m.data - else "", + ( + json.dumps( + decoder.get_data_decoded(m.data.tobytes()) + ) + if m.data + else "" + ), ] ) + "\n" diff --git a/safe_transaction_service/history/migrations/0001_initial.py b/safe_transaction_service/history/migrations/0001_initial.py index 2b68430a..5705c77e 100644 --- a/safe_transaction_service/history/migrations/0001_initial.py +++ b/safe_transaction_service/history/migrations/0001_initial.py @@ -10,7 +10,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/safe_transaction_service/history/migrations/0002_auto_20190725_0857.py b/safe_transaction_service/history/migrations/0002_auto_20190725_0857.py index bf8a36c2..24aca248 100644 --- a/safe_transaction_service/history/migrations/0002_auto_20190725_0857.py +++ b/safe_transaction_service/history/migrations/0002_auto_20190725_0857.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0001_initial"), ] diff --git a/safe_transaction_service/history/migrations/0003_auto_20191107_1459.py b/safe_transaction_service/history/migrations/0003_auto_20191107_1459.py index 92f14aeb..f1eb0681 100644 --- a/safe_transaction_service/history/migrations/0003_auto_20191107_1459.py +++ b/safe_transaction_service/history/migrations/0003_auto_20191107_1459.py @@ -12,7 +12,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0002_auto_20190725_0857"), ] diff --git a/safe_transaction_service/history/migrations/0004_auto_20191118_1001.py b/safe_transaction_service/history/migrations/0004_auto_20191118_1001.py index 3e8d5c1b..19ebf083 100644 --- a/safe_transaction_service/history/migrations/0004_auto_20191118_1001.py +++ b/safe_transaction_service/history/migrations/0004_auto_20191118_1001.py @@ -4,7 +4,6 @@ class
Migration(migrations.Migration): - dependencies = [ ("history", "0003_auto_20191107_1459"), ] diff --git a/safe_transaction_service/history/migrations/0005_multisigtransaction_failed.py b/safe_transaction_service/history/migrations/0005_multisigtransaction_failed.py index 87588e26..95ad0b45 100644 --- a/safe_transaction_service/history/migrations/0005_multisigtransaction_failed.py +++ b/safe_transaction_service/history/migrations/0005_multisigtransaction_failed.py @@ -22,7 +22,6 @@ def set_failed_for_multisig_txs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0004_auto_20191118_1001"), ] diff --git a/safe_transaction_service/history/migrations/0006_auto_20200113_1204.py b/safe_transaction_service/history/migrations/0006_auto_20200113_1204.py index 3512d17d..35f8225e 100644 --- a/safe_transaction_service/history/migrations/0006_auto_20200113_1204.py +++ b/safe_transaction_service/history/migrations/0006_auto_20200113_1204.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0005_multisigtransaction_failed"), ] diff --git a/safe_transaction_service/history/migrations/0007_auto_20200122_1305.py b/safe_transaction_service/history/migrations/0007_auto_20200122_1305.py index ef5ef964..b9e51f2f 100644 --- a/safe_transaction_service/history/migrations/0007_auto_20200122_1305.py +++ b/safe_transaction_service/history/migrations/0007_auto_20200122_1305.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0006_auto_20200113_1204"), ] diff --git a/safe_transaction_service/history/migrations/0008_ethereumtx_logs.py b/safe_transaction_service/history/migrations/0008_ethereumtx_logs.py index 56e05f7e..de3a8a8c 100644 --- a/safe_transaction_service/history/migrations/0008_ethereumtx_logs.py +++ b/safe_transaction_service/history/migrations/0008_ethereumtx_logs.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0007_auto_20200122_1305"), ] diff --git a/safe_transaction_service/history/migrations/0009_multisigtransaction_origin.py b/safe_transaction_service/history/migrations/0009_multisigtransaction_origin.py index 35c0d3cc..086b5884 100644 --- a/safe_transaction_service/history/migrations/0009_multisigtransaction_origin.py +++ b/safe_transaction_service/history/migrations/0009_multisigtransaction_origin.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0008_ethereumtx_logs"), ] diff --git a/safe_transaction_service/history/migrations/0010_auto_20200226_1508.py b/safe_transaction_service/history/migrations/0010_auto_20200226_1508.py index 9caf792a..3308b586 100644 --- a/safe_transaction_service/history/migrations/0010_auto_20200226_1508.py +++ b/safe_transaction_service/history/migrations/0010_auto_20200226_1508.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0009_multisigtransaction_origin"), ] diff --git a/safe_transaction_service/history/migrations/0011_auto_20200303_0956.py b/safe_transaction_service/history/migrations/0011_auto_20200303_0956.py index cf0cd9ed..69f81a5e 100644 --- a/safe_transaction_service/history/migrations/0011_auto_20200303_0956.py +++ b/safe_transaction_service/history/migrations/0011_auto_20200303_0956.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0010_auto_20200226_1508"), ] diff --git a/safe_transaction_service/history/migrations/0012_moduletransaction.py 
b/safe_transaction_service/history/migrations/0012_moduletransaction.py index efb109ce..6dee05e5 100644 --- a/safe_transaction_service/history/migrations/0012_moduletransaction.py +++ b/safe_transaction_service/history/migrations/0012_moduletransaction.py @@ -10,7 +10,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0011_auto_20200303_0956"), ] diff --git a/safe_transaction_service/history/migrations/0013_multisigtransaction_fix_failed.py b/safe_transaction_service/history/migrations/0013_multisigtransaction_fix_failed.py index 4b3558c2..d39f055f 100644 --- a/safe_transaction_service/history/migrations/0013_multisigtransaction_fix_failed.py +++ b/safe_transaction_service/history/migrations/0013_multisigtransaction_fix_failed.py @@ -24,7 +24,6 @@ def set_failed_for_multisig_txs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0012_moduletransaction"), ] diff --git a/safe_transaction_service/history/migrations/0014_auto_20200327_0954.py b/safe_transaction_service/history/migrations/0014_auto_20200327_0954.py index e291588a..ac96e760 100644 --- a/safe_transaction_service/history/migrations/0014_auto_20200327_0954.py +++ b/safe_transaction_service/history/migrations/0014_auto_20200327_0954.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0013_multisigtransaction_fix_failed"), ] diff --git a/safe_transaction_service/history/migrations/0015_auto_20200327_1233.py b/safe_transaction_service/history/migrations/0015_auto_20200327_1233.py index 5f7c4caf..f6bc31bb 100644 --- a/safe_transaction_service/history/migrations/0015_auto_20200327_1233.py +++ b/safe_transaction_service/history/migrations/0015_auto_20200327_1233.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0014_auto_20200327_0954"), ] diff --git a/safe_transaction_service/history/migrations/0016_multisigconfirmation_signature_type.py b/safe_transaction_service/history/migrations/0016_multisigconfirmation_signature_type.py index 989eb544..45b7bead 100644 --- a/safe_transaction_service/history/migrations/0016_multisigconfirmation_signature_type.py +++ b/safe_transaction_service/history/migrations/0016_multisigconfirmation_signature_type.py @@ -53,7 +53,6 @@ def add_signature_type(apps, schema_editor): or multisig_confirmation.signature_type != safe_signature.signature_type.value ): - multisig_confirmation.signature = safe_signature.export_signature() multisig_confirmation.signature_type = ( safe_signature.signature_type.value @@ -64,7 +63,6 @@ def add_signature_type(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0015_auto_20200327_1233"), ] diff --git a/safe_transaction_service/history/migrations/0017_safecontractdelegate.py b/safe_transaction_service/history/migrations/0017_safecontractdelegate.py index 8b3acdc8..aa28b2bf 100644 --- a/safe_transaction_service/history/migrations/0017_safecontractdelegate.py +++ b/safe_transaction_service/history/migrations/0017_safecontractdelegate.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0016_multisigconfirmation_signature_type"), ] diff --git a/safe_transaction_service/history/migrations/0018_multisigtransaction_trusted.py b/safe_transaction_service/history/migrations/0018_multisigtransaction_trusted.py index 483b164e..446a2645 100644 --- a/safe_transaction_service/history/migrations/0018_multisigtransaction_trusted.py +++ 
b/safe_transaction_service/history/migrations/0018_multisigtransaction_trusted.py @@ -9,7 +9,6 @@ def set_confirmed_txs_trusted(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0017_safecontractdelegate"), ] diff --git a/safe_transaction_service/history/migrations/0019_transactions_data_none.py b/safe_transaction_service/history/migrations/0019_transactions_data_none.py index 5f0e5a9e..9531658a 100644 --- a/safe_transaction_service/history/migrations/0019_transactions_data_none.py +++ b/safe_transaction_service/history/migrations/0019_transactions_data_none.py @@ -10,7 +10,6 @@ def set_data_none(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0018_multisigtransaction_trusted"), ] diff --git a/safe_transaction_service/history/migrations/0020_safemastercopy_version.py b/safe_transaction_service/history/migrations/0020_safemastercopy_version.py index 9c049c63..fe417ec4 100644 --- a/safe_transaction_service/history/migrations/0020_safemastercopy_version.py +++ b/safe_transaction_service/history/migrations/0020_safemastercopy_version.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0019_transactions_data_none"), ] diff --git a/safe_transaction_service/history/migrations/0021_moduletransaction_failed.py b/safe_transaction_service/history/migrations/0021_moduletransaction_failed.py index 16774a41..8e5c7d2d 100644 --- a/safe_transaction_service/history/migrations/0021_moduletransaction_failed.py +++ b/safe_transaction_service/history/migrations/0021_moduletransaction_failed.py @@ -20,7 +20,6 @@ def set_failed_for_module_txs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0020_safemastercopy_version"), ] diff --git a/safe_transaction_service/history/migrations/0022_auto_20200903_1045.py b/safe_transaction_service/history/migrations/0022_auto_20200903_1045.py index 0bc872a2..62bd1f8d 100644 --- a/safe_transaction_service/history/migrations/0022_auto_20200903_1045.py +++ b/safe_transaction_service/history/migrations/0022_auto_20200903_1045.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0021_moduletransaction_failed"), ] diff --git a/safe_transaction_service/history/migrations/0023_auto_20200924_0841.py b/safe_transaction_service/history/migrations/0023_auto_20200924_0841.py index 82fb29bb..a2196104 100644 --- a/safe_transaction_service/history/migrations/0023_auto_20200924_0841.py +++ b/safe_transaction_service/history/migrations/0023_auto_20200924_0841.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0022_auto_20200903_1045"), ] diff --git a/safe_transaction_service/history/migrations/0024_auto_20201014_1523.py b/safe_transaction_service/history/migrations/0024_auto_20201014_1523.py index c70a0878..d04535c6 100644 --- a/safe_transaction_service/history/migrations/0024_auto_20201014_1523.py +++ b/safe_transaction_service/history/migrations/0024_auto_20201014_1523.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0023_auto_20200924_0841"), ] diff --git a/safe_transaction_service/history/migrations/0025_auto_20201015_1147.py b/safe_transaction_service/history/migrations/0025_auto_20201015_1147.py index 879c8239..39d11996 100644 --- a/safe_transaction_service/history/migrations/0025_auto_20201015_1147.py +++ b/safe_transaction_service/history/migrations/0025_auto_20201015_1147.py @@ -5,7 +5,6 @@ class 
Migration(migrations.Migration): - dependencies = [ ("history", "0024_auto_20201014_1523"), ] diff --git a/safe_transaction_service/history/migrations/0026_auto_20201030_1355.py b/safe_transaction_service/history/migrations/0026_auto_20201030_1355.py index c53ca4e8..56167440 100644 --- a/safe_transaction_service/history/migrations/0026_auto_20201030_1355.py +++ b/safe_transaction_service/history/migrations/0026_auto_20201030_1355.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0025_auto_20201015_1147"), ] diff --git a/safe_transaction_service/history/migrations/0027_remove_errored_internal_txs_20201112_1547.py b/safe_transaction_service/history/migrations/0027_remove_errored_internal_txs_20201112_1547.py index 04a6dec8..9c9acd88 100644 --- a/safe_transaction_service/history/migrations/0027_remove_errored_internal_txs_20201112_1547.py +++ b/safe_transaction_service/history/migrations/0027_remove_errored_internal_txs_20201112_1547.py @@ -35,7 +35,6 @@ def delete_errored_internal_txs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0026_auto_20201030_1355"), ] diff --git a/safe_transaction_service/history/migrations/0028_auto_20201112_1613.py b/safe_transaction_service/history/migrations/0028_auto_20201112_1613.py index 3ebe4452..13898f21 100644 --- a/safe_transaction_service/history/migrations/0028_auto_20201112_1613.py +++ b/safe_transaction_service/history/migrations/0028_auto_20201112_1613.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0027_remove_errored_internal_txs_20201112_1547"), ] diff --git a/safe_transaction_service/history/migrations/0029_auto_20201118_1015.py b/safe_transaction_service/history/migrations/0029_auto_20201118_1015.py index 37c689c9..ce3436c6 100644 --- a/safe_transaction_service/history/migrations/0029_auto_20201118_1015.py +++ b/safe_transaction_service/history/migrations/0029_auto_20201118_1015.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0028_auto_20201112_1613"), ] diff --git a/safe_transaction_service/history/migrations/0030_auto_20210203_1541.py b/safe_transaction_service/history/migrations/0030_auto_20210203_1541.py index 69c7bbae..7acc1dec 100644 --- a/safe_transaction_service/history/migrations/0030_auto_20210203_1541.py +++ b/safe_transaction_service/history/migrations/0030_auto_20210203_1541.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0029_auto_20201118_1015"), ] diff --git a/safe_transaction_service/history/migrations/0031_webhook_new_safe.py b/safe_transaction_service/history/migrations/0031_webhook_new_safe.py index c5be9a8e..06c8ec86 100644 --- a/safe_transaction_service/history/migrations/0031_webhook_new_safe.py +++ b/safe_transaction_service/history/migrations/0031_webhook_new_safe.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0030_auto_20210203_1541"), ] diff --git a/safe_transaction_service/history/migrations/0032_webhook_new_module_transaction.py b/safe_transaction_service/history/migrations/0032_webhook_new_module_transaction.py index 4942a7c7..0c92d266 100644 --- a/safe_transaction_service/history/migrations/0032_webhook_new_module_transaction.py +++ b/safe_transaction_service/history/migrations/0032_webhook_new_module_transaction.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0031_webhook_new_safe"), ] diff --git 
a/safe_transaction_service/history/migrations/0033_auto_20210318_1654.py b/safe_transaction_service/history/migrations/0033_auto_20210318_1654.py index fb715b7a..c339d30e 100644 --- a/safe_transaction_service/history/migrations/0033_auto_20210318_1654.py +++ b/safe_transaction_service/history/migrations/0033_auto_20210318_1654.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0032_webhook_new_module_transaction"), ] diff --git a/safe_transaction_service/history/migrations/0034_webhook_new_outgoing_transaction.py b/safe_transaction_service/history/migrations/0034_webhook_new_outgoing_transaction.py index 09b67656..3adb0468 100644 --- a/safe_transaction_service/history/migrations/0034_webhook_new_outgoing_transaction.py +++ b/safe_transaction_service/history/migrations/0034_webhook_new_outgoing_transaction.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0033_auto_20210318_1654"), ] diff --git a/safe_transaction_service/history/migrations/0035_safemastercopy_deployer.py b/safe_transaction_service/history/migrations/0035_safemastercopy_deployer.py index b3c30c36..93fb46e5 100644 --- a/safe_transaction_service/history/migrations/0035_safemastercopy_deployer.py +++ b/safe_transaction_service/history/migrations/0035_safemastercopy_deployer.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0034_webhook_new_outgoing_transaction"), ] diff --git a/safe_transaction_service/history/migrations/0036_fix_exec_from_module.py b/safe_transaction_service/history/migrations/0036_fix_exec_from_module.py index cd190a39..e87c4f58 100644 --- a/safe_transaction_service/history/migrations/0036_fix_exec_from_module.py +++ b/safe_transaction_service/history/migrations/0036_fix_exec_from_module.py @@ -49,7 +49,6 @@ def fix_ethereum_logs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0035_safemastercopy_deployer"), ] diff --git a/safe_transaction_service/history/migrations/0037_fix_failed_module_transactions.py b/safe_transaction_service/history/migrations/0037_fix_failed_module_transactions.py index 94f70d62..57e31571 100644 --- a/safe_transaction_service/history/migrations/0037_fix_failed_module_transactions.py +++ b/safe_transaction_service/history/migrations/0037_fix_failed_module_transactions.py @@ -19,7 +19,6 @@ def fix_failed_module_transactions(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0036_fix_exec_from_module"), ] diff --git a/safe_transaction_service/history/migrations/0038_safestatus_guard.py b/safe_transaction_service/history/migrations/0038_safestatus_guard.py index 53b62a02..a9abde8c 100644 --- a/safe_transaction_service/history/migrations/0038_safestatus_guard.py +++ b/safe_transaction_service/history/migrations/0038_safestatus_guard.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0037_fix_failed_module_transactions"), ] diff --git a/safe_transaction_service/history/migrations/0039_safel2mastercopy_20210519.py b/safe_transaction_service/history/migrations/0039_safel2mastercopy_20210519.py index 244ae3e9..73f2b0b7 100644 --- a/safe_transaction_service/history/migrations/0039_safel2mastercopy_20210519.py +++ b/safe_transaction_service/history/migrations/0039_safel2mastercopy_20210519.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0038_safestatus_guard"), ] diff --git 
a/safe_transaction_service/history/migrations/0040_auto_20210607_1007.py b/safe_transaction_service/history/migrations/0040_auto_20210607_1007.py index e29e7586..a9b5a21f 100644 --- a/safe_transaction_service/history/migrations/0040_auto_20210607_1007.py +++ b/safe_transaction_service/history/migrations/0040_auto_20210607_1007.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0039_safel2mastercopy_20210519"), ] diff --git a/safe_transaction_service/history/migrations/0041_auto_20210729_0916.py b/safe_transaction_service/history/migrations/0041_auto_20210729_0916.py index d7d8c35f..416e1efe 100644 --- a/safe_transaction_service/history/migrations/0041_auto_20210729_0916.py +++ b/safe_transaction_service/history/migrations/0041_auto_20210729_0916.py @@ -42,7 +42,6 @@ def migrate_back_l2_master_copies(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0040_auto_20210607_1007"), ] diff --git a/safe_transaction_service/history/migrations/0042_safestatus_history_saf_address_1c362b_idx.py b/safe_transaction_service/history/migrations/0042_safestatus_history_saf_address_1c362b_idx.py index 76ab4821..8b0a5a74 100644 --- a/safe_transaction_service/history/migrations/0042_safestatus_history_saf_address_1c362b_idx.py +++ b/safe_transaction_service/history/migrations/0042_safestatus_history_saf_address_1c362b_idx.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0041_auto_20210729_0916"), ] diff --git a/safe_transaction_service/history/migrations/0043_auto_20210927_1515.py b/safe_transaction_service/history/migrations/0043_auto_20210927_1515.py index ceb878e9..03fe641c 100644 --- a/safe_transaction_service/history/migrations/0043_auto_20210927_1515.py +++ b/safe_transaction_service/history/migrations/0043_auto_20210927_1515.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0042_safestatus_history_saf_address_1c362b_idx"), ] diff --git a/safe_transaction_service/history/migrations/0045_erc20transfer_erc721transfer.py b/safe_transaction_service/history/migrations/0045_erc20transfer_erc721transfer.py index b6ee5260..09aa4879 100644 --- a/safe_transaction_service/history/migrations/0045_erc20transfer_erc721transfer.py +++ b/safe_transaction_service/history/migrations/0045_erc20transfer_erc721transfer.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0044_reprocess_module_txs"), ] diff --git a/safe_transaction_service/history/migrations/0046_delete_ethereumevent.py b/safe_transaction_service/history/migrations/0046_delete_ethereumevent.py index ee083af8..be987951 100644 --- a/safe_transaction_service/history/migrations/0046_delete_ethereumevent.py +++ b/safe_transaction_service/history/migrations/0046_delete_ethereumevent.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0045_erc20transfer_erc721transfer"), ] diff --git a/safe_transaction_service/history/migrations/0047_auto_20211102_1659.py b/safe_transaction_service/history/migrations/0047_auto_20211102_1659.py index 29e90206..8e5dd4d3 100644 --- a/safe_transaction_service/history/migrations/0047_auto_20211102_1659.py +++ b/safe_transaction_service/history/migrations/0047_auto_20211102_1659.py @@ -27,7 +27,6 @@ def remove_duplicated(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0046_delete_ethereumevent"), ] diff --git 
a/safe_transaction_service/history/migrations/0048_block_number_token_transfers_20211126_1443.py b/safe_transaction_service/history/migrations/0048_block_number_token_transfers_20211126_1443.py index 7a6884eb..4f87012d 100644 --- a/safe_transaction_service/history/migrations/0048_block_number_token_transfers_20211126_1443.py +++ b/safe_transaction_service/history/migrations/0048_block_number_token_transfers_20211126_1443.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0047_auto_20211102_1659"), ] diff --git a/safe_transaction_service/history/migrations/0049_block_number_internaltx_20211129_1111.py b/safe_transaction_service/history/migrations/0049_block_number_internaltx_20211129_1111.py index 1a98ed3b..e868da6d 100644 --- a/safe_transaction_service/history/migrations/0049_block_number_internaltx_20211129_1111.py +++ b/safe_transaction_service/history/migrations/0049_block_number_internaltx_20211129_1111.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0048_block_number_token_transfers_20211126_1443"), ] diff --git a/safe_transaction_service/history/migrations/0050_ethereum_address_field_v2_20211201_1507.py b/safe_transaction_service/history/migrations/0050_ethereum_address_field_v2_20211201_1507.py index 3cd20e56..24e01ee9 100644 --- a/safe_transaction_service/history/migrations/0050_ethereum_address_field_v2_20211201_1507.py +++ b/safe_transaction_service/history/migrations/0050_ethereum_address_field_v2_20211201_1507.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0049_block_number_internaltx_20211129_1111"), ] diff --git a/safe_transaction_service/history/migrations/0051_ethereum_address_field_v2_update.py b/safe_transaction_service/history/migrations/0051_ethereum_address_field_v2_update.py index 8854bc92..93fa3550 100644 --- a/safe_transaction_service/history/migrations/0051_ethereum_address_field_v2_update.py +++ b/safe_transaction_service/history/migrations/0051_ethereum_address_field_v2_update.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0050_ethereum_address_field_v2_20211201_1507"), ] diff --git a/safe_transaction_service/history/migrations/0052_keccak256_field_20211209_1628.py b/safe_transaction_service/history/migrations/0052_keccak256_field_20211209_1628.py index 7774ea32..75d61ac4 100644 --- a/safe_transaction_service/history/migrations/0052_keccak256_field_20211209_1628.py +++ b/safe_transaction_service/history/migrations/0052_keccak256_field_20211209_1628.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0051_ethereum_address_field_v2_update"), ] diff --git a/safe_transaction_service/history/migrations/0053_alter_webhook_address.py b/safe_transaction_service/history/migrations/0053_alter_webhook_address.py index f308dafc..97a433a5 100644 --- a/safe_transaction_service/history/migrations/0053_alter_webhook_address.py +++ b/safe_transaction_service/history/migrations/0053_alter_webhook_address.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0052_keccak256_field_20211209_1628"), ] diff --git a/safe_transaction_service/history/migrations/0054_webhook_authorization.py b/safe_transaction_service/history/migrations/0054_webhook_authorization.py index 71ebe028..1b7b3c4e 100644 --- a/safe_transaction_service/history/migrations/0054_webhook_authorization.py +++ b/safe_transaction_service/history/migrations/0054_webhook_authorization.py 
@@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0053_alter_webhook_address"), ] diff --git a/safe_transaction_service/history/migrations/0055_alter_multisigtransaction_options.py b/safe_transaction_service/history/migrations/0055_alter_multisigtransaction_options.py index e3ffdb71..21fd0466 100644 --- a/safe_transaction_service/history/migrations/0055_alter_multisigtransaction_options.py +++ b/safe_transaction_service/history/migrations/0055_alter_multisigtransaction_options.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0054_webhook_authorization"), ] diff --git a/safe_transaction_service/history/migrations/0056_alter_multisigconfirmation_signature.py b/safe_transaction_service/history/migrations/0056_alter_multisigconfirmation_signature.py index 0a5899c2..6e5576a5 100644 --- a/safe_transaction_service/history/migrations/0056_alter_multisigconfirmation_signature.py +++ b/safe_transaction_service/history/migrations/0056_alter_multisigconfirmation_signature.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0055_alter_multisigtransaction_options"), ] diff --git a/safe_transaction_service/history/migrations/0057_alter_webhook_authorization.py b/safe_transaction_service/history/migrations/0057_alter_webhook_authorization.py index 6dc543ad..2c4c78ef 100644 --- a/safe_transaction_service/history/migrations/0057_alter_webhook_authorization.py +++ b/safe_transaction_service/history/migrations/0057_alter_webhook_authorization.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0056_alter_multisigconfirmation_signature"), ] diff --git a/safe_transaction_service/history/migrations/0058_alter_webhook_url.py b/safe_transaction_service/history/migrations/0058_alter_webhook_url.py index dae05ae3..d567e32f 100644 --- a/safe_transaction_service/history/migrations/0058_alter_webhook_url.py +++ b/safe_transaction_service/history/migrations/0058_alter_webhook_url.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0057_alter_webhook_authorization"), ] diff --git a/safe_transaction_service/history/migrations/0059_auto_20220408_1020.py b/safe_transaction_service/history/migrations/0059_auto_20220408_1020.py index 8d1e4423..a1933cc8 100644 --- a/safe_transaction_service/history/migrations/0059_auto_20220408_1020.py +++ b/safe_transaction_service/history/migrations/0059_auto_20220408_1020.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0058_alter_webhook_url"), ] diff --git a/safe_transaction_service/history/migrations/0060_add_eip1559_fields_20220419_0955.py b/safe_transaction_service/history/migrations/0060_add_eip1559_fields_20220419_0955.py index 38471478..fabad44f 100644 --- a/safe_transaction_service/history/migrations/0060_add_eip1559_fields_20220419_0955.py +++ b/safe_transaction_service/history/migrations/0060_add_eip1559_fields_20220419_0955.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0059_auto_20220408_1020"), ] diff --git a/safe_transaction_service/history/migrations/0061_alter_internaltx_block_number.py b/safe_transaction_service/history/migrations/0061_alter_internaltx_block_number.py index 75c35705..c2d94f96 100644 --- a/safe_transaction_service/history/migrations/0061_alter_internaltx_block_number.py +++ b/safe_transaction_service/history/migrations/0061_alter_internaltx_block_number.py @@ -4,7 +4,6 @@ class 
Migration(migrations.Migration): - dependencies = [ ("history", "0060_add_eip1559_fields_20220419_0955"), ] diff --git a/safe_transaction_service/history/migrations/0062_add_safe_last_status_20220429_0922.py b/safe_transaction_service/history/migrations/0062_add_safe_last_status_20220429_0922.py index d8fb273c..1302d44b 100644 --- a/safe_transaction_service/history/migrations/0062_add_safe_last_status_20220429_0922.py +++ b/safe_transaction_service/history/migrations/0062_add_safe_last_status_20220429_0922.py @@ -38,7 +38,6 @@ def init_safe_last_status(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0061_alter_internaltx_block_number"), ] diff --git a/safe_transaction_service/history/migrations/0063_alter_internaltx__from.py b/safe_transaction_service/history/migrations/0063_alter_internaltx__from.py index 39bce9ca..7331b413 100644 --- a/safe_transaction_service/history/migrations/0063_alter_internaltx__from.py +++ b/safe_transaction_service/history/migrations/0063_alter_internaltx__from.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0062_add_safe_last_status_20220429_0922"), ] diff --git a/safe_transaction_service/history/migrations/0064_auto_20220523_0919.py b/safe_transaction_service/history/migrations/0064_auto_20220523_0919.py index be105a82..7bd3f130 100644 --- a/safe_transaction_service/history/migrations/0064_auto_20220523_0919.py +++ b/safe_transaction_service/history/migrations/0064_auto_20220523_0919.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0063_alter_internaltx__from"), ] diff --git a/safe_transaction_service/history/migrations/0065_safelaststatus_history_saf_enabled_020fea_gin.py b/safe_transaction_service/history/migrations/0065_safelaststatus_history_saf_enabled_020fea_gin.py index 144e9b8a..8b5c5151 100644 --- a/safe_transaction_service/history/migrations/0065_safelaststatus_history_saf_enabled_020fea_gin.py +++ b/safe_transaction_service/history/migrations/0065_safelaststatus_history_saf_enabled_020fea_gin.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0064_auto_20220523_0919"), ] diff --git a/safe_transaction_service/history/migrations/0066_auto_20220628_1125.py b/safe_transaction_service/history/migrations/0066_auto_20220628_1125.py index 69677f50..c00dbb8f 100644 --- a/safe_transaction_service/history/migrations/0066_auto_20220628_1125.py +++ b/safe_transaction_service/history/migrations/0066_auto_20220628_1125.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0065_safelaststatus_history_saf_enabled_020fea_gin"), ] diff --git a/safe_transaction_service/history/migrations/0067_auto_20220705_1545.py b/safe_transaction_service/history/migrations/0067_auto_20220705_1545.py index e5ce0097..36984b94 100644 --- a/safe_transaction_service/history/migrations/0067_auto_20220705_1545.py +++ b/safe_transaction_service/history/migrations/0067_auto_20220705_1545.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0066_auto_20220628_1125"), ] diff --git a/safe_transaction_service/history/migrations/0068_alter_multisigtransaction_origin.py b/safe_transaction_service/history/migrations/0068_alter_multisigtransaction_origin.py index 69498ead..ff2528b3 100644 --- a/safe_transaction_service/history/migrations/0068_alter_multisigtransaction_origin.py +++ 
b/safe_transaction_service/history/migrations/0068_alter_multisigtransaction_origin.py @@ -30,13 +30,12 @@ def repair_backward_migration(apps, schema_editor): transactions = MultisigTransaction.objects.filter(origin__isnull=False).iterator() for transaction in transactions: value = json.loads(transaction.origin) - if type(value) == str: + if isinstance(value, str): transaction.origin = value transaction.save() class Migration(migrations.Migration): - dependencies = [ ("history", "0067_auto_20220705_1545"), ] diff --git a/safe_transaction_service/history/migrations/0069_indexingstatus_and_more.py b/safe_transaction_service/history/migrations/0069_indexingstatus_and_more.py index 88051eb3..94b0e55b 100644 --- a/safe_transaction_service/history/migrations/0069_indexingstatus_and_more.py +++ b/safe_transaction_service/history/migrations/0069_indexingstatus_and_more.py @@ -50,7 +50,6 @@ def remove_indexing_status(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("history", "0068_alter_multisigtransaction_origin"), ] diff --git a/safe_transaction_service/history/migrations/0070_chain.py b/safe_transaction_service/history/migrations/0070_chain.py index 3852c79a..3f787843 100644 --- a/safe_transaction_service/history/migrations/0070_chain.py +++ b/safe_transaction_service/history/migrations/0070_chain.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0069_indexingstatus_and_more"), ] diff --git a/safe_transaction_service/history/migrations/0071_alter_ethereumblock_confirmed_and_more.py b/safe_transaction_service/history/migrations/0071_alter_ethereumblock_confirmed_and_more.py index 06ff94af..e0b26d32 100644 --- a/safe_transaction_service/history/migrations/0071_alter_ethereumblock_confirmed_and_more.py +++ b/safe_transaction_service/history/migrations/0071_alter_ethereumblock_confirmed_and_more.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("history", "0070_chain"), ] diff --git a/safe_transaction_service/history/migrations/0075_multisigtransaction_proposer.py b/safe_transaction_service/history/migrations/0075_multisigtransaction_proposer.py new file mode 100644 index 00000000..5f4d6f4e --- /dev/null +++ b/safe_transaction_service/history/migrations/0075_multisigtransaction_proposer.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.5 on 2023-10-05 09:23 + +from django.db import migrations + +import gnosis.eth.django.models + + +class Migration(migrations.Migration): + dependencies = [ + ("history", "0074_internaltx_history_internal_transfer_idx"), + ] + + operations = [ + migrations.AddField( + model_name="multisigtransaction", + name="proposer", + field=gnosis.eth.django.models.EthereumAddressV2Field(null=True), + ), + ] diff --git a/safe_transaction_service/history/migrations/0076_alter_safemastercopy_deployer.py b/safe_transaction_service/history/migrations/0076_alter_safemastercopy_deployer.py new file mode 100644 index 00000000..e463107d --- /dev/null +++ b/safe_transaction_service/history/migrations/0076_alter_safemastercopy_deployer.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.5 on 2023-10-09 12:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("history", "0075_multisigtransaction_proposer"), + ] + + operations = [ + migrations.AlterField( + model_name="safemastercopy", + name="deployer", + field=models.CharField(default="Safe", max_length=50), + ), + ] diff --git 
a/safe_transaction_service/history/migrations/0077_alter_safecontractdelegate_delegate_and_more.py b/safe_transaction_service/history/migrations/0077_alter_safecontractdelegate_delegate_and_more.py new file mode 100644 index 00000000..f5f38c5e --- /dev/null +++ b/safe_transaction_service/history/migrations/0077_alter_safecontractdelegate_delegate_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.6 on 2023-11-06 11:27 + +from django.db import migrations + +import gnosis.eth.django.models + + +class Migration(migrations.Migration): + dependencies = [ + ("history", "0076_alter_safemastercopy_deployer"), + ] + + operations = [ + migrations.AlterField( + model_name="safecontractdelegate", + name="delegate", + field=gnosis.eth.django.models.EthereumAddressV2Field(db_index=True), + ), + migrations.AlterField( + model_name="safecontractdelegate", + name="delegator", + field=gnosis.eth.django.models.EthereumAddressV2Field(db_index=True), + ), + ] diff --git a/safe_transaction_service/history/migrations/0078_remove_safestatus_history_saf_address_1c362b_idx_and_more.py b/safe_transaction_service/history/migrations/0078_remove_safestatus_history_saf_address_1c362b_idx_and_more.py new file mode 100644 index 00000000..aa8b1ff8 --- /dev/null +++ b/safe_transaction_service/history/migrations/0078_remove_safestatus_history_saf_address_1c362b_idx_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.7 on 2024-02-16 17:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("history", "0077_alter_safecontractdelegate_delegate_and_more"), + ] + + operations = [ + migrations.RemoveIndex( + model_name="safestatus", + name="history_saf_address_1c362b_idx", + ), + migrations.AddIndex( + model_name="multisigtransaction", + index=models.Index( + fields=["safe", "-nonce", "-created"], + name="history_multisigtx_safe_sorted", + ), + ), + ] diff --git a/safe_transaction_service/history/migrations/0079_alter_erc20transfer_unique_together_and_more.py b/safe_transaction_service/history/migrations/0079_alter_erc20transfer_unique_together_and_more.py new file mode 100644 index 00000000..685a51fe --- /dev/null +++ b/safe_transaction_service/history/migrations/0079_alter_erc20transfer_unique_together_and_more.py @@ -0,0 +1,86 @@ +# Generated by Django 4.2.10 on 2024-02-27 15:58 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("history", "0078_remove_safestatus_history_saf_address_1c362b_idx_and_more"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="erc20transfer", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="erc721transfer", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="internaltx", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="multisigconfirmation", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="safecontractdelegate", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="safestatus", + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name="webhook", + unique_together=set(), + ), + migrations.AddConstraint( + model_name="erc20transfer", + constraint=models.UniqueConstraint( + fields=("ethereum_tx", "log_index"), name="unique_erc20_transfer_index" + ), + ), + migrations.AddConstraint( + model_name="erc721transfer", + constraint=models.UniqueConstraint( + fields=("ethereum_tx", "log_index"), name="unique_erc721_transfer_index" + ), + ), + 
migrations.AddConstraint( + model_name="internaltx", + constraint=models.UniqueConstraint( + fields=("ethereum_tx", "trace_address"), + name="unique_internal_tx_trace_address", + ), + ), + migrations.AddConstraint( + model_name="multisigconfirmation", + constraint=models.UniqueConstraint( + fields=("multisig_transaction_hash", "owner"), + name="unique_multisig_transaction_owner_confirmation", + ), + ), + migrations.AddConstraint( + model_name="safecontractdelegate", + constraint=models.UniqueConstraint( + fields=("safe_contract", "delegate", "delegator"), + name="unique_safe_contract_delegate_delegator", + ), + ), + migrations.AddConstraint( + model_name="safestatus", + constraint=models.UniqueConstraint( + fields=("internal_tx", "address"), name="unique_safe_tx_address_status" + ), + ), + migrations.AddConstraint( + model_name="webhook", + constraint=models.UniqueConstraint( + fields=("address", "url"), name="unique_webhook_address_url" + ), + ), + ] diff --git a/safe_transaction_service/history/migrations/0080_alter_multisigconfirmation_signature.py b/safe_transaction_service/history/migrations/0080_alter_multisigconfirmation_signature.py new file mode 100644 index 00000000..2cbdd488 --- /dev/null +++ b/safe_transaction_service/history/migrations/0080_alter_multisigconfirmation_signature.py @@ -0,0 +1,30 @@ +# Generated by Django 5.0.3 on 2024-03-20 12:54 + +from django.db import migrations + +import gnosis.eth.django.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("history", "0079_alter_erc20transfer_unique_together_and_more"), + ] + + operations = [ + migrations.RunSQL( + """ + ALTER TABLE "history_multisigconfirmation" ALTER COLUMN "signature" TYPE bytea USING DECODE("signature", 'hex'); + """, + reverse_sql=""" + ALTER TABLE "history_multisigconfirmation" ALTER COLUMN "signature" TYPE text USING ENCODE("signature"::bytea, 'hex'); + """, + ), + migrations.AlterField( + model_name="multisigconfirmation", + name="signature", + field=gnosis.eth.django.models.HexV2Field( + default=None, max_length=5000, null=True + ), + ), + ] diff --git a/safe_transaction_service/history/migrations/0081_internaltx_history_internal_transfer_from.py b/safe_transaction_service/history/migrations/0081_internaltx_history_internal_transfer_from.py new file mode 100644 index 00000000..24957f99 --- /dev/null +++ b/safe_transaction_service/history/migrations/0081_internaltx_history_internal_transfer_from.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.3 on 2024-04-05 10:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("history", "0080_alter_multisigconfirmation_signature"), + ] + + operations = [ + migrations.AddIndex( + model_name="internaltx", + index=models.Index( + condition=models.Q(("call_type", 0), ("value__gt", 0)), + fields=["_from", "timestamp"], + include=("ethereum_tx_id", "block_number"), + name="history_internal_transfer_from", + ), + ), + ] diff --git a/safe_transaction_service/history/models.py b/safe_transaction_service/history/models.py index 3ff15a5f..924b8dc8 100644 --- a/safe_transaction_service/history/models.py +++ b/safe_transaction_service/history/models.py @@ -10,6 +10,7 @@ Iterator, List, Optional, + Self, Sequence, Set, Tuple, @@ -27,6 +28,7 @@ from django.db.models import Case, Count, Exists, Index, JSONField, Max, Q, QuerySet from django.db.models.expressions import F, OuterRef, RawSQL, Subquery, Value, When from django.db.models.functions import Coalesce +from django.db.models.query import 
RawQuerySet from django.db.models.signals import post_save from django.utils import timezone from django.utils.translation import gettext_lazy as _ @@ -40,16 +42,22 @@ from gnosis.eth.constants import ERC20_721_TRANSFER_TOPIC, NULL_ADDRESS from gnosis.eth.django.models import ( EthereumAddressV2Field, - HexField, + HexV2Field, Keccak256Field, Uint256Field, ) from gnosis.eth.utils import fast_to_checksum_address -from gnosis.safe import SafeOperation +from gnosis.safe import SafeOperationEnum from gnosis.safe.safe import SafeInfo from gnosis.safe.safe_signature import SafeSignature, SafeSignatureType +from safe_transaction_service.account_abstraction.constants import ( + USER_OPERATION_EVENT_TOPIC, +) from safe_transaction_service.contracts.models import Contract +from safe_transaction_service.utils.constants import ( + SIGNATURE_LENGTH as MAX_SIGNATURE_LENGTH, +) from .utils import clean_receipt_log @@ -69,7 +77,7 @@ class EthereumTxCallType(Enum): STATIC_CALL = 3 @staticmethod - def parse_call_type(call_type: Optional[str]): + def parse_call_type(call_type: Optional[str]) -> Optional[Self]: if not call_type: return None @@ -251,7 +259,7 @@ def create_from_block( f"Marking block as not confirmed" ) - @lru_cache(maxsize=10000) + @lru_cache(maxsize=10_000) def get_timestamp_by_hash(self, block_hash: HexBytes) -> datetime.datetime: try: return self.values("timestamp").get(block_hash=block_hash)["timestamp"] @@ -360,6 +368,16 @@ def create_from_tx_dict( type=tx.get("type", 0), ) + def account_abstraction_txs(self) -> RawQuerySet: + """ + :return: Transactions containing ERC4337 `UserOperation` event + """ + query = '{"topics": ["' + USER_OPERATION_EVENT_TOPIC.hex() + '"]}' + + return self.raw( + f"SELECT * FROM history_ethereumtx WHERE '{query}'::jsonb <@ ANY (logs)" + ) + class EthereumTx(TimeStampedModel): objects = EthereumTxManager() @@ -495,7 +513,11 @@ class Meta: Index(fields=["_from", "timestamp"]), Index(fields=["to", "timestamp"]), ] - unique_together = (("ethereum_tx", "log_index"),) + constraints = [ + models.UniqueConstraint( + fields=["ethereum_tx", "log_index"], name="unique_token_transfer_index" + ) + ] def __str__(self): return f"Token Transfer from={self._from} to={self.to}" @@ -560,7 +582,11 @@ class Meta(TokenTransfer.Meta): abstract = False verbose_name = "ERC20 Transfer" verbose_name_plural = "ERC20 Transfers" - unique_together = (("ethereum_tx", "log_index"),) + constraints = [ + models.UniqueConstraint( + fields=["ethereum_tx", "log_index"], name="unique_erc20_transfer_index" + ) + ] def __str__(self): return f"ERC20 Transfer from={self._from} to={self.to} value={self.value}" @@ -672,7 +698,11 @@ class Meta(TokenTransfer.Meta): abstract = False verbose_name = "ERC721 Transfer" verbose_name_plural = "ERC721 Transfers" - unique_together = (("ethereum_tx", "log_index"),) + constraints = [ + models.UniqueConstraint( + fields=["ethereum_tx", "log_index"], name="unique_erc721_transfer_index" + ) + ] def __str__(self): return ( @@ -957,7 +987,12 @@ class InternalTx(models.Model): error = models.CharField(max_length=200, null=True) class Meta: - unique_together = (("ethereum_tx", "trace_address"),) + constraints = [ + models.UniqueConstraint( + fields=["ethereum_tx", "trace_address"], + name="unique_internal_tx_trace_address", + ) + ] indexes = [ models.Index( name="history_internaltx_value_idx", @@ -973,6 +1008,12 @@ class Meta: include=["ethereum_tx_id", "block_number"], condition=Q(call_type=0) & Q(value__gt=0), ), + Index( + name="history_internal_transfer_from", + 
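`account_abstraction_txs` above drops to raw SQL because `logs` is stored as an array of JSONB receipt logs, and "any element contains this topic" is awkward to express through the ORM; the `<@ ANY (logs)` containment test does it in one pass on PostgreSQL. A short usage sketch (attribute access beyond the primary key is illustrative):

```python
# Iterate transactions whose receipts emitted the ERC-4337 UserOperationEvent topic.
# The result is a RawQuerySet: it can be iterated, but not chained with .filter().
for ethereum_tx in EthereumTx.objects.account_abstraction_txs():
    print(ethereum_tx.pk, len(ethereum_tx.logs or []))
```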
fields=["_from", "timestamp"], + include=["ethereum_tx_id", "block_number"], + condition=Q(call_type=0) & Q(value__gt=0), + ), ] def __str__(self): @@ -1099,7 +1140,7 @@ def order_by_processing_queue(self): """ :return: Transactions ordered to be processed. First `setup` and then older transactions """ - return self.annotate( + return self.alias( is_setup=Case( When(function_name="setup", then=Value(0)), default=Value(1), @@ -1124,7 +1165,7 @@ def pending_for_safe(self, safe_address: ChecksumAddress): return ( self.pending_for_safes() .filter(internal_tx___from=safe_address) - .select_related("internal_tx") + .select_related("internal_tx", "internal_tx__ethereum_tx") ) def safes_pending_to_be_processed(self) -> QuerySet[ChecksumAddress]: @@ -1316,12 +1357,26 @@ def with_confirmations_required(self): :return: queryset with `confirmations_required: int` field """ + + """ + SafeStatus works the following way: + - First entry of any Multisig Transactions is `execTransaction`, that increments the nonce. + - Next entries are configuration changes on the Safe. + For example, for a Multisig Transaction with nonce 1 changing the threshold the `SafeStatus` table + will look like: + - setup with nonce 0 + - execTransaction with nonce already increased to 1 for a previous Multisig Transaction. + - execTransaction with nonce already increased to 2, old threshold and internal_tx_id=7 (auto increased id). + - changeThreshold with nonce already increased to 2, new threshold and internal_tx_id=8 (any number + higher than 7). + We need to get the previous entry to get the proper threshold at that point before it's changed. + """ threshold_safe_status_query = ( SafeStatus.objects.filter( address=OuterRef("safe"), - internal_tx__ethereum_tx=OuterRef("ethereum_tx"), + nonce=OuterRef("nonce"), ) - .sorted_reverse_by_mined() + .order_by("-internal_tx_id") .values("threshold") ) @@ -1353,7 +1408,7 @@ def queued(self, safe_address: str): ) return ( self.not_executed() - .annotate( + .alias( max_executed_nonce=Coalesce( Subquery(subquery), Value(-1), output_field=Uint256Field() ) @@ -1366,6 +1421,7 @@ class MultisigTransaction(TimeStampedModel): objects = MultisigTransactionManager.from_queryset(MultisigTransactionQuerySet)() safe_tx_hash = Keccak256Field(primary_key=True) safe = EthereumAddressV2Field(db_index=True) + proposer = EthereumAddressV2Field(null=True) ethereum_tx = models.ForeignKey( EthereumTx, null=True, @@ -1378,7 +1434,7 @@ class MultisigTransaction(TimeStampedModel): value = Uint256Field() data = models.BinaryField(null=True, blank=True, editable=True) operation = models.PositiveSmallIntegerField( - choices=[(tag.value, tag.name) for tag in SafeOperation] + choices=[(tag.value, tag.name) for tag in SafeOperationEnum] ) safe_tx_gas = Uint256Field() base_gas = Uint256Field() @@ -1397,6 +1453,12 @@ class Meta: permissions = [ ("create_trusted", "Can create trusted transactions"), ] + indexes = [ + Index( + name="history_multisigtx_safe_sorted", + fields=["safe", "-nonce", "-created"], + ), + ] def __str__(self): return f"{self.safe} - {self.nonce} - {self.safe_tx_hash}" @@ -1429,7 +1491,7 @@ def data_should_be_decoded(self) -> bool: :return: `True` if data should be decoded, `False` otherwise """ return not ( - self.operation == SafeOperation.DELEGATE_CALL.value + self.operation == SafeOperationEnum.DELEGATE_CALL.value and self.to not in Contract.objects.trusted_addresses_for_delegate_call() ) @@ -1462,7 +1524,7 @@ class ModuleTransaction(TimeStampedModel): value = Uint256Field() data = 
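The `history_internal_transfer_from` index added above (and by migration 0081) is a partial, covering index: it only indexes plain calls carrying value (`call_type=0 AND value > 0`, i.e. ether transfers) and includes `ethereum_tx_id`/`block_number` in the index leaf, so transfer lookups can often be answered from the index alone. A sketch of the query shape it serves (variable names are illustrative):

```python
# Ether sent from a Safe, newest first, reading only columns the index covers.
outgoing_ether_transfers = (
    InternalTx.objects.filter(_from=safe_address, call_type=0, value__gt=0)
    .order_by("-timestamp")
    .values("ethereum_tx_id", "block_number", "timestamp")
)
```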
models.BinaryField(null=True) operation = models.PositiveSmallIntegerField( - choices=[(tag.value, tag.name) for tag in SafeOperation] + choices=[(tag.value, tag.name) for tag in SafeOperationEnum] ) failed = models.BooleanField(default=False) @@ -1521,13 +1583,18 @@ class MultisigConfirmation(TimeStampedModel): ) # Use this while we don't have a `multisig_transaction` owner = EthereumAddressV2Field() - signature = HexField(null=True, default=None, max_length=5000) + signature = HexV2Field(null=True, default=None, max_length=MAX_SIGNATURE_LENGTH) signature_type = models.PositiveSmallIntegerField( choices=[(tag.value, tag.name) for tag in SafeSignatureType], db_index=True ) class Meta: - unique_together = (("multisig_transaction_hash", "owner"),) + constraints = [ + models.UniqueConstraint( + fields=["multisig_transaction_hash", "owner"], + name="unique_multisig_transaction_owner_confirmation", + ) + ] ordering = ["created"] def __str__(self): @@ -1605,7 +1672,7 @@ def relevant(self): class SafeMasterCopy(MonitoredAddress): objects = SafeMasterCopyManager.from_queryset(SafeMasterCopyQueryset)() version = models.CharField(max_length=20, validators=[validate_version]) - deployer = models.CharField(max_length=50, default="Gnosis") + deployer = models.CharField(max_length=50, default="Safe") l2 = models.BooleanField(default=False) class Meta: @@ -1617,70 +1684,6 @@ class SafeContractManager(models.Manager): def get_banned_safes(self) -> QuerySet[ChecksumAddress]: return self.filter(banned=True).values_list("address", flat=True) - def get_count_relevant_txs_for_safe(self, address: ChecksumAddress) -> int: - """ - This method searches multiple tables and count every tx or event for a Safe. - It will return the same or higher value if compared to counting ``get_all_tx_identifiers`` - as that method will group some transactions (for example, 3 ERC20 can be grouped in a ``MultisigTransaction``, - so it will be ``1`` element for ``get_all_tx_identifiers`` but ``4`` for this function. - - This query should be pretty fast, and it's meant to be used for invalidating caches. 
- - :param address: - :return: number of relevant txs for a Safe - """ - - query = """ - SELECT SUM(count_all) - FROM ( - -- Get multisig transactions - SELECT COUNT(*) AS count_all - FROM "history_multisigtransaction" - WHERE "history_multisigtransaction"."safe" = %s - UNION ALL - -- Get confirmations - SELECT COUNT(*) - FROM "history_multisigtransaction" - JOIN "history_multisigconfirmation" ON "history_multisigtransaction"."safe_tx_hash" = "history_multisigconfirmation"."multisig_transaction_id" - WHERE "history_multisigtransaction"."safe" = %s - UNION ALL - -- Get ERC20 Transfers - SELECT COUNT(*) - FROM "history_erc20transfer" - WHERE ( - "history_erc20transfer"."to" = %s - OR "history_erc20transfer"."_from" = %s - ) - UNION ALL - -- Get ERC721 Transfers - SELECT COUNT(*) - FROM "history_erc721transfer" - WHERE ( - "history_erc721transfer"."to" = %s - OR "history_erc721transfer"."_from" = %s - ) - UNION ALL - -- Get Ether Transfers - SELECT COUNT(*) - FROM "history_internaltx" - WHERE ( - "history_internaltx"."call_type" = 0 - AND "history_internaltx"."to" = %s - AND "history_internaltx"."value" > 0 - ) - UNION ALL - -- Get Module Transactions - SELECT COUNT(*) - FROM "history_moduletransaction" - WHERE "history_moduletransaction"."safe" = %s - ) subquery - """ - - with connection.cursor() as cursor: - hex_address = HexBytes(address) - cursor.execute(query, [hex_address] * 8) - return cursor.fetchone()[0] - class SafeContract(models.Model): objects = SafeContractManager() @@ -1714,33 +1717,55 @@ def created_block_number(self) -> Optional[Type[int]]: class SafeContractDelegateManager(models.Manager): - def get_delegates_for_safe(self, address: ChecksumAddress) -> Set[ChecksumAddress]: - return set( - self.filter(safe_contract_id=address) - .values_list("delegate", flat=True) - .distinct() + def get_for_safe( + self, safe_address: ChecksumAddress, owner_addresses: Sequence[ChecksumAddress] + ) -> QuerySet["SafeContractDelegate"]: + if not owner_addresses: + return self.none() + + return self.filter( + # If safe_contract is null on SafeContractDelegate, delegates are valid for every Safe + Q(safe_contract_id=safe_address) + | Q(safe_contract=None) + ).filter(delegator__in=owner_addresses) + + def get_for_safe_and_delegate( + self, + safe_address: ChecksumAddress, + owner_addresses: Sequence[ChecksumAddress], + delegate: ChecksumAddress, + ) -> QuerySet["SafeContractDelegate"]: + return self.get_for_safe(safe_address, owner_addresses).filter( + delegate=delegate ) def get_delegates_for_safe_and_owners( self, safe_address: ChecksumAddress, owner_addresses: Sequence[ChecksumAddress] ) -> Set[ChecksumAddress]: - if not owner_addresses: - return set() return set( - self.filter( - # If safe_contract is null on SafeContractDelegate, delegates are valid for every Safe - Q(safe_contract_id=safe_address) - | Q(safe_contract=None) - ) - .filter(delegator__in=owner_addresses) + self.get_for_safe(safe_address, owner_addresses) .values_list("delegate", flat=True) .distinct() ) + def remove_delegates_for_owner_in_safe( + self, safe_address: ChecksumAddress, owner_address: ChecksumAddress + ) -> int: + """ + This method deletes delegated users only if the safe address and the owner address match. + Used when an owner is removed from the Safe. 
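The rewritten manager centralises delegate lookups: `get_for_safe` accepts both Safe-specific rows and the `safe_contract=None` wildcard rows, and `remove_delegates_for_owner_in_safe` (completed just below) cleans up after an owner removal. A hedged usage sketch; local variable names are illustrative:

```python
owners = get_safe_owners(safe_address)  # current on-chain owners of the Safe

# Delegates allowed to propose transactions for this Safe on behalf of its owners
delegates = SafeContractDelegate.objects.get_delegates_for_safe_and_owners(
    safe_address, owners
)

# Resolve which owner delegated to a given sender (used later when recording the proposer)
delegation = SafeContractDelegate.objects.get_for_safe_and_delegate(
    safe_address, owners, sender_address
).first()

# When an owner is removed from the Safe, drop its delegations for that Safe
deleted_rows = SafeContractDelegate.objects.remove_delegates_for_owner_in_safe(
    safe_address, removed_owner_address
)
```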
+ + :return: number of delegated users deleted + """ + return self.filter( + safe_contract_id=safe_address, delegator=owner_address + ).delete()[0] + class SafeContractDelegate(models.Model): """ - The owners of the Safe can add users so they can propose/retrieve txs as if they were the owners of the Safe + Owners (delegators) can delegate on delegates, so they can propose trusted transactions + in their name """ objects = SafeContractDelegateManager() @@ -1750,15 +1775,20 @@ class SafeContractDelegate(models.Model): related_name="safe_contract_delegates", null=True, default=None, - ) - delegate = EthereumAddressV2Field() - delegator = EthereumAddressV2Field() # Owner who created the delegate + ) # If safe_contract is not defined, delegate is valid for every Safe which delegator is an owner + delegate = EthereumAddressV2Field(db_index=True) + delegator = EthereumAddressV2Field(db_index=True) # Owner who created the delegate label = models.CharField(max_length=50) read = models.BooleanField(default=True) # For permissions in the future write = models.BooleanField(default=True) class Meta: - unique_together = (("safe_contract", "delegate", "delegator"),) + constraints = [ + models.UniqueConstraint( + fields=["safe_contract", "delegate", "delegator"], + name="unique_safe_contract_delegate_delegator", + ) + ] def __str__(self): return ( @@ -1971,9 +2001,12 @@ class SafeStatus(SafeStatusBase): class Meta: indexes = [ Index(fields=["address", "-nonce"]), # Index on address and nonce DESC - Index(fields=["address", "-nonce", "-internal_tx"]), # For Window search ] - unique_together = (("internal_tx", "address"),) + constraints = [ + models.UniqueConstraint( + fields=["internal_tx", "address"], name="unique_safe_tx_address_status" + ) + ] verbose_name_plural = "Safe statuses" def __str__(self): @@ -2007,6 +2040,7 @@ class WebHookType(Enum): OUTGOING_TOKEN = 9 MESSAGE_CREATED = 10 MESSAGE_CONFIRMATION = 11 + DELETED_MULTISIG_TRANSACTION = 12 class WebHookQuerySet(models.QuerySet): @@ -2060,7 +2094,11 @@ class WebHook(models.Model): ) class Meta: - unique_together = (("address", "url"),) + constraints = [ + models.UniqueConstraint( + fields=["address", "url"], name="unique_webhook_address_url" + ) + ] def __str__(self): if self.address: diff --git a/safe_transaction_service/history/pagination.py b/safe_transaction_service/history/pagination.py index 07d92526..f9f1a446 100644 --- a/safe_transaction_service/history/pagination.py +++ b/safe_transaction_service/history/pagination.py @@ -25,3 +25,20 @@ def __init__(self, request: HttpRequest): def set_count(self, value): self.count = value + + +class DummyPagination(LimitOffsetPagination): + """ + Class to easily get limit and offset from a request, not intended to be used + as a pagination class + """ + + def __init__(self, request: HttpRequest): + super().__init__() + self.request = request + self.limit = self.get_limit(request) + self.offset = self.get_offset(request) + self.count: int = 0 + + def set_count(self, value): + self.count = value diff --git a/safe_transaction_service/history/serializers.py b/safe_transaction_service/history/serializers.py index 7c88d237..895d5e8a 100644 --- a/safe_transaction_service/history/serializers.py +++ b/safe_transaction_service/history/serializers.py @@ -2,12 +2,15 @@ from enum import Enum from typing import Any, Dict, List, Optional +from django.http import Http404 + from drf_yasg.utils import swagger_serializer_method from eth_typing import ChecksumAddress, HexStr from rest_framework import serializers from 
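`DummyPagination` above only borrows DRF's limit/offset parsing; it never drives a paginated response itself. A short usage sketch (view, queryset and default page size are assumptions):

```python
pagination = DummyPagination(request)  # reads ?limit= and ?offset= from the request

queryset = MultisigTransaction.objects.filter(safe=safe_address)
total = queryset.count()
# Assuming a default page size is configured, so `limit` is never None
page = queryset[pagination.offset : pagination.offset + pagination.limit]
pagination.set_count(total)  # kept so the caller can build its own paginated response
```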
rest_framework.exceptions import NotFound, ValidationError from gnosis.eth import EthereumClient, EthereumClientProvider +from gnosis.eth.constants import NULL_ADDRESS from gnosis.eth.django.models import EthereumAddressV2Field as EthereumAddressDbField from gnosis.eth.django.models import Keccak256Field as Keccak256DbField from gnosis.eth.django.serializers import ( @@ -16,19 +19,25 @@ Sha3HashField, ) from gnosis.safe import Safe -from gnosis.safe.safe_signature import SafeSignature, SafeSignatureType +from gnosis.safe.safe_signature import EthereumBytes, SafeSignature, SafeSignatureType from gnosis.safe.serializers import SafeMultisigTxSerializerV1 +from safe_transaction_service.account_abstraction import serializers as aa_serializers from safe_transaction_service.contracts.tx_decoder import ( TxDecoderException, get_db_tx_decoder, ) from safe_transaction_service.tokens.serializers import TokenInfoResponseSerializer -from safe_transaction_service.utils.serializers import get_safe_owners, get_safe_version +from safe_transaction_service.utils.serializers import get_safe_owners from .exceptions import NodeConnectionException -from .helpers import DelegateSignatureHelper +from .helpers import ( + DelegateSignatureHelper, + DelegateSignatureHelperV2, + DeleteMultisigTxSignatureHelper, +) from .models import ( + MAX_SIGNATURE_LENGTH, EthereumTx, ModuleTransaction, MultisigConfirmation, @@ -60,27 +69,46 @@ class GnosisBaseModelSerializer(serializers.ModelSerializer): # Request Serializers # ================================================ # class SafeMultisigConfirmationSerializer(serializers.Serializer): - signature = HexadecimalField(min_length=65) # Signatures must be at least 65 bytes + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) def validate_signature(self, signature: bytes): safe_tx_hash = self.context["safe_tx_hash"] try: - multisig_transaction = MultisigTransaction.objects.select_related( - "ethereum_tx" - ).get(safe_tx_hash=safe_tx_hash) + multisig_transaction: MultisigTransaction = ( + MultisigTransaction.objects.select_related("ethereum_tx").get( + safe_tx_hash=safe_tx_hash + ) + ) except MultisigTransaction.DoesNotExist as exc: raise NotFound( f"Multisig transaction with safe-tx-hash={safe_tx_hash} was not found" ) from exc - safe_address = multisig_transaction.safe if multisig_transaction.executed: raise ValidationError( f"Transaction with safe-tx-hash={safe_tx_hash} was already executed" ) + safe_address = multisig_transaction.safe + ethereum_client = EthereumClientProvider() + safe = Safe(safe_address, ethereum_client) + safe_tx = safe.build_multisig_tx( + multisig_transaction.to, + multisig_transaction.value, + multisig_transaction.data, + multisig_transaction.operation, + multisig_transaction.safe_tx_gas, + multisig_transaction.base_gas, + multisig_transaction.gas_price, + multisig_transaction.gas_token, + multisig_transaction.refund_receiver, + safe_nonce=multisig_transaction.nonce, + ) + safe_owners = get_safe_owners(safe_address) - parsed_signatures = SafeSignature.parse_signature(signature, safe_tx_hash) + parsed_signatures = SafeSignature.parse_signature( + signature, safe_tx_hash, safe_tx.safe_tx_hash_preimage + ) signature_owners = [] ethereum_client = EthereumClientProvider() for safe_signature in parsed_signatures: @@ -124,6 +152,7 @@ def save(self, **kwargs): class SafeMultisigTransactionSerializer(SafeMultisigTxSerializerV1): + to = EthereumAddressField(allow_zero_address=True, allow_sentinel_address=True) contract_transaction_hash = 
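Rebuilding the `SafeTx` lets the confirmation endpoint hand `safe_tx_hash_preimage` to `SafeSignature.parse_signature`, so signature schemes that need the full EIP-712 payload rather than only the final hash (for example EIP-1271 contract signatures) can be verified. A condensed sketch of that step, assuming `safe_tx`, `signature`, `safe_tx_hash` and `ethereum_client` are built exactly as in the serializer above:

```python
parsed_signatures = SafeSignature.parse_signature(
    signature, safe_tx_hash, safe_tx.safe_tx_hash_preimage
)
confirmed_owners = [
    safe_signature.owner
    for safe_signature in parsed_signatures
    if safe_signature.is_valid(ethereum_client, safe_signature.owner)
]
```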
Sha3HashField() sender = EthereumAddressField() # TODO Make signature mandatory @@ -149,9 +178,8 @@ def validate(self, attrs): ethereum_client = EthereumClientProvider() safe_address = attrs["safe"] - safe_version = get_safe_version(safe_address) - safe = Safe(safe_address, EthereumClientProvider()) + safe = Safe(safe_address, ethereum_client) safe_tx = safe.build_multisig_tx( attrs["to"], attrs["value"], @@ -163,14 +191,13 @@ def validate(self, attrs): attrs["gas_token"], attrs["refund_receiver"], safe_nonce=attrs["nonce"], - safe_version=safe_version, ) - contract_transaction_hash = safe_tx.safe_tx_hash + safe_tx_hash = safe_tx.safe_tx_hash # Check safe tx hash matches - if contract_transaction_hash != attrs["contract_transaction_hash"]: + if safe_tx_hash != attrs["contract_transaction_hash"]: raise ValidationError( - f"Contract-transaction-hash={contract_transaction_hash.hex()} " + f"Contract-transaction-hash={safe_tx_hash.hex()} " f'does not match provided contract-tx-hash={attrs["contract_transaction_hash"].hex()}' ) @@ -181,9 +208,9 @@ def validate(self, attrs): ).executed() if multisig_transactions: for multisig_transaction in multisig_transactions: - if multisig_transaction.safe_tx_hash == contract_transaction_hash.hex(): + if multisig_transaction.safe_tx_hash == safe_tx_hash.hex(): raise ValidationError( - f"Tx with safe-tx-hash={contract_transaction_hash.hex()} " + f"Tx with safe-tx-hash={safe_tx_hash.hex()} " f"for safe={safe_address} was already executed in " f"tx-hash={multisig_transaction.ethereum_tx_id}" ) @@ -210,7 +237,7 @@ def validate(self, attrs): # TODO Make signature mandatory signature = attrs.get("signature", b"") parsed_signatures = SafeSignature.parse_signature( - signature, contract_transaction_hash + signature, safe_tx_hash, safe_hash_preimage=safe_tx.safe_tx_hash_preimage ) attrs["parsed_signatures"] = parsed_signatures # If there's at least one signature, transaction is trusted (until signatures are mandatory) @@ -258,15 +285,28 @@ def save(self, **kwargs): ): trusted = user.has_perm("history.create_trusted") + if self.validated_data["sender"] in self.validated_data["safe_owners"]: + proposer = self.validated_data["sender"] + else: + proposer = ( + SafeContractDelegate.objects.get_for_safe_and_delegate( + self.validated_data["safe"], + self.validated_data["safe_owners"], + self.validated_data["sender"], + ) + .first() + .delegator + ) + multisig_transaction, created = MultisigTransaction.objects.get_or_create( safe_tx_hash=safe_tx_hash, defaults={ "safe": self.validated_data["safe"], "to": self.validated_data["to"], "value": self.validated_data["value"], - "data": self.validated_data["data"] - if self.validated_data["data"] - else None, + "data": ( + self.validated_data["data"] if self.validated_data["data"] else None + ), "operation": self.validated_data["operation"], "safe_tx_gas": self.validated_data["safe_tx_gas"], "base_gas": self.validated_data["base_gas"], @@ -276,6 +316,7 @@ def save(self, **kwargs): "nonce": self.validated_data["nonce"], "origin": origin, "trusted": trusted, + "proposer": proposer, }, ) @@ -326,91 +367,88 @@ def save(self, **kwargs): ) from exc -class DelegateSignatureCheckerMixin: +class SafeDelegateResponseSerializer(serializers.Serializer): + safe = EthereumAddressField(source="safe_contract_id") + delegate = EthereumAddressField() + delegator = EthereumAddressField() + label = serializers.CharField(max_length=50) + + +class DelegateSerializerMixin: """ - Mixin to include delegate signature validation + Mixin to validate delegate 
operations data """ - def check_delegate_signature( - self, - ethereum_client: EthereumClient, - signature: bytes, - operation_hash: bytes, - delegator: ChecksumAddress, - ) -> bool: - """ - Checks signature and returns a valid owner if found, None otherwise + def validate_safe_address_and_delegator( + self, safe_address: ChecksumAddress, delegator: ChecksumAddress + ) -> None: + if ( + safe_address + and not SafeContract.objects.filter(address=safe_address).exists() + ): + raise ValidationError( + f"Safe={safe_address} does not exist or it's still not indexed" + ) - :param ethereum_client: - :param signature: - :param operation_hash: - :param delegator: - :return: `True` if signature is valid for the delegator, `False` otherwise - """ - safe_signatures = SafeSignature.parse_signature(signature, operation_hash) - if not safe_signatures: - raise ValidationError("Signature is not valid") + if safe_address: + # Valid delegators must be owners + valid_delegators = get_safe_owners(safe_address) + if delegator not in valid_delegators: + raise ValidationError( + f"Provided delegator={delegator} is not an owner of Safe={safe_address}" + ) - if len(safe_signatures) > 1: - raise ValidationError( - "More than one signatures detected, just one is expected" + def validate_delegator_signature( + self, + delegate: ChecksumAddress, + signature: EthereumBytes, + signer: ChecksumAddress, + ) -> bool: + ethereum_client = EthereumClientProvider() + chain_id = ethereum_client.get_chain_id() + # Accept a message with the current topt and the previous totp (to prevent replay attacks) + for previous_totp in (True, False): + message_hash = DelegateSignatureHelperV2.calculate_hash( + delegate, chain_id, previous_totp=previous_totp ) + safe_signatures = SafeSignature.parse_signature(signature, message_hash) + if not safe_signatures: + raise ValidationError("Signature is not valid") - safe_signature = safe_signatures[0] - owner = safe_signature.owner - if owner == delegator: + if len(safe_signatures) > 1: + raise ValidationError( + "More than one signatures detected, just one is expected" + ) + safe_signature = safe_signatures[0] + owner = safe_signature.owner if not safe_signature.is_valid(ethereum_client, owner): raise ValidationError( f"Signature of type={safe_signature.signature_type.name} " - f"for delegator={delegator} is not valid" + f"for signer={signer} is not valid" ) - return True + if owner == signer: + return True return False -class DelegateSerializer(DelegateSignatureCheckerMixin, serializers.Serializer): +class DelegateSerializerV2(DelegateSerializerMixin, serializers.Serializer): safe = EthereumAddressField(allow_null=True, required=False, default=None) delegate = EthereumAddressField() delegator = EthereumAddressField() - signature = HexadecimalField(min_length=65) + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) label = serializers.CharField(max_length=50) def validate(self, attrs): super().validate(attrs) - safe_address: Optional[ChecksumAddress] = attrs.get("safe") - if ( - safe_address - and not SafeContract.objects.filter(address=safe_address).exists() - ): - raise ValidationError( - f"Safe={safe_address} does not exist or it's still not indexed" - ) - signature = attrs["signature"] - delegate = attrs["delegate"] # Delegate address to be added/removed - delegator = attrs[ - "delegator" - ] # Delegator giving permissions to delegate (signer) - - ethereum_client = EthereumClientProvider() - if safe_address: - # Valid delegators must be owners - valid_delegators = 
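`previous_totp` exists because the signed message embeds a time-based counter: a signature produced near the end of a window would otherwise be rejected the moment the counter rolls over, and an old signature cannot be replayed once both accepted windows have passed. A hedged sketch of such a counter (the window length is an assumption; the real construction lives in `DelegateSignatureHelperV2.calculate_hash`):

```python
import time

TOTP_WINDOW_SECONDS = 3600  # assumed window length


def calculate_totp(previous: bool = False) -> int:
    """Integer that changes once per window; `previous` selects the prior window."""
    totp = int(time.time()) // TOTP_WINDOW_SECONDS
    return totp - 1 if previous else totp


# The signed hash then commits to the delegate address, the chain id and this
# counter, which is why the serializer tries both the current and previous value.
```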
get_safe_owners(safe_address) - if delegator not in valid_delegators: - raise ValidationError( - f"Provided delegator={delegator} is not an owner of Safe={safe_address}" - ) - - # Tries to find a valid delegator using multiple strategies - for operation_hash in DelegateSignatureHelper.calculate_all_possible_hashes( - delegate + delegate = attrs["delegate"] + delegator = attrs["delegator"] + self.validate_safe_address_and_delegator(safe_address, delegator) + if self.validate_delegator_signature( + delegate=delegate, signature=signature, signer=delegator ): - if self.check_delegate_signature( - ethereum_client, signature, operation_hash, delegator - ): - return attrs - + return attrs raise ValidationError( f"Signature does not match provided delegator={delegator}" ) @@ -431,34 +469,79 @@ def save(self, **kwargs): return obj -class DelegateDeleteSerializer(DelegateSignatureCheckerMixin, serializers.Serializer): - delegate = EthereumAddressField() +class DelegateDeleteSerializerV2(DelegateSerializerMixin, serializers.Serializer): + safe = EthereumAddressField(allow_null=True, required=False, default=None) delegator = EthereumAddressField() - signature = HexadecimalField(min_length=65) + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) def validate(self, attrs): super().validate(attrs) - + safe_address: Optional[ChecksumAddress] = attrs.get("safe") signature = attrs["signature"] - delegate = attrs["delegate"] # Delegate address to be added/removed - delegator = attrs["delegator"] # Delegator - - ethereum_client = EthereumClientProvider() - # Tries to find a valid delegator using multiple strategies - for operation_hash in DelegateSignatureHelper.calculate_all_possible_hashes( - delegate - ): - for signer in (delegate, delegator): - if self.check_delegate_signature( - ethereum_client, signature, operation_hash, signer - ): - return attrs + delegate = self.context["request"].parser_context["kwargs"]["delegate_address"] + delegator = attrs["delegator"] + self.validate_safe_address_and_delegator(safe_address, delegator) + if self.validate_delegator_signature( + delegate, signature, delegator + ) or self.validate_delegator_signature(delegate, signature, delegate): + return attrs raise ValidationError( f"Signature does not match provided delegate={delegate} or delegator={delegator}" ) +class SafeMultisigTransactionDeleteSerializer(serializers.Serializer): + safe_tx_hash = Sha3HashField() + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) + + def validate(self, attrs): + super().validate(attrs) + safe_tx_hash = attrs["safe_tx_hash"] + signature = attrs["signature"] + + try: + multisig_tx = MultisigTransaction.objects.select_related("ethereum_tx").get( + safe_tx_hash=safe_tx_hash + ) + except MultisigTransaction.DoesNotExist: + raise Http404("Multisig transaction not found") + + if multisig_tx.executed: + raise ValidationError("Executed transactions cannot be deleted") + + proposer = multisig_tx.proposer + if not proposer or proposer == NULL_ADDRESS: + raise ValidationError("Old transactions without proposer cannot be deleted") + + ethereum_client = EthereumClientProvider() + chain_id = ethereum_client.get_chain_id() + safe_address = multisig_tx.safe + # Accept a message with the current topt and the previous totp (to prevent replay attacks) + for previous_totp in (True, False): + message_hash = DeleteMultisigTxSignatureHelper.calculate_hash( + safe_address, safe_tx_hash, chain_id, previous_totp=previous_totp + ) + safe_signatures = 
SafeSignature.parse_signature(signature, message_hash) + if len(safe_signatures) != 1: + raise ValidationError( + f"1 owner signature was expected, {len(safe_signatures)} received" + ) + safe_signature = safe_signatures[0] + # Currently almost all the transactions are proposed using EOAs. Adding support for EIP1271, for example, + # would require to use the EIP712 domain of the Safe and a blockchain check. For starting + # with this feature we will try to keep it simple and only support EOA signatures. + if safe_signature.signature_type not in ( + SafeSignatureType.EOA, + SafeSignatureType.ETH_SIGN, + ): + raise ValidationError("Only EOA and ETH_SIGN signatures are supported") + if safe_signature.owner == proposer: + return attrs + + raise ValidationError("Provided owner is not the proposer of the transaction") + + class DataDecoderSerializer(serializers.Serializer): data = HexadecimalField(allow_null=False, allow_blank=False, min_length=4) to = EthereumAddressField(allow_null=True, required=False) @@ -547,6 +630,7 @@ class SafeMultisigTransactionResponseSerializer(SafeMultisigTxSerializerV1): block_number = serializers.SerializerMethodField() transaction_hash = Sha3HashField(source="ethereum_tx_id") safe_tx_hash = Sha3HashField() + proposer = EthereumAddressField() executor = serializers.SerializerMethodField() value = serializers.CharField() is_executed = serializers.BooleanField(source="executed") @@ -648,14 +732,6 @@ class SafeBalanceResponseSerializer(serializers.Serializer): balance = serializers.CharField() -class SafeBalanceUsdResponseSerializer(SafeBalanceResponseSerializer): - eth_value = serializers.CharField() - timestamp = serializers.DateTimeField() - fiat_balance = serializers.CharField() - fiat_conversion = serializers.CharField() - fiat_code = serializers.CharField() - - class SafeCollectibleResponseSerializer(serializers.Serializer): address = serializers.CharField() token_name = serializers.CharField() @@ -673,13 +749,6 @@ class SafeMultisigTransactionEstimateResponseSerializer(serializers.Serializer): safe_tx_gas = serializers.CharField() -class SafeDelegateResponseSerializer(serializers.Serializer): - safe = EthereumAddressField(source="safe_contract_id") - delegate = EthereumAddressField() - delegator = EthereumAddressField() - label = serializers.CharField(max_length=50) - - class SafeCreationInfoResponseSerializer(serializers.Serializer): created = serializers.DateTimeField() creator = EthereumAddressField() @@ -688,6 +757,9 @@ class SafeCreationInfoResponseSerializer(serializers.Serializer): master_copy = EthereumAddressField(allow_null=True) setup_data = HexadecimalField(allow_null=True) data_decoded = serializers.SerializerMethodField() + user_operation = aa_serializers.UserOperationWithSafeOperationResponseSerializer( + allow_null=True + ) def get_data_decoded(self, obj: SafeCreationInfo) -> Dict[str, Any]: return get_data_decoded_from_data(obj.setup_data or b"") @@ -894,12 +966,13 @@ class AllTransactionsSchemaSerializer(serializers.Serializer): class SafeDelegateDeleteSerializer(serializers.Serializer): """ - Deprecated in favour of DelegateDeleteSerializer + .. 
deprecated:: 3.3.0 + Deprecated in favour of DelegateDeleteSerializer """ safe = EthereumAddressField() delegate = EthereumAddressField() - signature = HexadecimalField(min_length=65) + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) def get_valid_delegators( self, @@ -988,3 +1061,158 @@ def validate(self, attrs): attrs["delegator"] = delegator return attrs + + +class DelegateSignatureCheckerMixin: + """ + Mixin to include delegate signature validation + .. deprecated:: 4.38.0 + Deprecated in favour of DelegateSerializerMixin + """ + + def check_delegate_signature( + self, + ethereum_client: EthereumClient, + signature: bytes, + operation_hash: bytes, + delegator: ChecksumAddress, + ) -> bool: + """ + Verifies signature to check if it matches the delegator + + :param ethereum_client: + :param signature: + :param operation_hash: + :param delegator: + :return: `True` if signature is valid for the delegator, `False` otherwise + """ + safe_signatures = SafeSignature.parse_signature(signature, operation_hash) + if not safe_signatures: + raise ValidationError("Signature is not valid") + + if len(safe_signatures) > 1: + raise ValidationError( + "More than one signatures detected, just one is expected" + ) + + safe_signature = safe_signatures[0] + owner = safe_signature.owner + if owner == delegator: + if not safe_signature.is_valid(ethereum_client, owner): + raise ValidationError( + f"Signature of type={safe_signature.signature_type.name} " + f"for delegator={delegator} is not valid" + ) + return True + return False + + +class DelegateSerializer(DelegateSignatureCheckerMixin, serializers.Serializer): + """ + .. deprecated:: 4.38.0 + Deprecated in favour of DelegateSerializerV2 + """ + + safe = EthereumAddressField(allow_null=True, required=False, default=None) + delegate = EthereumAddressField() + delegator = EthereumAddressField() + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) + label = serializers.CharField(max_length=50) + + def validate(self, attrs): + super().validate(attrs) + + safe_address: Optional[ChecksumAddress] = attrs.get("safe") + if ( + safe_address + and not SafeContract.objects.filter(address=safe_address).exists() + ): + raise ValidationError( + f"Safe={safe_address} does not exist or it's still not indexed" + ) + + signature = attrs["signature"] + delegate = attrs["delegate"] # Delegate address to be added/removed + delegator = attrs[ + "delegator" + ] # Delegator giving permissions to delegate (signer) + + ethereum_client = EthereumClientProvider() + if safe_address: + # Valid delegators must be owners + valid_delegators = get_safe_owners(safe_address) + if delegator not in valid_delegators: + raise ValidationError( + f"Provided delegator={delegator} is not an owner of Safe={safe_address}" + ) + + # Tries to find a valid delegator using multiple strategies + for operation_hash in DelegateSignatureHelper.calculate_all_possible_hashes( + delegate + ): + if self.check_delegate_signature( + ethereum_client, signature, operation_hash, delegator + ): + return attrs + + raise ValidationError( + f"Signature does not match provided delegator={delegator}" + ) + + def save(self, **kwargs): + safe_address = self.validated_data["safe"] + delegate = self.validated_data["delegate"] + delegator = self.validated_data["delegator"] + label = self.validated_data["label"] + obj, _ = SafeContractDelegate.objects.update_or_create( + safe_contract_id=safe_address, + delegate=delegate, + delegator=delegator, + defaults={ + "label": label, + 
}, + ) + return obj + + +class DelegateDeleteSerializer(DelegateSignatureCheckerMixin, serializers.Serializer): + """ + .. deprecated:: 4.38.0 + Deprecated in favour of DelegateDeleteSerializerV2 + """ + + delegate = EthereumAddressField() + delegator = EthereumAddressField() + signature = HexadecimalField(min_length=65, max_length=MAX_SIGNATURE_LENGTH) + + def validate(self, attrs): + super().validate(attrs) + + signature = attrs["signature"] + delegate = attrs["delegate"] # Delegate address to be added/removed + delegator = attrs["delegator"] # Delegator + + ethereum_client = EthereumClientProvider() + # Tries to find a valid delegator using multiple strategies + for operation_hash in DelegateSignatureHelper.calculate_all_possible_hashes( + delegate + ): + for signer in (delegate, delegator): + if self.check_delegate_signature( + ethereum_client, signature, operation_hash, signer + ): + return attrs + + raise ValidationError( + f"Signature does not match provided delegate={delegate} or delegator={delegator}" + ) + + +class SafeDeploymentContractSerializer(serializers.Serializer): + contract_name = serializers.CharField() + address = EthereumAddressField(allow_null=True) + + +class SafeDeploymentSerializer(serializers.Serializer): + version = serializers.CharField(max_length=10) # Example 1.3.0 + contracts = SafeDeploymentContractSerializer(many=True) diff --git a/safe_transaction_service/history/services/balance_service.py b/safe_transaction_service/history/services/balance_service.py index 915c4046..798c4059 100644 --- a/safe_transaction_service/history/services/balance_service.py +++ b/safe_transaction_service/history/services/balance_service.py @@ -1,7 +1,6 @@ import logging import operator from dataclasses import dataclass -from datetime import datetime from typing import List, Optional, Sequence from django.conf import settings @@ -16,13 +15,7 @@ from gnosis.eth import EthereumClient, EthereumClientProvider from gnosis.eth.utils import fast_is_checksum_address -from safe_transaction_service.tokens.clients import CannotGetPrice from safe_transaction_service.tokens.models import Token -from safe_transaction_service.tokens.services.price_service import ( - FiatCode, - PriceService, - PriceServiceProvider, -) from safe_transaction_service.utils.redis import get_redis from safe_transaction_service.utils.utils import chunks @@ -72,21 +65,10 @@ def get_price_address(self) -> ChecksumAddress: return self.token_address -@dataclass -class BalanceWithFiat(Balance): - eth_value: float # Value in ether - timestamp: datetime # Calculated timestamp - fiat_balance: float - fiat_conversion: float - fiat_code: str = FiatCode.USD.name - - class BalanceServiceProvider: def __new__(cls): if not hasattr(cls, "instance"): - cls.instance = BalanceService( - EthereumClientProvider(), PriceServiceProvider(), get_redis() - ) + cls.instance = BalanceService(EthereumClientProvider(), get_redis()) return cls.instance @classmethod @@ -96,12 +78,9 @@ def del_singleton(cls): class BalanceService: - def __init__( - self, ethereum_client: EthereumClient, price_service: PriceService, redis: Redis - ): + def __init__(self, ethereum_client: EthereumClient, redis: Redis): self.ethereum_client = ethereum_client self.ethereum_network = self.ethereum_client.get_network() - self.price_service = price_service self.redis = redis self.cache_token_info = TTLCache( maxsize=4096, ttl=60 * 30 @@ -260,60 +239,3 @@ def get_token_info( "Cannot get erc20 token info for token-address=%s", token_address ) return None - - def 
get_usd_balances( - self, - safe_address: ChecksumAddress, - only_trusted: bool = False, - exclude_spam: bool = False, - ) -> List[BalanceWithFiat]: - """ - All this could be more optimal (e.g. batching requests), but as everything is cached - I think we should be alright - - :param safe_address: - :param only_trusted: If True, return balance only for trusted tokens - :param exclude_spam: If True, exclude spam tokens - :return: List of BalanceWithFiat - """ - # TODO Use price service get_token_cached_usd_values - balances: List[Balance] = self.get_balances( - safe_address, only_trusted, exclude_spam - ) - try: - eth_price = self.price_service.get_native_coin_usd_price() - except CannotGetPrice: - logger.warning("Cannot get network ether price", exc_info=True) - eth_price = 0 - balances_with_usd = [] - price_token_addresses = [balance.get_price_address() for balance in balances] - token_eth_values_with_timestamp = ( - self.price_service.get_token_cached_eth_values(price_token_addresses) - ) - for balance, token_eth_value_with_timestamp in zip( - balances, token_eth_values_with_timestamp - ): - token_eth_value = token_eth_value_with_timestamp.eth_value - token_address = balance.token_address - if not token_address: # Ether - fiat_conversion = eth_price - fiat_balance = fiat_conversion * (balance.balance / 10**18) - else: - fiat_conversion = eth_price * token_eth_value - balance_with_decimals = balance.balance / 10**balance.token.decimals - fiat_balance = fiat_conversion * balance_with_decimals - - balances_with_usd.append( - BalanceWithFiat( - balance.token_address, - balance.token, - balance.balance, - token_eth_value, - token_eth_value_with_timestamp.timestamp, - round(fiat_balance, 4), - round(fiat_conversion, 4), - FiatCode.USD.name, - ) - ) - - return balances_with_usd diff --git a/safe_transaction_service/history/services/collectibles_service.py b/safe_transaction_service/history/services/collectibles_service.py index cdfeae6c..e167d99a 100644 --- a/safe_transaction_service/history/services/collectibles_service.py +++ b/safe_transaction_service/history/services/collectibles_service.py @@ -18,6 +18,7 @@ from redis import Redis from gnosis.eth import EthereumClient, EthereumClientProvider +from gnosis.eth.clients import EnsClient from safe_transaction_service.tokens.constants import ( CRYPTO_KITTIES_CONTRACT_ADDRESSES, @@ -27,7 +28,6 @@ from safe_transaction_service.utils.redis import get_redis from safe_transaction_service.utils.utils import chunks -from ..clients import EnsClient from ..exceptions import NodeConnectionException from ..models import ERC721Transfer @@ -489,9 +489,9 @@ def _get_collectibles_with_metadata( # Creates a collectibles metadata keeping the initial order for collectible_metadata_cached_index in range(len(collectibles_with_metadata)): if collectibles_with_metadata[collectible_metadata_cached_index] is None: - collectibles_with_metadata[ - collectible_metadata_cached_index - ] = collectibles_with_metadata_not_cached.pop(0) + collectibles_with_metadata[collectible_metadata_cached_index] = ( + collectibles_with_metadata_not_cached.pop(0) + ) return collectibles_with_metadata, count @@ -607,9 +607,9 @@ def get_redis_key(address_with_token_id: Tuple[ChecksumAddress, int]) -> str: if blockchain_token_uris: pipe = self.redis.pipeline() redis_map_to_store = { - get_redis_key(address_with_token_id): token_uri - if token_uri is not None - else "" + get_redis_key(address_with_token_id): ( + token_uri if token_uri is not None else "" + ) for address_with_token_id, token_uri 
in blockchain_token_uris.items() } pipe.mset(redis_map_to_store) diff --git a/safe_transaction_service/history/services/index_service.py b/safe_transaction_service/history/services/index_service.py index 31d2119d..b0a53559 100644 --- a/safe_transaction_service/history/services/index_service.py +++ b/safe_transaction_service/history/services/index_service.py @@ -151,10 +151,14 @@ def is_service_synced(self) -> bool: :return: `True` if master copies and ERC20/721 are synced, `False` otherwise """ + try: + current_block_number = self.ethereum_client.current_block_number + except (IOError, ValueError): + # If there's an error connecting to the node or invalid response we consider the service as out of sync + return False + # Use number of reorg blocks to consider as not synced - reference_block_number = ( - self.ethereum_client.current_block_number - self.eth_reorg_blocks - ) + reference_block_number = current_block_number - self.eth_reorg_blocks synced: bool = True for safe_master_copy in SafeMasterCopy.objects.relevant().filter( tx_block_number__lt=reference_block_number @@ -192,7 +196,6 @@ def tx_create_or_update_from_tx_hash(self, tx_hash: str) -> "EthereumTx": def txs_create_or_update_from_tx_hashes( self, tx_hashes: Collection[Union[str, bytes]] ) -> List["EthereumTx"]: - logger.debug("Don't retrieve existing txs on DB. Find them first") # Search first in database ethereum_txs_dict = OrderedDict.fromkeys( @@ -409,7 +412,7 @@ def _reindex( else current_block_number ) for block_number in range( - from_block_number, stop_block_number, block_process_limit + from_block_number, stop_block_number + 1, block_process_limit ): elements = indexer.find_relevant_elements( addresses, diff --git a/safe_transaction_service/history/services/safe_service.py b/safe_transaction_service/history/services/safe_service.py index 1d469896..6ccffb49 100644 --- a/safe_transaction_service/history/services/safe_service.py +++ b/safe_transaction_service/history/services/safe_service.py @@ -7,11 +7,17 @@ from web3 import Web3 from gnosis.eth import EthereumClient, EthereumClientProvider -from gnosis.eth.contracts import get_cpk_factory_contract, get_proxy_factory_contract +from gnosis.eth.contracts import ( + get_cpk_factory_contract, + get_proxy_factory_V1_3_0_contract, + get_proxy_factory_V1_4_1_contract, +) from gnosis.safe import Safe from gnosis.safe.exceptions import CannotRetrieveSafeInfoException from gnosis.safe.safe import SafeInfo +from safe_transaction_service.account_abstraction import models as aa_models + from ..exceptions import NodeConnectionException from ..models import InternalTx, SafeLastStatus, SafeMasterCopy @@ -41,6 +47,7 @@ class SafeCreationInfo: master_copy: Optional[EthereumAddress] setup_data: Optional[bytes] transaction_hash: str + user_operation: Optional[aa_models.UserOperation] class SafeServiceProvider: @@ -77,7 +84,8 @@ def __init__( self.ethereum_client = ethereum_client self.ethereum_tracing_client = ethereum_tracing_client dummy_w3 = Web3() # Not needed, just used to decode contracts - self.proxy_factory_contract = get_proxy_factory_contract(dummy_w3) + self.proxy_factory_v1_4_1_contract = get_proxy_factory_V1_4_1_contract(dummy_w3) + self.proxy_factory_v1_3_0_contract = get_proxy_factory_V1_3_0_contract(dummy_w3) self.cpk_proxy_factory_contract = get_cpk_factory_contract(dummy_w3) def get_safe_creation_info(self, safe_address: str) -> Optional[SafeCreationInfo]: @@ -129,6 +137,16 @@ def get_safe_creation_info(self, safe_address: str) -> Optional[SafeCreationInfo except IOError as 
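The `+ 1` in `_reindex` above makes the upper bound inclusive: when `stop_block_number` falls exactly on a chunk boundary, the old exclusive `range` silently skipped the final chunk. A quick illustration with made-up numbers:

```python
from_block, stop_block, limit = 0, 10, 5

list(range(from_block, stop_block, limit))      # [0, 5]     -> no chunk starts at block 10
list(range(from_block, stop_block + 1, limit))  # [0, 5, 10] -> the last block gets reindexed too
```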
exc: raise NodeConnectionException from exc + user_operation = ( + aa_models.UserOperation.objects.filter( + ethereum_tx=creation_ethereum_tx, + sender=safe_address, + ) + .exclude(init_code=None) + .select_related("receipt", "safe_operation") + .prefetch_related("safe_operation__confirmations") + .first() + ) return SafeCreationInfo( created_time, creator, @@ -136,6 +154,7 @@ def get_safe_creation_info(self, safe_address: str) -> Optional[SafeCreationInfo master_copy, setup_data, creation_internal_tx.ethereum_tx_id, + user_operation, ) def get_safe_info(self, safe_address: ChecksumAddress) -> SafeInfo: @@ -194,23 +213,29 @@ def _decode_proxy_factory( if not data: return None try: - _, data_decoded = self.proxy_factory_contract.decode_function_input(data) - master_copy = ( - data_decoded.get("masterCopy") - or data_decoded.get("_mastercopy") - or data_decoded.get("_singleton") - or data_decoded.get("singleton") - ) - setup_data = data_decoded.get("data") or data_decoded.get("initializer") - if master_copy and setup_data is not None: - return master_copy, setup_data - - logger.error( - "Problem decoding proxy factory, data_decoded=%s", data_decoded + _, data_decoded = self.proxy_factory_v1_3_0_contract.decode_function_input( + data ) - return None except ValueError: - return None + try: + ( + _, + data_decoded, + ) = self.proxy_factory_v1_4_1_contract.decode_function_input(data) + except ValueError: + return None + master_copy = ( + data_decoded.get("masterCopy") + or data_decoded.get("_mastercopy") + or data_decoded.get("_singleton") + or data_decoded.get("singleton") + ) + setup_data = data_decoded.get("data") or data_decoded.get("initializer") + if master_copy and setup_data is not None: + return master_copy, setup_data + + logger.error("Problem decoding proxy factory, data_decoded=%s", data_decoded) + return None def _decode_cpk_proxy_factory( self, data: Union[bytes, str] diff --git a/safe_transaction_service/history/services/transaction_service.py b/safe_transaction_service/history/services/transaction_service.py index 7946c74f..354f49ae 100644 --- a/safe_transaction_service/history/services/transaction_service.py +++ b/safe_transaction_service/history/services/transaction_service.py @@ -4,6 +4,7 @@ from datetime import timedelta from typing import Any, Dict, List, Optional, Sequence, Tuple, Union +from django.conf import settings from django.db.models import Case, Exists, F, OuterRef, QuerySet, Subquery, Value, When from django.utils import timezone @@ -24,7 +25,6 @@ InternalTx, ModuleTransaction, MultisigTransaction, - SafeContract, TransferDict, ) from ..serializers import ( @@ -105,22 +105,26 @@ def store_txs_in_cache( pipe.expire(key, 60 * 60) # Expire in one hour pipe.execute() - # End of cache methods ---------------------------- + def get_all_txs_cache_hash_key(self, safe_address: ChecksumAddress) -> str: + """ + Retrieves a redis hash for the provided Safe address that group several fields together, so when something changes for that address everything in cache gets invalidated at once. + https://redis.io/docs/latest/develop/data-types/hashes/ - def get_count_relevant_txs_for_safe(self, safe_address: ChecksumAddress) -> int: + :param safe_address: + :return: cache hash key """ - This method searches multiple tables and count every tx or event for a Safe. 
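`_decode_proxy_factory` now attempts decoding with the v1.3.0 ProxyFactory ABI and falls back to the v1.4.1 ABI before giving up. The control flow condenses to a first-match loop; the sketch below assumes the two contract objects created in `__init__`, and the helper name is made up:

```python
def _decode_with_known_proxy_factories(self, data: bytes):
    for contract in (
        self.proxy_factory_v1_3_0_contract,
        self.proxy_factory_v1_4_1_contract,
    ):
        try:
            return contract.decode_function_input(data)
        except ValueError:  # data does not match this ABI
            continue
    return None
```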
- It will return the same or higher value if compared to counting ``get_all_tx_identifiers`` - as that method will group some transactions (for example, 3 ERC20 can be grouped in a ``MultisigTransaction``, - so it will be ``1`` element for ``get_all_tx_identifiers`` but ``4`` for this function. + return f"all-txs:{safe_address}" - This query should be pretty fast, and it's meant to be used for invalidating caches. + def del_all_txs_cache_hash_key(self, safe_address: ChecksumAddress) -> None: + """ + Deletes the hash for a specific Safe address, invalidating all-transactions cache related with Safe at once. :param safe_address: - :return: number of relevant txs for a Safe + :return: """ + self.redis.unlink(self.get_all_txs_cache_hash_key(safe_address)) - return SafeContract.objects.get_count_relevant_txs_for_safe(safe_address) + # End of cache methods ---------------------------- def get_all_tx_identifiers( self, @@ -244,7 +248,9 @@ def get_all_tx_identifiers( "ethereum_tx_id", "execution_date", "created", "block", "safe_nonce" ) .distinct() - .order_by("-execution_date") + .order_by("-execution_date")[ + : settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS + ] ) erc721_tx_ids = ( @@ -261,7 +267,9 @@ def get_all_tx_identifiers( "ethereum_tx_id", "execution_date", "created", "block", "safe_nonce" ) .distinct() - .order_by("-execution_date") + .order_by("-execution_date")[ + : settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS + ] ) # Get incoming ether txs not included on Multisig or Module txs @@ -283,7 +291,9 @@ def get_all_tx_identifiers( "ethereum_tx_id", "execution_date", "created", "block", "safe_nonce" ) .distinct() - .order_by("-execution_date") + .order_by("-execution_date")[ + : settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS + ] ) # Tricky, we merge SafeTx hashes with EthereumTx hashes diff --git a/safe_transaction_service/history/services/webhooks.py b/safe_transaction_service/history/services/webhooks.py index 49022168..173f862b 100644 --- a/safe_transaction_service/history/services/webhooks.py +++ b/safe_transaction_service/history/services/webhooks.py @@ -30,10 +30,12 @@ def build_webhook_payload( instance: Union[ TokenTransfer, InternalTx, MultisigConfirmation, MultisigTransaction ], + deleted: bool = False, ) -> List[Dict[str, Any]]: """ :param sender: Sender type :param instance: Sender instance + :param deleted: If the instance has been deleted :return: A list of webhooks generated from the instance provided """ payloads: List[Dict[str, Any]] = [] @@ -48,6 +50,14 @@ def build_webhook_payload( ).hex(), } ] + elif sender == MultisigTransaction and deleted: + payloads = [ + { + "address": instance.safe, + "type": WebHookType.DELETED_MULTISIG_TRANSACTION.name, + "safeTxHash": HexBytes(instance.safe_tx_hash).hex(), + } + ] elif sender == MultisigTransaction: payload = { "address": instance.safe, diff --git a/safe_transaction_service/history/signals.py b/safe_transaction_service/history/signals.py index 893912a2..39ce17a9 100644 --- a/safe_transaction_service/history/signals.py +++ b/safe_transaction_service/history/signals.py @@ -1,14 +1,16 @@ from logging import getLogger -from typing import Type, Union +from typing import List, Optional, Type, Union from django.db.models import Model -from django.db.models.signals import post_save +from django.db.models.signals import post_delete, post_save from django.dispatch import receiver from django.utils import timezone -from safe_transaction_service.events.tasks import send_event_to_queue_task +from eth_typing import 
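Grouping every cached `/all-transactions/` variant under one Redis hash per Safe makes invalidation a single `UNLINK`, instead of tracking each `limit`/`offset`/filter combination separately. A hedged sketch of the pattern; the field naming is an assumption, only the `all-txs:<address>` key format and the `unlink` call come from the code above:

```python
import json

hash_key = f"all-txs:{safe_address}"              # get_all_txs_cache_hash_key()
field = f"{limit}:{offset}:{executed}:{queued}"   # one field per query variant (assumed)

# Store one page of serialized results under the Safe's hash
redis.hset(hash_key, field, json.dumps(serialized_page))

# ... later, when anything touching this Safe changes:
redis.unlink(hash_key)                            # drops every cached variant at once
```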
ChecksumAddress + from safe_transaction_service.notifications.tasks import send_notification_task +from ..events.services.queue_service import get_queue_service from .models import ( ERC20Transfer, ERC721Transfer, @@ -22,6 +24,7 @@ SafeStatus, TokenTransfer, ) +from .services import TransactionServiceProvider from .services.webhooks import build_webhook_payload, is_relevant_notification from .tasks import send_webhook_task @@ -112,30 +115,57 @@ def safe_master_copy_clear_cache( SafeMasterCopy.objects.get_version_for_address.cache_clear() -@receiver( - post_save, - sender=ModuleTransaction, - dispatch_uid="module_transaction.process_webhook", -) -@receiver( - post_save, - sender=MultisigConfirmation, - dispatch_uid="multisig_confirmation.process_webhook", -) -@receiver( - post_save, - sender=MultisigTransaction, - dispatch_uid="multisig_transaction.process_webhook", -) -@receiver( - post_save, sender=ERC20Transfer, dispatch_uid="erc20_transfer.process_webhook" -) -@receiver( - post_save, sender=ERC721Transfer, dispatch_uid="erc721_transfer.process_webhook" -) -@receiver(post_save, sender=InternalTx, dispatch_uid="internal_tx.process_webhook") -@receiver(post_save, sender=SafeContract, dispatch_uid="safe_contract.process_webhook") -def process_webhook( +def get_safe_addresses_involved_from_db_instance( + instance: Union[ + TokenTransfer, + InternalTx, + MultisigConfirmation, + MultisigTransaction, + ] +) -> List[Optional[ChecksumAddress]]: + """ + Retrieves the Safe addresses involved in the provided database instance. + + :param instance: + :return: List of Safe addresses from the provided instance + """ + addresses = [] + if isinstance(instance, TokenTransfer): + addresses.append(instance.to) + addresses.append(instance._from) + return addresses + elif isinstance(instance, MultisigTransaction): + addresses.append(instance.safe) + return addresses + elif isinstance(instance, MultisigConfirmation) and instance.multisig_transaction: + addresses.append(instance.multisig_transaction.safe) + return addresses + elif isinstance(instance, InternalTx): + addresses.append(instance.to) + return addresses + + return addresses + + +def _clean_all_txs_cache( + instance: Union[ + TokenTransfer, + InternalTx, + MultisigConfirmation, + MultisigTransaction, + ] +) -> None: + """ + Remove the all-transactions cache related with instance modified + + :param instance: + """ + transaction_service = TransactionServiceProvider() + for address in get_safe_addresses_involved_from_db_instance(instance): + transaction_service.del_all_txs_cache_hash_key(address) + + +def _process_webhook( sender: Type[Model], instance: Union[ TokenTransfer, @@ -145,10 +175,18 @@ def process_webhook( SafeContract, ], created: bool, - **kwargs, -) -> None: + deleted: bool, +): + assert not ( + created and deleted + ), "An instance cannot be created and deleted at the same time" + + # Ignore SafeContract because it is not affecting all-transaction cache. 
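When `_process_webhook` runs with `deleted=True` (wired to `post_delete` just below), `build_webhook_payload` emits a minimal event so webhook and queue consumers can evict the removed transaction. Roughly the payload they receive, with placeholder address and hash:

```python
payload = {
    "address": "0x5AFE...",                  # Safe the transaction belonged to
    "type": "DELETED_MULTISIG_TRANSACTION",
    "safeTxHash": "0x1234...",
}
```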
+ if sender != SafeContract: + _clean_all_txs_cache(instance) + logger.debug("Start building payloads for created=%s object=%s", created, instance) - payloads = build_webhook_payload(sender, instance) + payloads = build_webhook_payload(sender, instance, deleted=deleted) logger.debug( "End building payloads %s for created=%s object=%s", payloads, created, instance ) @@ -168,7 +206,8 @@ def process_webhook( countdown=5, priority=2, # Almost lowest priority ) - send_event_to_queue_task.delay(payload) + queue_service = get_queue_service() + queue_service.send_event(payload) else: logger.debug( "Notification will not be sent for created=%s object=%s", @@ -177,6 +216,55 @@ def process_webhook( ) +@receiver( + post_save, + sender=ModuleTransaction, + dispatch_uid="module_transaction.process_webhook", +) +@receiver( + post_save, + sender=MultisigConfirmation, + dispatch_uid="multisig_confirmation.process_webhook", +) +@receiver( + post_save, + sender=MultisigTransaction, + dispatch_uid="multisig_transaction.process_webhook", +) +@receiver( + post_save, sender=ERC20Transfer, dispatch_uid="erc20_transfer.process_webhook" +) +@receiver( + post_save, sender=ERC721Transfer, dispatch_uid="erc721_transfer.process_webhook" +) +@receiver(post_save, sender=InternalTx, dispatch_uid="internal_tx.process_webhook") +@receiver(post_save, sender=SafeContract, dispatch_uid="safe_contract.process_webhook") +def process_webhook( + sender: Type[Model], + instance: Union[ + TokenTransfer, + InternalTx, + MultisigConfirmation, + MultisigTransaction, + SafeContract, + ], + created: bool, + **kwargs, +) -> None: + return _process_webhook(sender, instance, created, False) + + +@receiver( + post_delete, + sender=MultisigTransaction, + dispatch_uid="multisig_transaction.process_delete_webhook", +) +def process_delete_webhook( + sender: Type[Model], instance: MultisigTransaction, *args, **kwargs +): + return _process_webhook(sender, instance, False, True) + + @receiver( post_save, sender=SafeLastStatus, diff --git a/safe_transaction_service/history/tasks.py b/safe_transaction_service/history/tasks.py index cd6f5aaa..d8e1ef9b 100644 --- a/safe_transaction_service/history/tasks.py +++ b/safe_transaction_service/history/tasks.py @@ -389,9 +389,7 @@ def process_decoded_internal_txs_for_safe_task( """ with contextlib.suppress(LockError): with only_one_running_task(self, lock_name_suffix=safe_address): - logger.info( - "Start processing decoded internal txs for safe %s", safe_address - ) + logger.info("[%s] Start processing decoded internal txs", safe_address) tx_processor: SafeTxProcessor = SafeTxProcessorProvider() index_service: IndexService = IndexServiceProvider() @@ -413,21 +411,21 @@ def process_decoded_internal_txs_for_safe_task( ).sorted_reverse_by_mined(): if safe_status.is_corrupted(): message = ( - f"Safe-address={safe_address} A problem was found in SafeStatus " + f"[{safe_address}] A problem was found in SafeStatus " f"with nonce={safe_status.nonce} " f"on internal-tx-id={safe_status.internal_tx_id} " f"tx-hash={safe_status.internal_tx.ethereum_tx_id} " ) logger.error(message) logger.info( - "Safe-address=%s Processing traces again", + "[%s] Processing traces again", safe_address, ) if reindex_master_copies and previous_safe_status: block_number = previous_safe_status.block_number to_block_number = safe_last_status.block_number logger.info( - "Safe-address=%s Last known not corrupted SafeStatus with nonce=%d on block=%d , " + "[%s] Last known not corrupted SafeStatus with nonce=%d on block=%d , " "reindexing until 
block=%d", safe_address, previous_safe_status.nonce, @@ -441,7 +439,7 @@ def process_decoded_internal_txs_for_safe_task( addresses=[safe_address], ) logger.info( - "Safe-address=%s Processing traces again after reindexing", + "[%s] Processing traces again after reindexing", safe_address, ) raise ValueError(message) @@ -452,9 +450,7 @@ def process_decoded_internal_txs_for_safe_task( # Check if a new decoded tx appeared before other already processed (due to a reindex) if InternalTxDecoded.objects.out_of_order_for_safe(safe_address): - logger.error( - "Found out of order transactions for Safe=%s", safe_address - ) + logger.error("[%s] Found out of order transactions", safe_address) tx_processor.clear_cache(safe_address) index_service.reprocess_addresses([safe_address]) @@ -474,12 +470,14 @@ def process_decoded_internal_txs_for_safe_task( ) ) - logger.info("Processed %d decoded transactions", number_processed) + logger.info( + "[%s] Processed %d decoded transactions", safe_address, number_processed + ) if number_processed: logger.info( - "%d decoded internal txs successfully processed for safe %s", - number_processed, + "[%s] %d decoded internal txs successfully processed", safe_address, + number_processed, ) return number_processed diff --git a/safe_transaction_service/history/tests/clients/test_ens_client.py b/safe_transaction_service/history/tests/clients/test_ens_client.py deleted file mode 100644 index f6b53913..00000000 --- a/safe_transaction_service/history/tests/clients/test_ens_client.py +++ /dev/null @@ -1,72 +0,0 @@ -from unittest import mock - -from django.test import TestCase - -from eth_utils import keccak -from requests import Session - -from gnosis.eth.ethereum_client import EthereumNetwork - -from ...clients import EnsClient - - -class TestEnsClient(TestCase): - def test_domain_hash_to_hex_str(self): - domain_hash_bytes = keccak(text="gnosis") - domain_hash_int = int.from_bytes(domain_hash_bytes, byteorder="big") - - result = EnsClient.domain_hash_to_hex_str(domain_hash_bytes) - self.assertEqual(result, EnsClient.domain_hash_to_hex_str(domain_hash_int)) - self.assertEqual(len(result), 66) - - self.assertEqual(len(EnsClient.domain_hash_to_hex_str(b"")), 66) - self.assertEqual(len(EnsClient.domain_hash_to_hex_str(None)), 66) - self.assertEqual(len(EnsClient.domain_hash_to_hex_str(2)), 66) - - def test_query_by_account(self): - ens_client = EnsClient(EthereumNetwork.GOERLI.value) # Mainnet - if not ens_client.is_available(): - self.skipTest("ENS Goerli Client is not available") - - self.assertEqual( - ens_client.query_by_account("0x0D28d3C544757B9DBb99AC33FcB774534D7C8a7D"), - { - "registrations": [ - { - "expiryDate": "2308985592", - "domain": { - "labelName": "safe-tx-service", - "labelhash": "0x4d9600e939c494d5af0e62d974199a3674381907b1a7469ff900d13ff74f04d1", - "name": "safe-tx-service.eth", - "isMigrated": True, - "parent": {"name": "eth"}, - }, - } - ] - }, - ) - - def test_query_by_domain_hash(self): - ens_client = EnsClient(EthereumNetwork.MAINNET.value) # Mainnet - if not ens_client.is_available(): - self.skipTest("ENS Mainnet Client is not available") - - # Query for gnosis domain - domain_hash = keccak(text="gnosis") - self.assertEqual("gnosis", ens_client.query_by_domain_hash(domain_hash)) - - domain_hash_2 = keccak( - text="notverycommon-domain-name-made-up-by-me-with-forbidden-word-ñ" - ) - self.assertIsNone(ens_client.query_by_domain_hash(domain_hash_2)) - - def test_is_available(self): - for ethereum_network in ( - EthereumNetwork.GOERLI, - 
EthereumNetwork.MAINNET, - ): - with self.subTest(ethereum_network=ethereum_network): - ens_client = EnsClient(ethereum_network) - self.assertTrue(ens_client.is_available()) - with mock.patch.object(Session, "get", side_effect=IOError()): - self.assertFalse(ens_client.is_available()) diff --git a/safe_transaction_service/history/tests/factories.py b/safe_transaction_service/history/tests/factories.py index a6dd4be6..bd37d7fa 100644 --- a/safe_transaction_service/history/tests/factories.py +++ b/safe_transaction_service/history/tests/factories.py @@ -7,9 +7,9 @@ from factory.django import DjangoModelFactory from factory.fuzzy import FuzzyInteger from hexbytes import HexBytes -from web3 import Web3 from gnosis.eth.constants import NULL_ADDRESS +from gnosis.eth.utils import fast_keccak_text from gnosis.safe.safe_signature import SafeSignatureType from ..models import ( @@ -53,8 +53,8 @@ class Meta: gas_limit = factory.fuzzy.FuzzyInteger(100000000, 200000000) gas_used = factory.fuzzy.FuzzyInteger(100000, 500000) timestamp = factory.LazyFunction(timezone.now) - block_hash = factory.Sequence(lambda n: Web3.keccak(text=f"block-{n}").hex()) - parent_hash = factory.Sequence(lambda n: Web3.keccak(text=f"block{n - 1}").hex()) + block_hash = factory.Sequence(lambda n: fast_keccak_text(f"block-{n}").hex()) + parent_hash = factory.Sequence(lambda n: fast_keccak_text(f"block{n - 1}").hex()) class EthereumTxFactory(DjangoModelFactory): @@ -63,7 +63,7 @@ class Meta: block = factory.SubFactory(EthereumBlockFactory) tx_hash = factory.Sequence( - lambda n: Web3.keccak(text=f"ethereum_tx_hash-{n}").hex() + lambda n: fast_keccak_text(f"ethereum_tx_hash-{n}").hex() ) _from = factory.LazyFunction(lambda: Account.create().address) gas = factory.fuzzy.FuzzyInteger(1000, 5000) @@ -226,7 +226,7 @@ class Meta: module = factory.LazyFunction(lambda: Account.create().address) to = factory.LazyFunction(lambda: Account.create().address) value = FuzzyInteger(low=0, high=10) - data = factory.Sequence(lambda n: Web3.keccak(text=f"module-tx-{n}")) + data = factory.Sequence(lambda n: fast_keccak_text(f"module-tx-{n}")) operation = FuzzyInteger(low=0, high=1) failed = False @@ -236,7 +236,7 @@ class Meta: model = MultisigTransaction safe_tx_hash = factory.Sequence( - lambda n: Web3.keccak(text=f"multisig-tx-{n}").hex() + lambda n: fast_keccak_text(f"multisig-tx-{n}").hex() ) safe = factory.LazyFunction(lambda: Account.create().address) ethereum_tx = factory.SubFactory(EthereumTxFactory) @@ -263,7 +263,7 @@ class Meta: ethereum_tx = factory.SubFactory(EthereumTxFactory) multisig_transaction = factory.SubFactory(MultisigTransactionFactory) multisig_transaction_hash = factory.Sequence( - lambda n: Web3.keccak(text=f"multisig-confirmation-tx-{n}").hex() + lambda n: fast_keccak_text(f"multisig-confirmation-tx-{n}").hex() ) owner = factory.LazyFunction(lambda: Account.create().address) signature = None diff --git a/safe_transaction_service/history/tests/mocks/deployments_mock.py b/safe_transaction_service/history/tests/mocks/deployments_mock.py new file mode 100644 index 00000000..1a9c1130 --- /dev/null +++ b/safe_transaction_service/history/tests/mocks/deployments_mock.py @@ -0,0 +1,149 @@ +mainnet_deployments_1_4_1_multisend = { + "contractName": "MultiSend", + "address": "0x38869bf66a61cF6bDB996A6aE40D5853Fd43B526", +} + +mainnet_deployments_1_4_1_safe = { + "contractName": "Safe", + "address": "0x41675C099F32341bf84BFc5382aF534df5C7461a", +} + +mainnet_deployments_1_4_1 = { + "version": "1.4.1", + "contracts": [ + { + 
"contractName": "CompatibilityFallbackHandler", + "address": "0xfd0732Dc9E303f09fCEf3a7388Ad10A83459Ec99", + }, + { + "contractName": "CreateCall", + "address": "0x9b35Af71d77eaf8d7e40252370304687390A1A52", + }, + mainnet_deployments_1_4_1_multisend, + { + "contractName": "MultiSendCallOnly", + "address": "0x9641d764fc13c8B624c04430C7356C1C7C8102e2", + }, + mainnet_deployments_1_4_1_safe, + { + "contractName": "SafeL2", + "address": "0x29fcB43b46531BcA003ddC8FCB67FFE91900C762", + }, + { + "contractName": "SafeProxyFactory", + "address": "0x4e1DCf7AD4e460CfD30791CCC4F9c8a4f820ec67", + }, + { + "contractName": "SignMessageLib", + "address": "0xd53cd0aB83D845Ac265BE939c57F53AD838012c9", + }, + { + "contractName": "SimulateTxAccessor", + "address": "0x3d4BA2E0884aa488718476ca2FB8Efc291A46199", + }, + ], +} + +mainnet_deployments_1_3_0 = { + "version": "1.3.0", + "contracts": [ + { + "contractName": "CompatibilityFallbackHandler", + "address": "0xf48f2B2d2a534e402487b3ee7C18c33Aec0Fe5e4", + }, + { + "contractName": "CreateCall", + "address": "0x7cbB62EaA69F79e6873cD1ecB2392971036cFAa4", + }, + { + "contractName": "GnosisSafe", + "address": "0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552", + }, + { + "contractName": "GnosisSafeL2", + "address": "0x3E5c63644E683549055b9Be8653de26E0B4CD36E", + }, + { + "contractName": "MultiSend", + "address": "0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761", + }, + { + "contractName": "MultiSendCallOnly", + "address": "0x40A2aCCbd92BCA938b02010E17A5b8929b49130D", + }, + { + "contractName": "GnosisSafeProxyFactory", + "address": "0xa6B71E26C5e0845f74c812102Ca7114b6a896AB2", + }, + { + "contractName": "SignMessageLib", + "address": "0xA65387F16B013cf2Af4605Ad8aA5ec25a2cbA3a2", + }, + { + "contractName": "SimulateTxAccessor", + "address": "0x59AD6735bCd8152B84860Cb256dD9e96b85F69Da", + }, + ], +} + +mainnet_deployments_1_2_0 = { + "version": "1.2.0", + "contracts": [ + { + "contractName": "GnosisSafe", + "address": "0x6851D6fDFAfD08c0295C392436245E5bc78B0185", + } + ], +} + +mainnet_deployments_1_1_1 = { + "version": "1.1.1", + "contracts": [ + { + "contractName": "CreateAndAddModules", + "address": "0xF61A721642B0c0C8b334bA3763BA1326F53798C0", + }, + { + "contractName": "CreateCall", + "address": "0x8538FcBccba7f5303d2C679Fa5d7A629A8c9bf4A", + }, + { + "contractName": "DefaultCallbackHandler", + "address": "0xd5D82B6aDDc9027B22dCA772Aa68D5d74cdBdF44", + }, + { + "contractName": "GnosisSafe", + "address": "0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", + }, + { + "contractName": "MultiSend", + "address": "0x8D29bE29923b68abfDD21e541b9374737B49cdAD", + }, + { + "contractName": "ProxyFactory", + "address": "0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B", + }, + ], +} + +mainnet_deployments_1_0_0 = { + "version": "1.0.0", + "contracts": [ + { + "contractName": "GnosisSafe", + "address": "0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", + }, + { + "contractName": "ProxyFactory", + "address": "0x12302fE9c02ff50939BaAaaf415fc226C078613C", + }, + ], +} + +mainnet_deployments = [ + mainnet_deployments_1_0_0, + mainnet_deployments_1_1_1, + mainnet_deployments_1_2_0, + mainnet_deployments_1_3_0, + mainnet_deployments_1_4_1, +] diff --git a/safe_transaction_service/history/tests/mocks/mocks_internal_tx_indexer.py b/safe_transaction_service/history/tests/mocks/mocks_internal_tx_indexer.py index a3d5a7b0..159eb778 100644 --- a/safe_transaction_service/history/tests/mocks/mocks_internal_tx_indexer.py +++ b/safe_transaction_service/history/tests/mocks/mocks_internal_tx_indexer.py @@ -78,7 +78,7 @@ 
}, ] -# trace_blocks 6067026 on Rinkeby +# trace_blocks([6067026]) on Rinkeby trace_blocks_result = [ [ { diff --git a/safe_transaction_service/history/tests/mocks/mocks_safe_creation.py b/safe_transaction_service/history/tests/mocks/mocks_safe_creation.py new file mode 100644 index 00000000..50def6f5 --- /dev/null +++ b/safe_transaction_service/history/tests/mocks/mocks_safe_creation.py @@ -0,0 +1,130 @@ +create_test_data_v1_0_0 = { + "master_copy": "0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", + "setup_data": "0xa97ab18a00000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000006e45d69a383ceca3d54688e833bd0e1388747e6b00000000000000000000000061a0c717d18232711bc788f19c9cd56a43cc88720000000000000000000000007724b234c9099c205f03b458944942bceba134080000000000000000000000000000000000000000000000000000000000000000", + "data": "0x61b69abd000000000000000000000000b6029ea3b2c51d09a50b53ca8012feeb05bda35a00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000184a97ab18a00000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000006e45d69a383ceca3d54688e833bd0e1388747e6b00000000000000000000000061a0c717d18232711bc788f19c9cd56a43cc88720000000000000000000000007724b234c9099c205f03b458944942bceba13408000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", +} # Example from Rinkeby +data_decoded_v1_0_0 = { + "method": "setup", + "parameters": [ + { + "name": "_owners", + "type": "address[]", + "value": [ + "0x6E45d69a383CECa3d54688e833Bd0e1388747e6B", + "0x61a0c717d18232711bC788F19C9Cd56a43cc8872", + "0x7724b234c9099C205F03b458944942bcEBA13408", + ], + }, + {"name": "_threshold", "type": "uint256", "value": "1"}, + { + "name": "to", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "data", "type": "bytes", "value": "0x"}, + { + "name": "paymentToken", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "payment", "type": "uint256", "value": "0"}, + { + "name": "paymentReceiver", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + ], +} + +create_test_data_v1_1_1 = { + "master_copy": "0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", + "setup_data": 
"0xb63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000180000000000000000000000000d5d82b6addc9027b22dca772aa68d5d74cdbdf440000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ac9b6dd409ff10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000085c26101f353f38e45c72d414b44972831f07be3000000000000000000000000235518798770d7336c5c4908dd1019457fea43a10000000000000000000000007f63c25665ea7e85500eaeb806e552e651b07b9d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "data": "0x1688f0b900000000000000000000000034cfac646f301356faa8b21e94227e3583fe3f5f0000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000002cecc9e861200000000000000000000000000000000000000000000000000000000000001c4b63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000180000000000000000000000000d5d82b6addc9027b22dca772aa68d5d74cdbdf440000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ac9b6dd409ff10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000085c26101f353f38e45c72d414b44972831f07be3000000000000000000000000235518798770d7336c5c4908dd1019457fea43a10000000000000000000000007f63c25665ea7e85500eaeb806e552e651b07b9d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", +} +data_decoded_v1_1_1 = { + "method": "setup", + "parameters": [ + { + "name": "_owners", + "type": "address[]", + "value": [ + "0x85C26101f353f38E45c72d414b44972831f07BE3", + "0x235518798770D7336c5c4908dd1019457FEa43a1", + "0x7F63c25665EA7e85500eAEB806E552e651B07b9d", + ], + }, + {"name": "_threshold", "type": "uint256", "value": "1"}, + { + "name": "to", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "data", "type": "bytes", "value": "0x"}, + { + "name": "fallbackHandler", + "type": "address", + "value": "0xd5D82B6aDDc9027B22dCA772Aa68D5d74cdBdF44", + }, + { + "name": "paymentToken", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "payment", "type": "uint256", "value": "3036537000337393"}, + { + "name": "paymentReceiver", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + ], +} + +create_cpk_test_data = { + "master_copy": "0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", + "setup_data": "0x5714713d000000000000000000000000ff54516a7bc1c1ea952a688e72d5b93a80620074", + "data": 
"0x460868ca00000000000000000000000034cfac646f301356faa8b21e94227e3583fe3f5fcfe33a586323e7325be6aa6ecd8b4600d232a9037e83c8ece69413b777dabe6500000000000000000000000040a930851bd2e590bd5a5c981b436de25742e9800000000000000000000000005ef44de4b98f2bce0e29c344e7b2fb8f0282a0cf000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000245714713d000000000000000000000000ff54516a7bc1c1ea952a688e72d5b93a8062007400000000000000000000000000000000000000000000000000000000", +} +data_decoded_cpk = None + +# Using `createProxyWithNonce` for v1.4.1, example taken from Goerli +create_v1_4_1_test_data = { + "master_copy": "0x29fcB43b46531BcA003ddC8FCB67FFE91900C762", + "setup_data": "0xb63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000160000000000000000000000000fd0732dc9e303f09fcef3a7388ad10a83459ec990000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c7d289db6238596b5a5dbe2f1df9d29c930f959c00000000000000000000000068bbf2084546ccba3cf2f604736e77b3b2a671600000000000000000000000000000000000000000000000000000000000000000", + "data": "0x1688f0b900000000000000000000000029fcb43b46531bca003ddc8fcb67ffe91900c76200000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000018a765221620000000000000000000000000000000000000000000000000000000000000184b63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000160000000000000000000000000fd0732dc9e303f09fcef3a7388ad10a83459ec990000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c7d289db6238596b5a5dbe2f1df9d29c930f959c00000000000000000000000068bbf2084546ccba3cf2f604736e77b3b2a67160000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", +} +data_decoded_v1_4_1 = { + "method": "setup", + "parameters": [ + { + "name": "_owners", + "type": "address[]", + "value": [ + "0xC7D289DB6238596B5A5DBE2f1dF9D29C930F959c", + "0x68bbF2084546ccBA3Cf2F604736e77b3b2a67160", + ], + }, + {"name": "_threshold", "type": "uint256", "value": "2"}, + { + "name": "to", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "data", "type": "bytes", "value": "0x"}, + { + "name": "fallbackHandler", + "type": "address", + "value": "0xfd0732Dc9E303f09fCEf3a7388Ad10A83459Ec99", + }, + { + "name": "paymentToken", + "type": "address", + "value": "0x0000000000000000000000000000000000000000", + }, + {"name": "payment", "type": "uint256", "value": "0"}, + { + "name": "paymentReceiver", + "type": "address", + "value": 
"0x0000000000000000000000000000000000000000", + }, + ], +} diff --git a/safe_transaction_service/history/tests/mocks/traces.py b/safe_transaction_service/history/tests/mocks/traces.py index 73012cb7..28e0db84 100644 --- a/safe_transaction_service/history/tests/mocks/traces.py +++ b/safe_transaction_service/history/tests/mocks/traces.py @@ -48,7 +48,7 @@ "type": "call", } -rinkeby_traces = [ +testnet_traces = [ { "action": { "from": "0x5aC255889882aCd3da2aA939679E3f3d4cea221e", @@ -138,7 +138,7 @@ "transactionPosition": 0, "type": "call", }, -] +] # Taken from Rinkeby module_traces = [ { diff --git a/safe_transaction_service/history/tests/test_balance_service.py b/safe_transaction_service/history/tests/test_balance_service.py index 7c8bd3ef..1c9922e2 100644 --- a/safe_transaction_service/history/tests/test_balance_service.py +++ b/safe_transaction_service/history/tests/test_balance_service.py @@ -1,23 +1,13 @@ -from typing import Optional -from unittest import mock -from unittest.mock import MagicMock - from django.test import TestCase -from django.utils import timezone from eth_account import Account -from eth_typing import ChecksumAddress from gnosis.eth.tests.ethereum_test_case import EthereumTestCaseMixin -from gnosis.eth.tests.utils import deploy_erc20 from safe_transaction_service.tokens.models import Token -from safe_transaction_service.tokens.services.price_service import PriceService from safe_transaction_service.tokens.tests.factories import TokenFactory from ..services import BalanceServiceProvider -from ..services.balance_service import BalanceWithFiat -from .factories import ERC20TransferFactory, SafeContractFactory class TestBalanceService(EthereumTestCaseMixin, TestCase): @@ -41,222 +31,6 @@ def test_get_token_info(self): self.assertEqual(token_info.symbol, token_db.symbol) self.assertEqual(token_info.decimals, token_db.decimals) - @mock.patch.object( - PriceService, "get_token_eth_value", return_value=0.4, autospec=True - ) - @mock.patch.object( - PriceService, "get_native_coin_usd_price", return_value=123.4, autospec=True - ) - @mock.patch.object(timezone, "now", return_value=timezone.now()) - def test_get_usd_balances( - self, - timezone_now_mock: MagicMock, - get_native_coin_usd_price_mock: MagicMock, - get_token_eth_value_mock: MagicMock, - ): - balance_service = self.balance_service - - safe_address = Account.create().address - SafeContractFactory(address=safe_address) - value = 7 - self.send_ether(safe_address, 7) - - balances = balance_service.get_usd_balances(safe_address) - self.assertEqual(len(balances), 1) - self.assertIsNone(balances[0].token_address) - self.assertEqual(balances[0].balance, value) - - tokens_value = int(12 * 1e18) - erc20 = deploy_erc20(self.w3, "Eurodollar", "EUD", safe_address, tokens_value) - balances = balance_service.get_usd_balances(safe_address) - self.assertEqual(len(balances), 1) - - ERC20TransferFactory(address=erc20.address, to=safe_address) - balances = balance_service.get_usd_balances(safe_address) - token_info = balance_service.get_token_info(erc20.address) - self.assertCountEqual( - balances, - [ - BalanceWithFiat( - None, None, value, 1.0, timezone_now_mock.return_value, 0.0, 123.4 - ), - BalanceWithFiat( - erc20.address, - token_info, - tokens_value, - 0.4, - timezone_now_mock.return_value, - round(123.4 * 0.4 * (tokens_value / 1e18), 4), - round(123.4 * 0.4, 4), - ), - ], - ) - - balances = balance_service.get_usd_balances(safe_address, only_trusted=True) - self.assertCountEqual( - balances, - [ - BalanceWithFiat( - None, None, 
value, 1.0, timezone_now_mock.return_value, 0.0, 123.4 - ), - ], - ) - - Token.objects.filter(address=erc20.address).update(trusted=True, spam=False) - balances = balance_service.get_usd_balances(safe_address, only_trusted=True) - self.assertCountEqual( - balances, - [ - BalanceWithFiat( - None, None, value, 1.0, timezone_now_mock.return_value, 0.0, 123.4 - ), - BalanceWithFiat( - erc20.address, - token_info, - tokens_value, - 0.4, - timezone_now_mock.return_value, - round(123.4 * 0.4 * (tokens_value / 1e18), 4), - round(123.4 * 0.4, 4), - ), - ], - ) - - # Test sorting - erc20_2 = deploy_erc20(self.w3, "Peseta", "PTA", safe_address, tokens_value) - token_info_2 = balance_service.get_token_info(erc20_2.address) - erc20_3 = deploy_erc20( - self.w3, "Double Dollars", "DD", safe_address, tokens_value - ) - token_info_3 = balance_service.get_token_info(erc20_3.address) - - ERC20TransferFactory(address=erc20_2.address, to=safe_address) - ERC20TransferFactory(address=erc20_3.address, to=safe_address) - for tokens_erc20_get_balances_batch in (1, 2000): - with self.subTest( - TOKENS_ERC20_GET_BALANCES_BATCH=tokens_erc20_get_balances_batch - ): - with self.settings( - TOKENS_ERC20_GET_BALANCES_BATCH=tokens_erc20_get_balances_batch - ): - balances = balance_service.get_usd_balances(safe_address) - token_info = balance_service.get_token_info(erc20.address) - self.assertCountEqual( - balances, - [ - BalanceWithFiat( - None, - None, - value, - 1.0, - timezone_now_mock.return_value, - 0.0, - 123.4, - ), - BalanceWithFiat( - erc20_3.address, - token_info_3, - tokens_value, - 0.4, - timezone_now_mock.return_value, - round(123.4 * 0.4 * (tokens_value / 1e18), 4), - round(123.4 * 0.4, 4), - ), - BalanceWithFiat( - erc20.address, - token_info, - tokens_value, - 0.4, - timezone_now_mock.return_value, - round(123.4 * 0.4 * (tokens_value / 1e18), 4), - round(123.4 * 0.4, 4), - ), - BalanceWithFiat( - erc20_2.address, - token_info_2, - tokens_value, - 0.4, - timezone_now_mock.return_value, - round(123.4 * 0.4 * (tokens_value / 1e18), 4), - round(123.4 * 0.4, 4), - ), - ], - ) - - @mock.patch.object( - PriceService, "get_token_eth_value", return_value=0.4, autospec=True - ) - @mock.patch.object( - PriceService, "get_native_coin_usd_price", return_value=123.4, autospec=True - ) - @mock.patch.object(timezone, "now", return_value=timezone.now()) - def test_get_usd_balances_copy_price( - self, - timezone_now_mock: MagicMock, - get_native_coin_usd_price_mock: MagicMock, - get_token_eth_value_mock: MagicMock, - ): - balance_service = self.balance_service - safe_address = SafeContractFactory().address - random_address = Account.create().address - - balances = balance_service.get_usd_balances(safe_address) - self.assertEqual(len(balances), 1) - self.assertIsNone(balances[0].token_address) - self.assertEqual(balances[0].balance, 0) - - tokens_value = int(12 * 1e18) - erc20 = deploy_erc20( - self.w3, "Galactic Credit Standard", "GCS", safe_address, tokens_value - ) - ERC20TransferFactory(address=erc20.address, to=safe_address) - - def get_token_eth_value( - self, token_address: ChecksumAddress - ) -> Optional[float]: - if token_address == erc20.address: - return 0.4 - elif token_address == random_address: - return 0.1 - - get_token_eth_value_mock.side_effect = get_token_eth_value - for expected_token_eth_value in (0.4, 0.1): - with self.subTest(expected_token_eth_value=expected_token_eth_value): - balances = balance_service.get_usd_balances(safe_address) - self.assertEqual(len(balances), 2) - self.assertCountEqual( - 
balances, - [ - BalanceWithFiat( - None, - None, - 0, - 1.0, - timezone_now_mock.return_value, - 0.0, - 123.4, - ), - BalanceWithFiat( - erc20.address, - balance_service.get_token_info(erc20.address), - tokens_value, - expected_token_eth_value, - timezone_now_mock.return_value, - round( - 123.4 - * expected_token_eth_value - * (tokens_value / 1e18), - 4, - ), - round(123.4 * expected_token_eth_value, 4), - ), - ], - ) - token = Token.objects.get(address=erc20.address) - token.copy_price = random_address - token.save(update_fields=["copy_price"]) - balance_service.cache_token_info.clear() - def test_filter_addresses(self): balance_service = self.balance_service db_not_trusted_addresses = [ diff --git a/safe_transaction_service/history/tests/test_commands.py b/safe_transaction_service/history/tests/test_commands.py index abd0832c..3931484c 100644 --- a/safe_transaction_service/history/tests/test_commands.py +++ b/safe_transaction_service/history/tests/test_commands.py @@ -8,21 +8,31 @@ from django.test import TestCase from django_celery_beat.models import PeriodicTask +from eth_account import Account +from gnosis.eth.account_abstraction import BundlerClient from gnosis.eth.ethereum_client import EthereumClient, EthereumNetwork +from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin from ..indexers import Erc20EventsIndexer, InternalTxIndexer, SafeEventsIndexer -from ..models import IndexingStatus, ProxyFactory, SafeMasterCopy +from ..models import ( + IndexingStatus, + InternalTxDecoded, + ProxyFactory, + SafeLastStatus, + SafeMasterCopy, +) from ..services import IndexServiceProvider from ..tasks import logger as task_logger from .factories import ( MultisigTransactionFactory, SafeContractFactory, + SafeLastStatusFactory, SafeMasterCopyFactory, ) -class TestCommands(TestCase): +class TestCommands(SafeTestCaseMixin, TestCase): @mock.patch.object(EthereumClient, "get_network", autospec=True) def _test_setup_service( self, @@ -51,13 +61,13 @@ def _test_setup_service( self.assertGreater(PeriodicTask.objects.count(), 0) # Check last master copy was created - last_master_copy_address = "0x6851D6fDFAfD08c0295C392436245E5bc78B0185" + last_master_copy_address = "0x41675C099F32341bf84BFc5382aF534df5C7461a" last_master_copy = SafeMasterCopy.objects.get(address=last_master_copy_address) self.assertGreater(last_master_copy.initial_block_number, 0) self.assertGreater(last_master_copy.tx_block_number, 0) # Check last proxy factory was created - last_proxy_factory_address = "0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B" + last_proxy_factory_address = "0x4e1DCf7AD4e460CfD30791CCC4F9c8a4f820ec67" last_proxy_factory = ProxyFactory.objects.get( address=last_proxy_factory_address ) @@ -359,10 +369,10 @@ def test_setup_service_mainnet(self): last_proxy_factory.tx_block_number, last_proxy_factory_initial_block ) - # At May 2023 we support 9 Master Copies, 2 L2 Master Copies and 5 Proxy Factories - self.assertEqual(SafeMasterCopy.objects.count(), 9) - self.assertEqual(SafeMasterCopy.objects.l2().count(), 2) - self.assertEqual(ProxyFactory.objects.count(), 5) + # At Nov 2023 we support 12 Master Copies, 3 L2 Master Copies and 6 Proxy Factories + self.assertEqual(SafeMasterCopy.objects.count(), 12) + self.assertEqual(SafeMasterCopy.objects.l2().count(), 3) + self.assertEqual(ProxyFactory.objects.count(), 6) def test_setup_service_mainnet_erc20_indexing_setup(self): # Test IndexingStatus ERC20 is not modified if higher than the oldest master copy @@ -381,14 +391,8 @@ def 
test_setup_service_mainnet_erc20_indexing_setup(self): first_safe_block_deployed + 20, ) - def test_setup_service_rinkeby(self): - self._test_setup_service(EthereumNetwork.RINKEBY) - - def test_setup_service_goerli(self): - self._test_setup_service(EthereumNetwork.GOERLI) - - def test_setup_service_kovan(self): - self._test_setup_service(EthereumNetwork.KOVAN) + def test_setup_service_sepolia(self): + self._test_setup_service(EthereumNetwork.SEPOLIA) @mock.patch.object(EthereumClient, "get_network", autospec=True) def test_setup_service_not_valid_network( @@ -418,10 +422,16 @@ def test_export_multisig_tx_data(self): call_command(command, arguments, stdout=buf) self.assertIn("Start exporting of 1", buf.getvalue()) + @mock.patch( + "safe_transaction_service.history.management.commands.check_chainid_matches.get_bundler_client", + return_value=None, + ) @mock.patch( "safe_transaction_service.history.management.commands.check_chainid_matches.get_chain_id" ) - def test_check_chainid_matches(self, get_chain_id_mock: MagicMock): + def test_check_chainid_matches( + self, get_chain_id_mock: MagicMock, get_bundler_client_mock: MagicMock + ): command = "check_chainid_matches" # Create ChainId model @@ -443,3 +453,101 @@ def test_check_chainid_matches(self, get_chain_id_mock: MagicMock): buf = StringIO() call_command(command, stdout=buf) self.assertIn("EthereumRPC chainId 1 looks good", buf.getvalue()) + + @mock.patch.object(BundlerClient, "get_chain_id", return_value=1234) + @mock.patch( + "safe_transaction_service.history.management.commands.check_chainid_matches.get_bundler_client", + return_value=BundlerClient(""), + ) + @mock.patch( + "safe_transaction_service.history.management.commands.check_chainid_matches.get_chain_id", + return_value=EthereumNetwork.MAINNET.value, + ) + def test_check_chainid_bundler_matches( + self, + get_chain_id_mock: MagicMock, + get_bundler_client_mock: MagicMock, + bundler_get_chain_id_mock: MagicMock, + ): + command = "check_chainid_matches" + with self.assertRaisesMessage( + CommandError, + "ERC4337 BundlerClient chainId 1234 does not match EthereumClient chainId 1", + ): + call_command(command) + + bundler_get_chain_id_mock.return_value = EthereumNetwork.MAINNET.value + buf = StringIO() + call_command(command, stdout=buf) + self.assertEqual( + "EthereumRPC chainId 1 looks good\nERC4337 BundlerClient chainId 1 looks good\n", + buf.getvalue(), + ) + + @mock.patch( + "safe_transaction_service.history.management.commands.check_index_problems.settings.ETH_L2_NETWORK", + return_value=True, + ) # Testing L2 chain, as ganache doesn't have tracing methods + def test_check_index_problems(self, mock_eth_l2_network: MagicMock): + command = "check_index_problems" + buf = StringIO() + # Test empty with empty SafeContract model + call_command(command, stdout=buf) + self.assertIn("Database haven't any address to be checked", buf.getvalue()) + + # Should ignore Safe with nonce 0 + owner = Account.create() + safe = self.deploy_test_safe( + number_owners=1, + threshold=1, + owners=[owner.address], + initial_funding_wei=1000, + ) + SafeContractFactory(address=safe.address) + SafeLastStatusFactory(nonce=0, address=safe.address) + buf = StringIO() + call_command(command, stdout=buf) + self.assertIn("Database haven't any address to be checked", buf.getvalue()) + + # Should detect missing transactions + data = b"" + value = 122 + to = Account.create().address + multisig_tx = safe.build_multisig_tx(to, value, data) + multisig_tx.sign(owner.key) + tx_hash, _ = 
multisig_tx.execute(self.ethereum_test_account.key) + SafeLastStatus.objects.filter(address=safe.address).update(nonce=1) + self.assertEqual(InternalTxDecoded.objects.count(), 0) + buf = StringIO() + call_command(command, stdout=buf) + self.assertIn( + f"Safe={safe.address} is corrupted, has some old transactions missing", + buf.getvalue(), + ) + self.assertEqual(InternalTxDecoded.objects.count(), 1) + with self.assertRaises(SafeLastStatus.DoesNotExist): + SafeLastStatus.objects.get(address=safe.address) + + # Should work with batch_size option + SafeLastStatusFactory(nonce=1, address=safe.address) + buf = StringIO() + call_command(command, "--batch-size=1", stdout=buf) + self.assertIn( + f"Safe={safe.address} is corrupted, has some old transactions missing", + buf.getvalue(), + ) + self.assertEqual(InternalTxDecoded.objects.count(), 1) + with self.assertRaises(SafeLastStatus.DoesNotExist): + SafeLastStatus.objects.get(address=safe.address) + + # Should detect incorrect nonce + with mock.patch.object(SafeLastStatus, "is_corrupted", return_value=False): + SafeLastStatusFactory(nonce=2, address=safe.address) + buf = StringIO() + call_command(command, stdout=buf) + self.assertIn( + f"Safe={safe.address} stored nonce=2 is different from blockchain-nonce=1", + buf.getvalue(), + ) + with self.assertRaises(SafeLastStatus.DoesNotExist): + SafeLastStatus.objects.get(address=safe.address) diff --git a/safe_transaction_service/history/tests/test_helpers.py b/safe_transaction_service/history/tests/test_helpers.py new file mode 100644 index 00000000..a6950659 --- /dev/null +++ b/safe_transaction_service/history/tests/test_helpers.py @@ -0,0 +1,45 @@ +from unittest.mock import MagicMock + +from django.test import TestCase + +from hexbytes import HexBytes + +from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin + +from safe_transaction_service.history.helpers import DelegateSignatureHelperV2 + + +class TestDelegateSignatureHelperV2(SafeTestCaseMixin, TestCase): + + def test_calculate_hash(self): + # Mock calculate_totp + DelegateSignatureHelperV2.calculate_totp = MagicMock( + side_effect=lambda previous: 123456 if not previous else 654321 + ) + + delegate_address = "0x1234567890123456789012345678901234567890" + chain_id = 1 + + # Hash calculated when totp previous is false + expected_hash_previous_totp_false = HexBytes( + "0xc095ec37d1798b39b8cf9306a3d6788f6118f46a0d18fcfac037c8306bdbf397" + ) + + result_hash = DelegateSignatureHelperV2.calculate_hash( + delegate_address, chain_id, False + ) + + DelegateSignatureHelperV2.calculate_totp.assert_called_once_with(previous=False) + self.assertEqual(result_hash, expected_hash_previous_totp_false) + + # Hash calculated when totp previous is true + expected_hash_previous_totp_true = HexBytes( + "0xbf910dbf371090157231e49e7530c44b5ecf6a24fd4322be85465c13dbcb1459" + ) + + result_hash = DelegateSignatureHelperV2.calculate_hash( + delegate_address, chain_id, True + ) + + DelegateSignatureHelperV2.calculate_totp.assert_called_with(previous=True) + self.assertEqual(result_hash, expected_hash_previous_totp_true) diff --git a/safe_transaction_service/history/tests/test_index_service.py b/safe_transaction_service/history/tests/test_index_service.py index fe98f9bb..90710f5b 100644 --- a/safe_transaction_service/history/tests/test_index_service.py +++ b/safe_transaction_service/history/tests/test_index_service.py @@ -4,10 +4,11 @@ from django.test import TestCase from eth_account import Account -from web3 import Web3 +from requests.exceptions import 
ConnectionError as RequestsConnectionError from gnosis.eth import EthereumClient from gnosis.eth.tests.ethereum_test_case import EthereumTestCaseMixin +from gnosis.eth.utils import fast_keccak_text from ..models import ( EthereumTx, @@ -87,7 +88,7 @@ def test_create_or_update_from_tx_hashes_existing(self): # Test block hash changes ethereum_tx = ethereum_txs[0] - ethereum_tx.block.block_hash = Web3.keccak(text="aloha") + ethereum_tx.block.block_hash = fast_keccak_text("aloha") ethereum_tx.block.save(update_fields=["block_hash"]) tx_hash = ethereum_tx.tx_hash @@ -121,6 +122,10 @@ def test_is_service_synced(self, current_block_number_mock: PropertyMock): ) self.assertTrue(self.index_service.is_service_synced()) + # Test connection error to the node + current_block_number_mock.side_effect = RequestsConnectionError + self.assertFalse(self.index_service.is_service_synced()) + def test_reprocess_addresses(self): index_service: IndexService = self.index_service self.assertIsNone(index_service.reprocess_addresses([])) diff --git a/safe_transaction_service/history/tests/test_migrations.py b/safe_transaction_service/history/tests/test_migrations.py index ee6a12b8..181e77a8 100644 --- a/safe_transaction_service/history/tests/test_migrations.py +++ b/safe_transaction_service/history/tests/test_migrations.py @@ -5,9 +5,10 @@ from django_test_migrations.migrator import Migrator from eth_account import Account -from web3 import Web3 +from hexbytes import HexBytes -from safe_transaction_service.history.tests.factories import MultisigTransactionFactory +from gnosis.eth.utils import fast_keccak, fast_keccak_text +from gnosis.safe.safe_signature import SafeSignatureApprovedHash class TestMigrations(TestCase): @@ -27,13 +28,13 @@ def build_ethereum_tx(self, ethereum_block_class, ethereum_tx_class): gas_limit=2, gas_used=2, timestamp=timezone.now(), - block_hash=Web3.keccak(b"34"), - parent_hash=Web3.keccak(b"12"), + block_hash=fast_keccak(b"34"), + parent_hash=fast_keccak(b"12"), ) return ethereum_tx_class.objects.create( block=ethereum_block, - tx_hash=Web3.keccak(b"tx-hash"), + tx_hash=fast_keccak(b"tx-hash"), gas=23000, gas_price=1, nonce=0, @@ -55,7 +56,7 @@ def test_migration_forward_0068(self): ] for origin in origins: MultisigTransactionOld.objects.create( - safe_tx_hash=Web3.keccak(text=f"multisig-tx-{origin}").hex(), + safe_tx_hash=fast_keccak_text(f"multisig-tx-{origin}").hex(), safe=Account.create().address, value=0, operation=0, @@ -74,21 +75,21 @@ def test_migration_forward_0068(self): ) # String should keep string - hash = Web3.keccak(text=f"multisig-tx-{origins[0]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[0]}").hex() self.assertEqual(MultisigTransactionNew.objects.get(pk=hash).origin, origins[0]) # String json should be converted to json - hash = Web3.keccak(text=f"multisig-tx-{origins[1]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[1]}").hex() self.assertEqual( MultisigTransactionNew.objects.get(pk=hash).origin, json.loads(origins[1]) ) # Empty string should be empty object - hash = Web3.keccak(text=f"multisig-tx-{origins[2]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[2]}").hex() self.assertEqual(MultisigTransactionNew.objects.get(pk=hash).origin, {}) # None should be empty object - hash = Web3.keccak(text=f"multisig-tx-{origins[2]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[2]}").hex() self.assertEqual(MultisigTransactionNew.objects.get(pk=hash).origin, {}) def test_migration_backward_0068(self): @@ -101,7 +102,7 @@ def 
test_migration_backward_0068(self): origins = ["{ TestString", {"url": "https://example.com", "name": "app"}, {}] for origin in origins: MultisigTransactionNew.objects.create( - safe_tx_hash=Web3.keccak(text=f"multisig-tx-{origin}").hex(), + safe_tx_hash=fast_keccak_text(f"multisig-tx-{origin}").hex(), safe=Account.create().address, value=0, operation=0, @@ -120,17 +121,17 @@ def test_migration_backward_0068(self): ) # String should keep string - hash = Web3.keccak(text=f"multisig-tx-{origins[0]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[0]}").hex() self.assertEqual(MultisigTransactionOld.objects.get(pk=hash).origin, origins[0]) # Json should be converted to a string json - hash = Web3.keccak(text=f"multisig-tx-{origins[1]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[1]}").hex() self.assertEqual( MultisigTransactionOld.objects.get(pk=hash).origin, json.dumps(origins[1]) ) # Empty object should be None - hash = Web3.keccak(text=f"multisig-tx-{origins[2]}").hex() + hash = fast_keccak_text(f"multisig-tx-{origins[2]}").hex() self.assertEqual(MultisigTransactionOld.objects.get(pk=hash).origin, None) def test_migration_forward_0069(self): @@ -252,23 +253,28 @@ def test_migration_forward_0073_safe_apps_links(self): new_state = self.migrator.apply_initial_migration( ("history", "0072_safecontract_banned_and_more"), ) - - # Factories can be used as there are no database definition changes - # Make sure there are no issues with empty `origin` or `origin` lacking `url` - MultisigTransactionFactory(origin={"not_url": "random"}) - - # Make sure other urls are not affected - MultisigTransactionFactory( - origin={"url": "https://app.zerion.io", "name": "Zerion"} - ) - - # This origin must be replaced - MultisigTransactionFactory( - origin={ + origins = [ + {"not_url": "random"}, + {"url": "https://app.zerion.io", "name": "Zerion"}, + { "url": "https://apps.gnosis-safe.io/tx-builder/", "name": "Transaction Builder", - } - ) + }, + ] + + MultisigTransaction = new_state.apps.get_model("history", "MultisigTransaction") + for origin in origins: + MultisigTransaction.objects.create( + safe_tx_hash=fast_keccak_text(f"multisig-tx-{origin}").hex(), + safe=Account.create().address, + value=0, + operation=0, + safe_tx_gas=0, + base_gas=0, + gas_price=0, + nonce=0, + origin=origin, + ) new_state = self.migrator.apply_tested_migration( ("history", "0073_safe_apps_links"), @@ -295,22 +301,28 @@ def test_migration_backward_0073_safe_apps_links(self): ("history", "0073_safe_apps_links"), ) - # Factories can be used as there are no database definition changes - # Make sure there are no issues with empty `origin` or `origin` lacking `url` - MultisigTransactionFactory(origin={"not_url": "random"}) - - # Make sure other urls are not affected - MultisigTransactionFactory( - origin={"url": "https://app.zerion.io", "name": "Zerion"} - ) - - # This origin must be replaced - MultisigTransactionFactory( - origin={ - "url": "https://apps-portal.safe.global/tx-builder/", + origins = [ + {"not_url": "random"}, + {"url": "https://app.zerion.io", "name": "Zerion"}, + { + "url": "https://apps.gnosis-safe.io/tx-builder/", "name": "Transaction Builder", - } - ) + }, + ] + + MultisigTransaction = new_state.apps.get_model("history", "MultisigTransaction") + for origin in origins: + MultisigTransaction.objects.create( + safe_tx_hash=fast_keccak_text(f"multisig-tx-{origin}").hex(), + safe=Account.create().address, + value=0, + operation=0, + safe_tx_gas=0, + base_gas=0, + gas_price=0, + nonce=0, + origin=origin, + 
) new_state = self.migrator.apply_tested_migration( ("history", "0072_safecontract_banned_and_more"), @@ -328,3 +340,75 @@ def test_migration_backward_0073_safe_apps_links(self): }, ], ) + + def test_migration_forward_0080_alter_multisigconfirmation_signature(self): + old_state = self.migrator.apply_initial_migration( + ("history", "0079_alter_erc20transfer_unique_together_and_more"), + ) + + MultisigConfirmation = old_state.apps.get_model( + "history", "MultisigConfirmation" + ) + + owner = Account.create().address + safe_tx_hash = fast_keccak_text("tx-hash") + safe_signature = SafeSignatureApprovedHash.build_for_owner(owner, safe_tx_hash) + + MultisigConfirmation.objects.create( + multisig_transaction_hash=safe_tx_hash, + owner=owner, + signature=safe_signature.export_signature(), + signature_type=safe_signature.signature_type.value, + ) + self.assertEqual( + HexBytes(MultisigConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + new_state = self.migrator.apply_tested_migration( + ("history", "0080_alter_multisigconfirmation_signature"), + ) + + MultisigConfirmation = new_state.apps.get_model( + "history", "MultisigConfirmation" + ) + self.assertEqual( + HexBytes(MultisigConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + def test_migration_backward_0080_alter_multisigconfirmation_signature(self): + new_state = self.migrator.apply_initial_migration( + ("history", "0080_alter_multisigconfirmation_signature"), + ) + + MultisigConfirmation = new_state.apps.get_model( + "history", "MultisigConfirmation" + ) + + owner = Account.create().address + safe_tx_hash = fast_keccak_text("tx-hash") + safe_signature = SafeSignatureApprovedHash.build_for_owner(owner, safe_tx_hash) + + MultisigConfirmation.objects.create( + multisig_transaction_hash=safe_tx_hash, + owner=owner, + signature=safe_signature.export_signature(), + signature_type=safe_signature.signature_type.value, + ) + self.assertEqual( + HexBytes(MultisigConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + old_state = self.migrator.apply_tested_migration( + ("history", "0079_alter_erc20transfer_unique_together_and_more"), + ) + + MultisigConfirmation = old_state.apps.get_model( + "history", "MultisigConfirmation" + ) + self.assertEqual( + HexBytes(MultisigConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) diff --git a/safe_transaction_service/history/tests/test_models.py b/safe_transaction_service/history/tests/test_models.py index e815c19e..dac543d9 100644 --- a/safe_transaction_service/history/tests/test_models.py +++ b/safe_transaction_service/history/tests/test_models.py @@ -9,10 +9,11 @@ from django.utils import timezone from eth_account import Account -from web3 import Web3 +from gnosis.eth.utils import fast_keccak_text from gnosis.safe.safe_signature import SafeSignatureType +from safe_transaction_service.account_abstraction.tests.mocks import aa_tx_receipt_mock from safe_transaction_service.contracts.models import ContractQuerySet from safe_transaction_service.contracts.tests.factories import ContractFactory @@ -35,6 +36,7 @@ SafeStatus, WebHook, ) +from ..utils import clean_receipt_log from .factories import ( ERC20TransferFactory, ERC721TransferFactory, @@ -60,7 +62,7 @@ class TestModelSignals(TestCase): def test_bind_confirmations(self): - safe_tx_hash = Web3.keccak(text="prueba") + safe_tx_hash = fast_keccak_text("prueba") ethereum_tx = EthereumTxFactory() MultisigConfirmation.objects.create( 
ethereum_tx=ethereum_tx, @@ -87,7 +89,7 @@ def test_bind_confirmations(self): self.assertEqual(multisig_tx.confirmations.count(), 1) def test_bind_confirmations_reverse(self): - safe_tx_hash = Web3.keccak(text="prueba") + safe_tx_hash = fast_keccak_text("prueba") ethereum_tx = EthereumTxFactory() multisig_tx, _ = MultisigTransaction.objects.get_or_create( safe_tx_hash=safe_tx_hash, @@ -362,6 +364,21 @@ def test_create_from_tx_dict(self): ethereum_tx.transaction_index, tx_receipt["transactionIndex"] ) + def test_account_abstraction_tx_hashes(self): + self.assertEqual(len(EthereumTx.objects.account_abstraction_txs()), 0) + + # Insert random transaction + EthereumTxFactory() + self.assertEqual(len(EthereumTx.objects.account_abstraction_txs()), 0) + + # Insert a 4337 transaction + ethereum_tx = EthereumTxFactory( + logs=[clean_receipt_log(log) for log in aa_tx_receipt_mock["logs"]] + ) + ethereum_txs = EthereumTx.objects.account_abstraction_txs() + self.assertEqual(len(ethereum_txs), 1) + self.assertEqual(ethereum_txs[0], ethereum_tx) + class TestTokenTransfer(TestCase): def test_transfer_to_erc721(self): @@ -1000,11 +1017,11 @@ def test_safe_status_previous(self): self.assertEqual(safe_status_5.previous(), safe_status_2) -class TestSafeContract(TestCase): - def test_get_delegates_for_safe(self): +class TestSafeContractDelegate(TestCase): + def test_get_for_safe(self): random_safe = Account.create().address - self.assertEqual( - SafeContractDelegate.objects.get_delegates_for_safe(random_safe), set() + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe(random_safe, []), [] ) safe_contract_delegate = SafeContractDelegateFactory() @@ -1013,15 +1030,117 @@ def test_get_delegates_for_safe(self): ) safe_contract_delegate_another_safe = SafeContractDelegateFactory() safe_address = safe_contract_delegate.safe_contract.address + self.assertCountEqual( - SafeContractDelegate.objects.get_delegates_for_safe(safe_address), - [safe_contract_delegate.delegate, safe_contract_delegate_2.delegate], + SafeContractDelegate.objects.get_for_safe( + safe_address, + [safe_contract_delegate.delegator, safe_contract_delegate_2.delegator], + ), + [safe_contract_delegate, safe_contract_delegate_2], ) another_safe_address = safe_contract_delegate_another_safe.safe_contract.address + # Use a Safe with an owner not matching + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe( + another_safe_address, [safe_contract_delegate.delegator] + ), + [], + ) + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe( + another_safe_address, [safe_contract_delegate_another_safe.delegator] + ), + [safe_contract_delegate_another_safe], + ) + + # Create delegate without Safe + safe_contract_delegate_without_safe = SafeContractDelegateFactory( + safe_contract=None + ) + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe( + safe_address, + [ + safe_contract_delegate.delegator, + safe_contract_delegate_2.delegator, + safe_contract_delegate_without_safe.delegator, + ], + ), + [ + safe_contract_delegate, + safe_contract_delegate_2, + safe_contract_delegate_without_safe, + ], + ) + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe( + another_safe_address, + [ + safe_contract_delegate_another_safe.delegator, + safe_contract_delegate_without_safe.delegator, + ], + ), + [safe_contract_delegate_another_safe, safe_contract_delegate_without_safe], + ) + + def test_get_for_safe_and_delegate(self): + delegator = Account.create().address + delegate = Account.create().address + 
safe_address = Account.create().address + safe_address_2 = Account.create().address + + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe_and_delegate( + Account.create().address, [delegator], delegate + ), + [], + ) + + safe_contract_delegate = SafeContractDelegateFactory( + safe_contract__address=safe_address, delegator=delegator + ) + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe_and_delegate( + safe_address, [delegator], delegate + ), + [], + ) + + safe_contract_delegate_2 = SafeContractDelegateFactory( + safe_contract=safe_contract_delegate.safe_contract, + delegator=delegator, + delegate=delegate, + ) + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe_and_delegate( + safe_address, [delegator], delegate + ), + [safe_contract_delegate_2], + ) + + # Delegate should not be valid for another Safe + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe_and_delegate( + safe_address_2, [delegator], delegate + ), + [], + ) + + # If Safe is not set, delegate is valid for any Safe which delegator is an owner + safe_contract_delegate_2.safe_contract = None + safe_contract_delegate_2.save() + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe_and_delegate( + safe_address, [delegator], delegate + ), + [safe_contract_delegate_2], + ) self.assertCountEqual( - SafeContractDelegate.objects.get_delegates_for_safe(another_safe_address), - [safe_contract_delegate_another_safe.delegate], + SafeContractDelegate.objects.get_for_safe_and_delegate( + safe_address_2, [delegator], delegate + ), + [safe_contract_delegate_2], ) def test_get_delegates_for_safe_and_owners(self): @@ -1066,6 +1185,45 @@ def test_get_delegates_for_safe_and_owners(self): set(), ) + def test_remove_delegates_for_owner_in_safe(self): + safe_address = Account.create().address + owner = Account.create().address + self.assertCountEqual( + SafeContractDelegate.objects.get_for_safe(None, [owner]), [] + ) + + safe_contract_delegate = SafeContractDelegateFactory( + delegator=owner, safe_contract=None + ) + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + Account.create().address, [owner] + ), + {safe_contract_delegate.delegate}, + ) + + safe_specific_delegate = SafeContractDelegateFactory( + delegator=owner, safe_contract__address=safe_address + ) + + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [owner] + ), + {safe_contract_delegate.delegate, safe_specific_delegate.delegate}, + ) + + SafeContractDelegate.objects.remove_delegates_for_owner_in_safe( + safe_address, owner + ) + + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [owner] + ), + {safe_contract_delegate.delegate}, + ) + class TestMultisigConfirmations(TestCase): def test_remove_unused_confirmations(self): @@ -1155,7 +1313,7 @@ def test_get_or_create_from_block(self): # Test block with different block-hash but same block number mock_block_2 = dict(mock_block) - mock_block_2["hash"] = Web3.keccak(text="another-hash") + mock_block_2["hash"] = fast_keccak_text("another-hash") self.assertNotEqual(mock_block["hash"], mock_block_2["hash"]) with self.assertRaises(IntegrityError): EthereumBlock.objects.get_or_create_from_block(mock_block_2) @@ -1380,17 +1538,17 @@ def test_not_indexed_metadata_contract_addresses(self): ) def test_with_confirmations_required(self): - # This should never be picked + # This should never be picked, Safe not matching 
SafeStatusFactory(nonce=0, threshold=4) - multisig_transaction = MultisigTransactionFactory() + multisig_transaction = MultisigTransactionFactory(nonce=0) self.assertIsNone( MultisigTransaction.objects.with_confirmations_required() .first() .confirmations_required ) - # SafeStatus not matching the EthereumTx + # SafeStatus not matching the nonce (looking for threshold in nonce=0) safe_status = SafeStatusFactory( address=multisig_transaction.safe, nonce=1, threshold=8 ) @@ -1400,8 +1558,8 @@ def test_with_confirmations_required(self): .confirmations_required ) - safe_status.internal_tx.ethereum_tx = multisig_transaction.ethereum_tx - safe_status.internal_tx.save(update_fields=["ethereum_tx"]) + safe_status.nonce = 0 + safe_status.save(update_fields=["nonce"]) self.assertEqual( MultisigTransaction.objects.with_confirmations_required() @@ -1410,8 +1568,8 @@ def test_with_confirmations_required(self): 8, ) - # It will not be picked, as EthereumTx is not matching - SafeStatusFactory(nonce=2, threshold=15) + # It will not be picked, as nonce is still matching the previous SafeStatus + SafeStatusFactory(address=multisig_transaction.safe, nonce=1, threshold=15) self.assertEqual( MultisigTransaction.objects.with_confirmations_required() .first() @@ -1419,7 +1577,16 @@ def test_with_confirmations_required(self): 8, ) - # As EthereumTx is empty, the latest safe status will be used if available + multisig_transaction.nonce = 1 + multisig_transaction.save(update_fields=["nonce"]) + self.assertEqual( + MultisigTransaction.objects.with_confirmations_required() + .first() + .confirmations_required, + 15, + ) + + # As EthereumTx is empty, the latest Safe Status will be used if available multisig_transaction.ethereum_tx = None multisig_transaction.save(update_fields=["ethereum_tx"]) self.assertIsNone( diff --git a/safe_transaction_service/history/tests/test_proxy_factory_indexer.py b/safe_transaction_service/history/tests/test_proxy_factory_indexer.py index e5edc9f7..07b97ab3 100644 --- a/safe_transaction_service/history/tests/test_proxy_factory_indexer.py +++ b/safe_transaction_service/history/tests/test_proxy_factory_indexer.py @@ -1,3 +1,4 @@ +from django.conf import settings from django.test import TestCase from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin @@ -13,14 +14,21 @@ def test_proxy_factory_indexer(self): proxy_factory_indexer.confirmations = 0 self.assertEqual(proxy_factory_indexer.start(), (0, 0)) ProxyFactoryFactory(address=self.proxy_factory.address) - ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract( - self.ethereum_test_account, self.safe_contract_address + # Run indexer once to avoid previous events from previous tests + proxy_factory_indexer.start() + safe_contracts_count = SafeContract.objects.count() + ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce( + self.ethereum_test_account, self.safe_contract.address ) safe_contract_address = ethereum_tx_sent.contract_address self.w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash) + + blocks_to_reindex_again = settings.ETH_EVENTS_BLOCKS_TO_REINDEX_AGAIN + # We expect 1 event (Safe Creation) and `1 + blocks_to_reindex_again` blocks self.assertEqual( proxy_factory_indexer.start(), - (1, self.ethereum_client.current_block_number + 1), + (1, 1 + blocks_to_reindex_again), ) - self.assertEqual(SafeContract.objects.count(), 1) + # Test if only 1 Safe was created + self.assertEqual(SafeContract.objects.count(), 1 + safe_contracts_count) 
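The reworked `with_confirmations_required` assertions above now match `SafeStatus` rows by Safe address and nonce rather than by `EthereumTx`. A hedged sketch of an annotation that would satisfy those assertions, assuming Django's `Subquery`/`OuterRef`; the service's real queryset may differ (for instance in how it falls back to the latest Safe Status when `ethereum_tx` is empty):

from django.db.models import OuterRef, QuerySet, Subquery


def with_confirmations_required(multisig_txs: QuerySet, safe_statuses: QuerySet) -> QuerySet:
    # Pull the threshold from the SafeStatus recorded for the same Safe and nonce
    threshold_for_nonce = safe_statuses.filter(
        address=OuterRef("safe"), nonce=OuterRef("nonce")
    ).values("threshold")[:1]
    return multisig_txs.annotate(confirmations_required=Subquery(threshold_for_nonce))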
self.assertTrue(SafeContract.objects.get(address=safe_contract_address)) diff --git a/safe_transaction_service/history/tests/test_safe_events_indexer.py b/safe_transaction_service/history/tests/test_safe_events_indexer.py index 2526dcd0..8ab6f3db 100644 --- a/safe_transaction_service/history/tests/test_safe_events_indexer.py +++ b/safe_transaction_service/history/tests/test_safe_events_indexer.py @@ -1,12 +1,14 @@ from django.test import TestCase from eth_account import Account +from eth_typing import ChecksumAddress from hexbytes import HexBytes +from web3 import Web3 from web3.datastructures import AttributeDict from web3.types import LogReceipt from gnosis.eth.constants import NULL_ADDRESS, SENTINEL_ADDRESS -from gnosis.eth.contracts import get_safe_V1_3_0_contract +from gnosis.eth.contracts import get_safe_V1_3_0_contract, get_safe_V1_4_1_contract from gnosis.safe import Safe from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin @@ -27,16 +29,33 @@ from .mocks.mocks_safe_events_indexer import safe_events_mock -class TestSafeEventsIndexer(SafeTestCaseMixin, TestCase): +class TestSafeEventsIndexerV1_4_1(SafeTestCaseMixin, TestCase): def setUp(self) -> None: self.safe_events_indexer = SafeEventsIndexer( self.ethereum_client, confirmations=0, blocks_to_reindex_again=0 ) - self.safe_tx_processor = SafeTxProcessor(self.ethereum_client, None) + self.safe_tx_processor = SafeTxProcessor(self.ethereum_client, None, None) def tearDown(self) -> None: SafeEventsIndexerProvider.del_singleton() + @property + def safe_contract_version(self) -> str: + return "1.4.1" + + @property + def safe_contract(self): + """ + :return: Last Safe Contract available + """ + return self.safe_contract_V1_4_1 + + def get_safe_contract(self, w3: Web3, address: ChecksumAddress): + """ + :return: Last Safe Contract available + """ + return get_safe_V1_4_1_contract(w3, address=address) + def test_safe_events_indexer_provider(self): safe_events_indexer = SafeEventsIndexerProvider() self.assertEqual(safe_events_indexer.confirmations, 0) @@ -47,54 +66,69 @@ def test_safe_events_indexer_provider(self): def test_invalid_event(self): """ - AddedOwner event broke indexer on BSC. 
Same signature, but different number of indexed attributes + Events with same name and types, but different indexed elements can break the indexer + We will test the expected: + + event ExecutionSuccess( + bytes32 txHash, + uint256 payment + ); + + With the made out: + event ExecutionSuccess( + bytes32 indexed txHash, + uint256 indexed payment + ); """ valid_event: LogReceipt = AttributeDict( { - "address": "0x384f55D8BD4046461433A56bb87fe4aA615C0cc8", - "blockHash": HexBytes( - "0x551a6e5ca972c453873898be696980d7ff65d27a6f80ddffab17591144c99e01" - ), - "blockNumber": 9205844, - "data": "0x000000000000000000000000a1350318b2907ee0f6c8918eddc778a0b633e774", - "logIndex": 0, - "removed": False, + "address": "0xE618d8147210d45ffCBd2E3b33DD44252a43fF76", "topics": [ HexBytes( - "0x9465fa0c962cc76958e6373a993326400c1c94f8be2fe3a952adfa7f60b2ea26" + "0x442e715f626346e8c54381002da614f62bee8d27386535b2521ec8540898556e" ) ], + "data": HexBytes( + "0x55e61223bfe56101c8243067945cf90da23f0e0a3409eac65dc6e8852833cf440000000000000000000000000000000000000000000000000000000000000000" + ), + "blockNumber": 9727973, "transactionHash": HexBytes( - "0x7e4b2bb0ac5129552908e9c8433ea1746f76616188e8c3597a6bdce88d0b474c" + "0x9afccb1cf5498ae564b5589bf4bbf0b29b486f52952d1270dd51702ed2e29ff9" ), - "transactionIndex": 0, - "transactionLogIndex": "0x0", - "type": "mined", + "transactionIndex": 50, + "blockHash": HexBytes( + "0x3b2a9816f9b4280dc0190f1aafb910c99efbbf836e1865ab068ecbf6c0402fa7" + ), + "logIndex": 129, + "removed": False, } ) dangling_event: LogReceipt = AttributeDict( { - "address": "0x1E44C806f1AfD4f420C10c8088f4e0388F066E7A", + "address": "0xE618d8147210d45ffCBd2E3b33DD44252a43fF76", "topics": [ HexBytes( - "0x9465fa0c962cc76958e6373a993326400c1c94f8be2fe3a952adfa7f60b2ea26" + "0x442e715f626346e8c54381002da614f62bee8d27386535b2521ec8540898556e" ), HexBytes( - "0x00000000000000000000000020212521370dd2dde0b0e3ac25b65eb3e859d303" + "0x55e61223bfe56101c8243067945cf90da23f0e0a3409eac65dc6e8852833cf44" + ), + HexBytes( + "0x0000000000000000000000000000000000000000000000000000000000000000" ), ], - "data": "0x", - "blockNumber": 10129293, + "data": HexBytes("0x"), + "blockNumber": 9727973, "transactionHash": HexBytes( - "0xc19ef099702fb9f7d7962925428683eff534e009210ef2cf23135f43962c192a" + "0x9afccb1cf5498ae564b5589bf4bbf0b29b486f52952d1270dd51702ed2e29ff9" ), - "transactionIndex": 89, + "transactionIndex": 50, "blockHash": HexBytes( - "0x6b41eac9177a1606e1a853adf3f3da018fcf476f7d217acb69b7d130bdfaf2c9" + "0x3b2a9816f9b4280dc0190f1aafb910c99efbbf836e1865ab068ecbf6c0402fa7" ), - "logIndex": 290, + "logIndex": 129, "removed": False, } ) @@ -112,23 +146,26 @@ def test_invalid_event(self): valid_event["topics"][0].hex(), self.safe_events_indexer.events_to_listen ) - # Dangling event cannot be decoded + # Dangling event cannot be decoded, but valid event is expected_event = AttributeDict( { "args": AttributeDict( - {"owner": "0xa1350318b2907ee0f6c8918edDC778A0b633e774"} + { + "txHash": b"U\xe6\x12#\xbf\xe5a\x01\xc8$0g\x94\\\xf9\r\xa2?\x0e\n4\t\xea\xc6]\xc6\xe8\x85(3\xcfD", + "payment": 0, + } ), - "event": "AddedOwner", - "logIndex": 0, - "transactionIndex": 0, + "event": "ExecutionSuccess", + "logIndex": 129, + "transactionIndex": 50, "transactionHash": HexBytes( - "0x7e4b2bb0ac5129552908e9c8433ea1746f76616188e8c3597a6bdce88d0b474c" + "0x9afccb1cf5498ae564b5589bf4bbf0b29b486f52952d1270dd51702ed2e29ff9" ), - "address": "0x384f55D8BD4046461433A56bb87fe4aA615C0cc8", + "address": 
"0xE618d8147210d45ffCBd2E3b33DD44252a43fF76", "blockHash": HexBytes( - "0x551a6e5ca972c453873898be696980d7ff65d27a6f80ddffab17591144c99e01" + "0x3b2a9816f9b4280dc0190f1aafb910c99efbbf836e1865ab068ecbf6c0402fa7" ), - "blockNumber": 9205844, + "blockNumber": 9727973, } ) self.assertEqual( @@ -162,18 +199,20 @@ def test_safe_events_indexer(self): address=self.safe_contract.address, initial_block_number=initial_block_number, tx_block_number=initial_block_number, - version="1.3.0", + version=self.safe_contract_version, l2=True, ) - ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract( + ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce( self.ethereum_test_account, self.safe_contract.address, initializer=initializer, ) safe_address = ethereum_tx_sent.contract_address safe = Safe(safe_address, self.ethereum_client) - safe_contract = get_safe_V1_3_0_contract(self.w3, safe_address) - self.assertEqual(safe_contract.functions.VERSION().call(), "1.3.0") + safe_contract = self.get_safe_contract(self.w3, safe_address) + self.assertEqual( + safe_contract.functions.VERSION().call(), self.safe_contract_version + ) self.assertEqual(InternalTx.objects.count(), 0) self.assertEqual(InternalTxDecoded.objects.count(), 0) @@ -520,8 +559,8 @@ def test_safe_events_indexer(self): ) self.assertEqual(MultisigConfirmation.objects.count(), 9) - # Set guard (nonce: 7) INVALIDATES SAFE, as no more transactions can be done --------------------------------- - guard_address = Account.create().address + # Set guard (nonce: 7) --------------------------------- + guard_address = self.deploy_example_guard() data = HexBytes( self.safe_contract.functions.setGuard(guard_address).build_transaction( {"gas": 1, "gasPrice": 1} @@ -532,7 +571,8 @@ def test_safe_events_indexer(self): multisig_tx.sign(owner_account_1.key) multisig_tx.execute(self.ethereum_test_account.key) # Process events: SafeMultiSigTransaction, ChangedGuard, ExecutionSuccess - self.assertEqual(self.safe_events_indexer.start(), (3, 1)) + # 2 blocks will be processed due to the guard deployment + self.assertEqual(self.safe_events_indexer.start(), (3, 2)) self.safe_tx_processor.process_decoded_transactions(txs_decoded_queryset.all()) # Add one SafeStatus increasing the nonce and another one changing the guard self.assertEqual(SafeStatus.objects.count(), 17) @@ -666,3 +706,22 @@ def test_auto_adjust_block_limit(self): with self.safe_events_indexer.auto_adjust_block_limit(100, 104): pass self.assertEqual(self.safe_events_indexer.block_process_limit, 5) + + +class TestSafeEventsIndexerV1_3_0(TestSafeEventsIndexerV1_4_1): + @property + def safe_contract_version(self) -> str: + return "1.3.0" + + @property + def safe_contract(self): + """ + :return: Last Safe Contract available + """ + return self.safe_contract_V1_3_0 + + def get_safe_contract(self, w3: Web3, address: ChecksumAddress): + """ + :return: Last Safe Contract available + """ + return get_safe_V1_3_0_contract(w3, address=address) diff --git a/safe_transaction_service/history/tests/test_signals.py b/safe_transaction_service/history/tests/test_signals.py index 2c90826f..b6bf704c 100644 --- a/safe_transaction_service/history/tests/test_signals.py +++ b/safe_transaction_service/history/tests/test_signals.py @@ -1,5 +1,6 @@ from datetime import timedelta from unittest import mock +from unittest.mock import MagicMock from django.db.models.signals import post_save from django.test import TestCase @@ -9,9 +10,9 @@ from gnosis.eth import EthereumNetwork from gnosis.safe.tests.safe_test_case 
import SafeTestCaseMixin -from safe_transaction_service.events.tasks import send_event_to_queue_task from safe_transaction_service.notifications.tasks import send_notification_task +from ...events.services.queue_service import QueueService from ...safe_messages.models import SafeMessage, SafeMessageConfirmation from ...safe_messages.tests.factories import ( SafeMessageConfirmationFactory, @@ -83,6 +84,14 @@ def test_build_webhook_payload(self): self.assertEqual(payload["type"], WebHookType.PENDING_MULTISIG_TRANSACTION.name) self.assertEqual(payload["chainId"], str(EthereumNetwork.GANACHE.value)) + payload = build_webhook_payload( + MultisigTransaction, + MultisigTransactionFactory(ethereum_tx=None), + deleted=True, + )[0] + self.assertEqual(payload["type"], WebHookType.DELETED_MULTISIG_TRANSACTION.name) + self.assertEqual(payload["chainId"], str(EthereumNetwork.GANACHE.value)) + safe_address = self.deploy_test_safe().address safe_message = SafeMessageFactory(safe=safe_address) payload = build_webhook_payload(SafeMessage, safe_message)[0] @@ -103,28 +112,28 @@ def test_build_webhook_payload(self): @factory.django.mute_signals(post_save) @mock.patch.object(send_webhook_task, "apply_async") @mock.patch.object(send_notification_task, "apply_async") - @mock.patch.object(send_event_to_queue_task, "delay") + @mock.patch.object(QueueService, "send_event") def test_process_webhook( self, - webhook_task_mock, - send_notification_task_mock, - send_event_to_queue_task_mock, + send_event_mock: MagicMock, + webhook_task_mock: MagicMock, + send_notification_task_mock: MagicMock, ): multisig_confirmation = MultisigConfirmationFactory() process_webhook(MultisigConfirmation, multisig_confirmation, True) webhook_task_mock.assert_called() send_notification_task_mock.assert_called() - send_event_to_queue_task_mock.assert_called() + send_event_mock.assert_called() # reset calls webhook_task_mock.reset_mock() send_notification_task_mock.reset_mock() - send_event_to_queue_task_mock.reset_mock() + send_event_mock.reset_mock() multisig_confirmation.created -= timedelta(minutes=75) process_webhook(MultisigConfirmation, multisig_confirmation, True) webhook_task_mock.assert_not_called() + send_event_mock.assert_not_called() send_notification_task_mock.assert_not_called() - send_event_to_queue_task_mock.assert_not_called() @factory.django.mute_signals(post_save) def test_is_relevant_notification_multisig_confirmation(self): @@ -166,3 +175,48 @@ def test_is_relevant_notification_multisig_transaction(self): self.assertFalse( is_relevant_notification(multisig_tx.__class__, multisig_tx, created=False) ) + + @mock.patch.object(send_webhook_task, "apply_async") + @mock.patch.object(QueueService, "send_event") + def test_signals_are_correctly_fired( + self, + send_event_mock: MagicMock, + webhook_task_mock: MagicMock, + ): + # Not trusted txs should not fire any event + MultisigTransactionFactory(trusted=False) + webhook_task_mock.assert_not_called() + send_event_mock.assert_not_called() + + # Trusted txs should fire an event + multisig_tx: MultisigTransaction = MultisigTransactionFactory(trusted=True) + pending_multisig_transaction_payload = { + "address": multisig_tx.safe, + "safeTxHash": multisig_tx.safe_tx_hash, + "type": WebHookType.EXECUTED_MULTISIG_TRANSACTION.name, + "failed": "false", + "txHash": multisig_tx.ethereum_tx_id, + "chainId": str(EthereumNetwork.GANACHE.value), + } + webhook_task_mock.assert_called_with( + args=(multisig_tx.safe, pending_multisig_transaction_payload), priority=2 + ) + 
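`test_signals_are_correctly_fired` below pins down the payloads pushed through `QueueService.send_event`: an `EXECUTED_MULTISIG_TRANSACTION` payload when a trusted transaction is saved and a `DELETED_MULTISIG_TRANSACTION` payload when it is removed. A hypothetical consumer-side dispatcher for those two shapes (handler names are made up; only the `type`, `address`, `safeTxHash` and `failed` keys are taken from the test payloads):

from typing import Any, Dict


def handle_queue_event(payload: Dict[str, Any]) -> None:
    """Dispatch on the event types asserted in the signals tests."""
    event_type = payload["type"]
    safe, safe_tx_hash = payload["address"], payload["safeTxHash"]
    if event_type == "EXECUTED_MULTISIG_TRANSACTION":
        # `failed` is serialized as the string "false"/"true" in the payload
        on_executed(safe, safe_tx_hash, failed=payload["failed"] == "true")
    elif event_type == "DELETED_MULTISIG_TRANSACTION":
        on_deleted(safe, safe_tx_hash)


def on_executed(safe: str, safe_tx_hash: str, failed: bool) -> None:
    print(f"{safe}: executed {safe_tx_hash} (failed={failed})")


def on_deleted(safe: str, safe_tx_hash: str) -> None:
    print(f"{safe}: deleted {safe_tx_hash}")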
send_event_mock.assert_called_with(pending_multisig_transaction_payload) + + # Deleting a tx should fire an event + webhook_task_mock.reset_mock() + send_event_mock.reset_mock() + safe_tx_hash = multisig_tx.safe_tx_hash + multisig_tx.delete() + + deleted_multisig_transaction_payload = { + "address": multisig_tx.safe, + "safeTxHash": safe_tx_hash, + "type": WebHookType.DELETED_MULTISIG_TRANSACTION.name, + "chainId": str(EthereumNetwork.GANACHE.value), + } + + webhook_task_mock.assert_called_with( + args=(multisig_tx.safe, deleted_multisig_transaction_payload), priority=2 + ) + send_event_mock.assert_called_with(deleted_multisig_transaction_payload) diff --git a/safe_transaction_service/history/tests/test_tasks.py b/safe_transaction_service/history/tests/test_tasks.py index 0b713285..f8cbfd59 100644 --- a/safe_transaction_service/history/tests/test_tasks.py +++ b/safe_transaction_service/history/tests/test_tasks.py @@ -218,16 +218,16 @@ def test_process_decoded_internal_txs_for_safe_task(self): addresses=[safe_address], ) self.assertIn( - f"Safe-address={safe_address} A problem was found in SafeStatus " + f"[{safe_address}] A problem was found in SafeStatus " f"with nonce=2 on internal-tx-id={safe_status_2.internal_tx_id}", cm.output[1], ) self.assertIn( - f"Safe-address={safe_address} Processing traces again", + f"[{safe_address}] Processing traces again", cm.output[2], ) self.assertIn( - f"Safe-address={safe_address} Last known not corrupted SafeStatus with nonce=0 on " + f"[{safe_address}] Last known not corrupted SafeStatus with nonce=0 on " f"block={safe_status_0.internal_tx.ethereum_tx.block_id} , " f"reindexing until block={safe_status_5.block_number}", cm.output[3], @@ -238,7 +238,7 @@ def test_process_decoded_internal_txs_for_safe_task(self): cm.output[4], ) self.assertIn( - f"Safe-address={safe_address} Processing traces again after reindexing", + f"[{safe_address}] Processing traces again after reindexing", cm.output[5], ) diff --git a/safe_transaction_service/history/tests/test_transaction_service.py b/safe_transaction_service/history/tests/test_transaction_service.py index 86744a72..7cb3ebd9 100644 --- a/safe_transaction_service/history/tests/test_transaction_service.py +++ b/safe_transaction_service/history/tests/test_transaction_service.py @@ -5,22 +5,15 @@ from eth_account import Account -from ..models import ( - EthereumTx, - EthereumTxCallType, - ModuleTransaction, - MultisigTransaction, -) +from ..models import EthereumTx, ModuleTransaction, MultisigTransaction from ..services.transaction_service import ( TransactionService, TransactionServiceProvider, ) from .factories import ( ERC20TransferFactory, - ERC721TransferFactory, InternalTxFactory, ModuleTransactionFactory, - MultisigConfirmationFactory, MultisigTransactionFactory, ) @@ -36,57 +29,6 @@ def tearDown(self): super().tearDown() self.transaction_service.redis.flushall() - def test_get_count_relevant_txs_for_safe(self): - transaction_service: TransactionService = self.transaction_service - safe_address = Account.create().address - - self.assertEqual( - transaction_service.get_count_relevant_txs_for_safe(safe_address), 0 - ) - - MultisigTransactionFactory(safe=safe_address) - self.assertEqual( - transaction_service.get_count_relevant_txs_for_safe(safe_address), 1 - ) - - multisig_transaction = MultisigTransactionFactory(safe=safe_address) - MultisigConfirmationFactory(multisig_transaction=multisig_transaction) - MultisigConfirmationFactory(multisig_transaction=multisig_transaction) - # Not related MultisigConfirmation 
should not show - MultisigConfirmationFactory() - ERC20TransferFactory(to=safe_address) - ERC20TransferFactory(_from=safe_address) - ERC721TransferFactory(to=safe_address) - ERC721TransferFactory(_from=safe_address) - ModuleTransactionFactory(safe=safe_address) - InternalTxFactory( - value=5, call_type=EthereumTxCallType.CALL.value, to=safe_address - ) - - self.assertEqual( - transaction_service.get_count_relevant_txs_for_safe(safe_address), 10 - ) - - # InternalTxs without value are not returned - InternalTxFactory( - value=0, call_type=EthereumTxCallType.CALL.value, to=safe_address - ) - - # InternalTxs without proper type are not returned - InternalTxFactory( - value=5, call_type=EthereumTxCallType.DELEGATE_CALL.value, to=safe_address - ) - - self.assertEqual( - transaction_service.get_count_relevant_txs_for_safe(safe_address), 10 - ) - - # A different Safe must be empty - safe_address_2 = Account.create().address - self.assertEqual( - transaction_service.get_count_relevant_txs_for_safe(safe_address_2), 0 - ) - def test_get_all_tx_identifiers(self): transaction_service: TransactionService = self.transaction_service safe_address = Account.create().address diff --git a/safe_transaction_service/history/tests/test_tx_processor.py b/safe_transaction_service/history/tests/test_tx_processor.py index 4a623549..5fd24998 100644 --- a/safe_transaction_service/history/tests/test_tx_processor.py +++ b/safe_transaction_service/history/tests/test_tx_processor.py @@ -5,9 +5,9 @@ from eth_account import Account from eth_utils import keccak -from web3 import Web3 from gnosis.eth.ethereum_client import TracingManager +from gnosis.eth.utils import fast_keccak_text from gnosis.safe.safe_signature import SafeSignatureType from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin @@ -24,6 +24,7 @@ MultisigConfirmation, MultisigTransaction, SafeContract, + SafeContractDelegate, SafeLastStatus, SafeStatus, ) @@ -32,10 +33,11 @@ InternalTxDecodedFactory, MultisigConfirmationFactory, MultisigTransactionFactory, + SafeContractDelegateFactory, SafeLastStatusFactory, SafeMasterCopyFactory, ) -from .mocks.traces import call_trace, module_traces, rinkeby_traces +from .mocks.traces import call_trace, module_traces, testnet_traces logger = logging.getLogger(__name__) @@ -101,6 +103,16 @@ def test_tx_processor_with_factory(self): self.assertEqual(safe_status.nonce, 1) self.assertEqual(safe_status.threshold, threshold) + safe_contract_delegate = SafeContractDelegateFactory( + delegator=owner, safe_contract_id=safe_address + ) + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [owner] + ), + {safe_contract_delegate.delegate}, + ) + another_owner = Account.create().address tx_processor.process_decoded_transactions( [ @@ -120,6 +132,12 @@ def test_tx_processor_with_factory(self): safe_last_status = SafeLastStatus.objects.get(address=safe_address) self.assertEqual(safe_status, SafeStatus.from_status_instance(safe_last_status)) self.assertEqual(safe_status.owners, [new_owner, another_owner]) + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [owner] + ), + set(), + ) self.assertEqual(safe_status.nonce, 2) self.assertEqual(safe_status.threshold, threshold) @@ -143,6 +161,15 @@ def test_tx_processor_with_factory(self): ) self.assertEqual(SafeMessageConfirmation.objects.count(), 2) number_confirmations = MultisigConfirmation.objects.count() + safe_contract_delegate_another_owner = SafeContractDelegateFactory( + 
delegator=another_owner, safe_contract_id=safe_address + ) + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [another_owner] + ), + {safe_contract_delegate_another_owner.delegate}, + ) tx_processor.process_decoded_transactions( [ InternalTxDecodedFactory( @@ -172,6 +199,12 @@ def test_tx_processor_with_factory(self): safe_last_status = SafeLastStatus.objects.get(address=safe_address) self.assertEqual(safe_status, SafeStatus.from_status_instance(safe_last_status)) self.assertEqual(safe_status.owners, [new_owner]) + self.assertEqual( + SafeContractDelegate.objects.get_delegates_for_safe_and_owners( + safe_address, [another_owner] + ), + set(), + ) self.assertEqual(safe_status.nonce, 3) self.assertEqual(safe_status.threshold, threshold) @@ -256,7 +289,7 @@ def test_tx_processor_with_factory(self): TracingManager, "trace_transaction", autospec=True, - return_value=rinkeby_traces, + return_value=testnet_traces, ): # call_trace has [] as a trace address and module txs need to get the grandfather tx, so [0,0] must # be used @@ -326,7 +359,7 @@ def test_tx_processor_with_factory(self): SafeSignatureType.APPROVED_HASH.value, ) - def test_tx_processor_failed(self): + def test_tx_processor_is_failed(self): tx_processor = self.tx_processor # Event for Safes < 1.1.1 logs = [ @@ -340,7 +373,7 @@ def test_tx_processor_failed(self): ethereum_tx = EthereumTxFactory(logs=logs) self.assertTrue(tx_processor.is_failed(ethereum_tx, logs[0]["data"])) self.assertFalse( - tx_processor.is_failed(ethereum_tx, Web3.keccak(text="hola").hex()) + tx_processor.is_failed(ethereum_tx, fast_keccak_text("hola").hex()) ) # Event for Safes >= 1.1.1 @@ -359,7 +392,26 @@ def test_tx_processor_failed(self): ethereum_tx = EthereumTxFactory(logs=logs) self.assertTrue(tx_processor.is_failed(ethereum_tx, safe_tx_hash)) self.assertFalse( - tx_processor.is_failed(ethereum_tx, Web3.keccak(text="hola").hex()) + tx_processor.is_failed(ethereum_tx, fast_keccak_text("hola").hex()) + ) + + # Event for Safes >= 1.4.1 + safe_tx_hash = ( + "0x4c15b21b9c3b57aebba3c274bf0a437950bd0eea46bc7a7b2df892f91f720311" + ) + logs = [ + { + "data": "0000000000000000000000000000000000000000000000000000000000000000", + "topics": [ + "0x23428b18acfb3ea64b08dc0c1d296ea9c09702c09083ca5272e64d115b687d23", + "0x4c15b21b9c3b57aebba3c274bf0a437950bd0eea46bc7a7b2df892f91f720311", + ], + } + ] + ethereum_tx = EthereumTxFactory(logs=logs) + self.assertTrue(tx_processor.is_failed(ethereum_tx, safe_tx_hash)) + self.assertFalse( + tx_processor.is_failed(ethereum_tx, fast_keccak_text("hola").hex()) ) def test_tx_is_version_breaking_signatures(self): diff --git a/safe_transaction_service/history/tests/test_views.py b/safe_transaction_service/history/tests/test_views.py index ea19d08a..9ad25526 100644 --- a/safe_transaction_service/history/tests/test_views.py +++ b/safe_transaction_service/history/tests/test_views.py @@ -2,7 +2,6 @@ import json import logging import pickle -from dataclasses import asdict from unittest import mock from unittest.mock import MagicMock, PropertyMock @@ -11,31 +10,33 @@ from django.urls import reverse from django.utils import timezone +import eth_abi from eth_account import Account from factory.fuzzy import FuzzyText from hexbytes import HexBytes from requests import ReadTimeout from rest_framework import status +from rest_framework.exceptions import ErrorDetail from rest_framework.test import APIRequestFactory, APITestCase, force_authenticate -from web3 import Web3 from gnosis.eth.constants 
import NULL_ADDRESS from gnosis.eth.ethereum_client import EthereumClient, TracingManager -from gnosis.eth.utils import fast_is_checksum_address -from gnosis.safe import CannotEstimateGas, Safe, SafeOperation +from gnosis.eth.utils import fast_is_checksum_address, fast_keccak_text +from gnosis.safe import CannotEstimateGas, Safe, SafeOperationEnum from gnosis.safe.safe_signature import SafeSignature, SafeSignatureType from gnosis.safe.signatures import signature_to_bytes from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin +from safe_transaction_service.account_abstraction.tests import factories as aa_factories from safe_transaction_service.contracts.models import ContractQuerySet from safe_transaction_service.contracts.tests.factories import ContractFactory from safe_transaction_service.contracts.tx_decoder import DbTxDecoder from safe_transaction_service.tokens.models import Token -from safe_transaction_service.tokens.services.price_service import PriceService from safe_transaction_service.tokens.tests.factories import TokenFactory +from safe_transaction_service.utils.utils import datetime_to_str from ...utils.redis import get_redis -from ..helpers import DelegateSignatureHelper +from ..helpers import DelegateSignatureHelper, DeleteMultisigTxSignatureHelper from ..models import ( IndexingStatus, MultisigConfirmation, @@ -44,8 +45,7 @@ SafeMasterCopy, ) from ..serializers import TransferType -from ..services import BalanceService -from ..services.balance_service import Erc20InfoWithLogo +from ..services import TransactionServiceProvider from ..views import SafeMultisigTransactionListView from .factories import ( ERC20TransferFactory, @@ -62,12 +62,34 @@ SafeMasterCopyFactory, SafeStatusFactory, ) +from .mocks.deployments_mock import ( + mainnet_deployments, + mainnet_deployments_1_4_1, + mainnet_deployments_1_4_1_multisend, + mainnet_deployments_1_4_1_safe, +) +from .mocks.mocks_safe_creation import ( + create_cpk_test_data, + create_test_data_v1_0_0, + create_test_data_v1_1_1, + create_v1_4_1_test_data, + data_decoded_cpk, + data_decoded_v1_0_0, + data_decoded_v1_1_1, + data_decoded_v1_4_1, +) from .mocks.traces import call_trace logger = logging.getLogger(__name__) class TestViews(SafeTestCaseMixin, APITestCase): + def setUp(self): + get_redis().flushall() + + def tearDown(self): + get_redis().flushall() + def test_about_view(self): url = reverse("v1:history:about") response = self.client.get(url, format="json") @@ -188,6 +210,43 @@ def test_indexing_view(self, current_block_number_mock: PropertyMock): self.assertEqual(response.data["master_copies_synced"], False) self.assertEqual(response.data["synced"], False) + # Mock chain id to mainnet + @mock.patch("safe_transaction_service.history.views.get_chain_id", return_value=1) + def test_safe_deployments_view(self, get_chain_id_mock): + url = reverse("v1:history:deployments") + response = self.client.get(url, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json(), mainnet_deployments) + + response = self.client.get(url + "?version=5.0.0", format="json") + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + response = self.client.get(url + "?version=1.4.1", format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json(), [mainnet_deployments_1_4_1]) + + response = self.client.get( + url + "?version=1.4.1&contract=MultiSend", format="json" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + 
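`test_safe_deployments_view` above filters the new deployments endpoint by `version` and `contract` query parameters. A hedged client-side example; the host and URL path are assumptions (the test only resolves the Django route name `v1:history:deployments`), while the query parameters and the 404 behaviour for unknown versions come from the test:

import requests

BASE_URL = "https://safe-transaction-mainnet.safe.global"
DEPLOYMENTS_URL = f"{BASE_URL}/api/v1/deployments/"  # hypothetical path; check your instance's schema


def get_deployments(version: str | None = None, contract: str | None = None) -> list:
    params = {}
    if version:
        params["version"] = version    # e.g. "1.4.1"; unknown versions return 404
    if contract:
        params["contract"] = contract  # e.g. "MultiSend" or "Safe"
    response = requests.get(DEPLOYMENTS_URL, params=params, timeout=10)
    response.raise_for_status()
    return response.json()  # [{"version": ..., "contracts": [...]}, ...]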
self.assertEqual( + response.json(), + [{"version": "1.4.1", "contracts": [mainnet_deployments_1_4_1_multisend]}], + ) + + response = self.client.get(url + "?contract=Safe", format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual( + response.json(), + [ + {"version": "1.0.0", "contracts": []}, + {"version": "1.1.1", "contracts": []}, + {"version": "1.2.0", "contracts": []}, + {"version": "1.3.0", "contracts": []}, + {"version": "1.4.1", "contracts": [mainnet_deployments_1_4_1_safe]}, + ], + ) + def test_all_transactions_view(self): safe_address = Account.create().address response = self.client.get( @@ -304,6 +363,7 @@ def test_all_transactions_view(self): "symbol": token.symbol, "decimals": token.decimals, "logo_uri": token.get_full_logo_uri(), + "trusted": token.trusted, }, ) transfers_not_empty = [ @@ -382,18 +442,20 @@ def test_all_transactions_ordering(self): last_result["transaction_hash"], ethereum_tx_2_days_ago.tx_hash ) - def test_all_transactions_cache(self): + def test_all_transactions_cache_view(self): safe_address = "0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9" # Older transaction factory_transactions = [ MultisigTransactionFactory(safe=safe_address), MultisigTransactionFactory(safe=safe_address), ] - # all-txs:{safe}:{executed}{queued}{trusted}:{limit}:{offset}:{ordering}:{relevant_elements} - cache_key = "all-txs:0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9:100:10:0:execution_date:2" + # all-txs:{safe_address} + cache_hash_key = f"all-txs:{safe_address}" + # {executed}{queued}{trusted}:{limit}:{offset}:{ordering} + cache_query_field = "100:10:0:execution_date" redis = get_redis() - redis.delete(cache_key) - cache_result = redis.get(cache_key) + redis.unlink(cache_hash_key) + cache_result = redis.hget(cache_hash_key, cache_query_field) # Should be empty at the beginning self.assertIsNone(cache_result) @@ -404,7 +466,7 @@ def test_all_transactions_cache(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data["count"], 2) - cache_result = redis.get(cache_key) + cache_result = redis.hget(cache_hash_key, cache_query_field) # Should be stored in redis cache self.assertIsNotNone(cache_result) # Cache should content the expected values @@ -423,7 +485,8 @@ def test_all_transactions_cache(self): ) self.assertEqual(cache_value["safe_nonce"], factory_transaction.nonce) # Modify cache to empty list - redis.set(cache_key, pickle.dumps(([], 0)), ex=60 * 10) + redis.hset(cache_hash_key, cache_query_field, pickle.dumps(([], 0))) + redis.expire(cache_hash_key, 60 * 10) response = self.client.get( reverse("v1:history:all-transactions", args=(safe_address,)) + "?executed=True&queued=False&trusted=False&ordering=execution_date" @@ -440,6 +503,33 @@ def test_all_transactions_cache(self): ) self.assertEqual(response.data["count"], 3) + def test_all_transactions_cache_limit_offset_view(self): + """ + Test limit and offset + """ + safe_address = "0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9" + number_transactions = 100 + transaction_service = TransactionServiceProvider() + for _ in range(number_transactions): + MultisigTransactionFactory(safe=safe_address) + + for limit, offset in ((57, 12), (13, 24)): + with self.subTest(limit=limit, offset=offset): + # all-txs:{safe_address} + cache_hash_key = f"all-txs:{safe_address}" + # {executed}{queued}{trusted}:{limit}:{offset}:{ordering}:{relevant_elements} + cache_query_field = f"100:{limit}:{offset}:execution_date" + redis = get_redis() + 
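The rewritten cache tests above switch the `/all-transactions/` cache from one flat Redis key per query to a Redis hash per Safe (`all-txs:{safe_address}`) whose fields encode the query (`{executed}{queued}{trusted}:{limit}:{offset}:{ordering}`). A minimal sketch of that pattern with `redis-py` and `pickle`, mirroring the key/field layout and the 10-minute TTL the tests use when seeding the cache; the rationale in the comment (one `UNLINK` invalidates every cached page for a Safe) is an inference, not stated in the diff:

import pickle
from typing import Any, List, Optional, Tuple

import redis

r = redis.Redis()


def cache_key(safe_address: str) -> str:
    return f"all-txs:{safe_address}"


def store_page(safe_address: str, query_field: str, results: List[Any], count: int) -> None:
    # One hash per Safe; each query variant gets its own field
    r.hset(cache_key(safe_address), query_field, pickle.dumps((results, count)))
    r.expire(cache_key(safe_address), 60 * 10)


def load_page(safe_address: str, query_field: str) -> Optional[Tuple[List[Any], int]]:
    raw = r.hget(cache_key(safe_address), query_field)
    return pickle.loads(raw) if raw is not None else None


def invalidate(safe_address: str) -> None:
    # Drops every cached page for the Safe in a single call
    r.unlink(cache_key(safe_address))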
self.assertFalse(redis.hexists(cache_hash_key, cache_query_field)) + + response = self.client.get( + reverse("v1:history:all-transactions", args=(safe_address,)) + + f"?executed=True&queued=False&trusted=False&ordering=execution_date&limit={limit}&offset={offset}" + ) + self.assertEqual(response.data["count"], number_transactions) + self.assertEqual(len(response.data["results"]), limit) + self.assertTrue(redis.hexists(cache_hash_key, cache_query_field)) + def test_all_transactions_wrong_transfer_type_view(self): # No token in database, so we must trust the event safe_address = Account.create().address @@ -620,11 +710,9 @@ def test_get_module_transaction(self): self.assertEqual( response.json(), { - "created": module_transaction.created.isoformat().replace( - "+00:00", "Z" - ), - "executionDate": module_transaction.internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "created": datetime_to_str(module_transaction.created), + "executionDate": datetime_to_str( + module_transaction.internal_tx.ethereum_tx.block.timestamp ), "blockNumber": module_transaction.internal_tx.ethereum_tx.block_id, "isSuccessful": not module_transaction.failed, @@ -641,7 +729,7 @@ def test_get_module_transaction(self): ) def test_get_multisig_confirmation(self): - random_safe_tx_hash = Web3.keccak(text="enxebre").hex() + random_safe_tx_hash = fast_keccak_text("enxebre").hex() response = self.client.get( reverse( "v1:history:multisig-transaction-confirmations", @@ -667,7 +755,7 @@ def test_get_multisig_confirmation(self): self.assertEqual(response.data["count"], 2) def test_post_multisig_confirmation(self): - random_safe_tx_hash = Web3.keccak(text="enxebre").hex() + random_safe_tx_hash = fast_keccak_text("enxebre").hex() data = { "signature": Account.create() .signHash(random_safe_tx_hash)["signature"] @@ -776,7 +864,7 @@ def test_post_multisig_confirmation(self): self.assertEqual(MultisigConfirmation.objects.count(), 2) def test_get_multisig_transaction(self): - safe_tx_hash = Web3.keccak(text="gnosis").hex() + safe_tx_hash = fast_keccak_text("gnosis").hex() response = self.client.get( reverse("v1:history:multisig-transaction", args=(safe_tx_hash,)), format="json", @@ -788,6 +876,7 @@ def test_get_multisig_transaction(self): "b1b3b164cf000000000000000000000000000000000000000000000000000000" "0000000001" ) + multisig_tx = MultisigTransactionFactory(data=add_owner_with_threshold_data) safe_tx_hash = multisig_tx.safe_tx_hash response = self.client.get( @@ -804,6 +893,7 @@ def test_get_multisig_transaction(self): self.assertFalse(response.data["trusted"]) self.assertIsNone(response.data["max_fee_per_gas"]) self.assertIsNone(response.data["max_priority_fee_per_gas"]) + self.assertIsNone(response.data["proposer"]) self.assertEqual( response.data["data_decoded"], { @@ -818,6 +908,7 @@ def test_get_multisig_transaction(self): ], }, ) + # Test camelCase self.assertEqual( response.json()["transactionHash"], multisig_tx.ethereum_tx.tx_hash @@ -845,16 +936,158 @@ def test_get_multisig_transaction(self): self.assertEqual(response.data["origin"], json.dumps(origin)) self.assertEqual(json.loads(response.data["origin"]), origin) + # Test proposer + proposer = Account.create().address + multisig_tx.proposer = proposer + multisig_tx.save() + response = self.client.get( + reverse("v1:history:multisig-transaction", args=(safe_tx_hash,)), + format="json", + ) + self.assertEqual(response.data["proposer"], proposer) + + def test_delete_multisig_transaction(self): + owner_account = Account.create() + safe_tx_hash = 
fast_keccak_text("random-tx").hex() + url = reverse("v1:history:multisig-transaction", args=(safe_tx_hash,)) + data = {"signature": "0x" + "1" * (130 * 2)} # 2 signatures of 65 bytes + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + # Add our test MultisigTransaction to the database + multisig_transaction = MultisigTransactionFactory(safe_tx_hash=safe_tx_hash) + + # Add other MultisigTransactions to the database to make sure they are not deleted + MultisigTransactionFactory() + MultisigTransactionFactory() + + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string="Executed transactions cannot be deleted", code="invalid" + ) + ] + }, + ) + + multisig_transaction.ethereum_tx = None + multisig_transaction.save(update_fields=["ethereum_tx"]) + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string="Old transactions without proposer cannot be deleted", + code="invalid", + ) + ] + }, + ) + + # Set a random proposer for the transaction + multisig_transaction.proposer = Account.create().address + multisig_transaction.save(update_fields=["proposer"]) + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string="1 owner signature was expected, 2 received", + code="invalid", + ) + ] + }, + ) + + # Use a contract signature + data = {"signature": "0x" + "0" * 130} # 1 signature of 65 bytes + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string="Only EOA and ETH_SIGN signatures are supported", + code="invalid", + ) + ] + }, + ) + + # Use a real not valid signature and set the right proposer + multisig_transaction.proposer = owner_account.address + multisig_transaction.save(update_fields=["proposer"]) + data = { + "signature": owner_account.signHash(safe_tx_hash)[ + "signature" + ].hex() # Random signature + } + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.data, + { + "non_field_errors": [ + ErrorDetail( + string="Provided owner is not the proposer of the transaction", + code="invalid", + ) + ] + }, + ) + + # Use a proper signature + message_hash = DeleteMultisigTxSignatureHelper.calculate_hash( + multisig_transaction.safe, + safe_tx_hash, + self.ethereum_client.get_chain_id(), + previous_totp=False, + ) + data = { + "signature": owner_account.signHash(message_hash)[ + "signature" + ].hex() # Random signature + } + self.assertEqual(MultisigTransaction.objects.count(), 3) + self.assertTrue( + MultisigTransaction.objects.filter(safe_tx_hash=safe_tx_hash).exists() + ) + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(MultisigTransaction.objects.count(), 2) + self.assertFalse( + 
MultisigTransaction.objects.filter(safe_tx_hash=safe_tx_hash).exists() + ) + + # Trying to do the query again should raise a 404 + response = self.client.delete(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + def test_get_multisig_transactions(self): safe_address = Account.create().address + proposer = Account.create().address response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)), format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data["count"], 0) + self.assertEqual(response.data["count_unique_nonce"], 0) - multisig_tx = MultisigTransactionFactory(safe=safe_address) + multisig_tx = MultisigTransactionFactory( + safe=safe_address, proposer=proposer, trusted=True + ) + # Not trusted multisig transaction should not be returned by default + MultisigTransactionFactory(safe=safe_address, proposer=proposer, trusted=False) response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)), format="json", @@ -878,7 +1111,6 @@ def test_get_multisig_transactions(self): ) # Check Etag header self.assertTrue(response["Etag"]) - MultisigConfirmationFactory(multisig_transaction=multisig_tx) response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)), @@ -887,8 +1119,20 @@ def test_get_multisig_transactions(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]), 1) self.assertEqual(len(response.data["results"][0]["confirmations"]), 1) + self.assertEqual(response.data["results"][0]["proposer"], proposer) - MultisigTransactionFactory(safe=safe_address, nonce=multisig_tx.nonce) + # Check not trusted + response = self.client.get( + reverse("v1:history:multisig-transactions", args=(safe_address,)) + + "?trusted=False", + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 2) + + MultisigTransactionFactory( + safe=safe_address, nonce=multisig_tx.nonce, trusted=True + ) response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)), format="json", @@ -897,6 +1141,41 @@ def test_get_multisig_transactions(self): self.assertEqual(response.data["count"], 2) self.assertEqual(response.data["count_unique_nonce"], 1) + def test_get_multisig_transactions_unique_nonce(self): + """ + Unique nonce should follow the trusted filter + """ + + safe_address = Account.create().address + url = reverse("v1:history:multisig-transactions", args=(safe_address,)) + response = self.client.get( + url, + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 0) + self.assertEqual(response.data["count_unique_nonce"], 0) + + MultisigTransactionFactory(safe=safe_address, nonce=6, trusted=True) + MultisigTransactionFactory(safe=safe_address, nonce=12, trusted=False) + + # Unique nonce ignores not trusted transactions by default + response = self.client.get( + url, + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 1) + self.assertEqual(response.data["count_unique_nonce"], 1) + + response = self.client.get( + url + "?trusted=False", + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 2) + self.assertEqual(response.data["count_unique_nonce"], 2) + @mock.patch.object( 
DbTxDecoder, "get_data_decoded", return_value={"param1": "value"} ) @@ -906,7 +1185,7 @@ def test_get_multisig_transactions_not_decoded( try: ContractQuerySet.cache_trusted_addresses_for_delegate_call.clear() multisig_transaction = MultisigTransactionFactory( - operation=SafeOperation.CALL.value, data=b"abcd" + operation=SafeOperationEnum.CALL.value, data=b"abcd", trusted=True ) safe_address = multisig_transaction.safe response = self.client.get( @@ -918,7 +1197,7 @@ def test_get_multisig_transactions_not_decoded( response.data["results"][0]["data_decoded"], {"param1": "value"} ) - multisig_transaction.operation = SafeOperation.DELEGATE_CALL.value + multisig_transaction.operation = SafeOperationEnum.DELEGATE_CALL.value multisig_transaction.save() response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)), @@ -952,7 +1231,7 @@ def test_get_multisig_transactions_filters(self): self.assertEqual(response.data["count"], 0) multisig_transaction = MultisigTransactionFactory( - safe=safe_address, nonce=0, ethereum_tx=None + safe=safe_address, nonce=0, ethereum_tx=None, trusted=True ) response = self.client.get( reverse("v1:history:multisig-transactions", args=(safe_address,)) @@ -1073,13 +1352,15 @@ def test_post_multisig_transactions_null_signature(self): self.assertFalse(multisig_transaction_db.trusted) response = self.client.get( - reverse("v1:history:multisig-transactions", args=(safe_address,)), + reverse( + "v1:history:multisig-transaction", + args=(data["contractTransactionHash"],), + ), format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data["results"]), 1) - self.assertIsNone(response.data["results"][0]["executor"]) - self.assertEqual(len(response.data["results"][0]["confirmations"]), 0) + self.assertIsNone(response.data["executor"]) + self.assertEqual(len(response.data["confirmations"]), 0) def test_post_multisig_transactions(self): safe_owner_1 = Account.create() @@ -1131,13 +1412,16 @@ def test_post_multisig_transactions(self): self.assertFalse(multisig_transaction_db.trusted) response = self.client.get( - reverse("v1:history:multisig-transactions", args=(safe_address,)), + reverse( + "v1:history:multisig-transaction", + args=(data["contractTransactionHash"],), + ), format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data["results"]), 1) - self.assertIsNone(response.data["results"][0]["executor"]) - self.assertEqual(len(response.data["results"][0]["confirmations"]), 0) + self.assertIsNone(response.data["executor"]) + self.assertEqual(len(response.data["confirmations"]), 0) + self.assertEqual(response.data["proposer"], data["sender"]) # Test confirmation with signature data["signature"] = safe_owner_1.signHash(safe_tx.safe_tx_hash)[ @@ -1199,6 +1483,125 @@ def test_post_multisig_transactions(self): ) self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) + def test_post_multisig_transaction_with_zero_to(self): + safe_owner_1 = Account.create() + safe = self.deploy_test_safe(owners=[safe_owner_1.address]) + safe_address = safe.address + + response = self.client.get( + reverse("v1:history:multisig-transactions", args=(safe_address,)), + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 0) + + data = { + "to": NULL_ADDRESS, + "value": 100000000000000000, + "data": None, + "operation": 0, + "nonce": 0, + "safeTxGas": 0, + "baseGas": 0, + "gasPrice": 0, 
+ "gasToken": "0x0000000000000000000000000000000000000000", + "refundReceiver": "0x0000000000000000000000000000000000000000", + # "contractTransactionHash": "0x1c2c77b29086701ccdda7836c399112a9b715c6a153f6c8f75c84da4297f60d3", + "sender": safe_owner_1.address, + } + safe_tx = safe.build_multisig_tx( + data["to"], + data["value"], + data["data"], + data["operation"], + data["safeTxGas"], + data["baseGas"], + data["gasPrice"], + data["gasToken"], + data["refundReceiver"], + safe_nonce=data["nonce"], + ) + data["contractTransactionHash"] = safe_tx.safe_tx_hash.hex() + response = self.client.post( + reverse("v1:history:multisig-transactions", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + multisig_transaction_db = MultisigTransaction.objects.first() + self.assertFalse(multisig_transaction_db.trusted) + + def test_post_multisig_transaction_with_1271_signature(self): + account = Account.create() + safe_owner = self.deploy_test_safe(owners=[account.address]) + safe = self.deploy_test_safe(owners=[safe_owner.address]) + + data = { + "to": account.address, + "value": 100000000000000000, + "data": None, + "operation": 0, + "nonce": 0, + "safeTxGas": 0, + "baseGas": 0, + "gasPrice": 0, + "gasToken": "0x0000000000000000000000000000000000000000", + "refundReceiver": "0x0000000000000000000000000000000000000000", + "sender": safe_owner.address, + } + safe_tx = safe.build_multisig_tx( + data["to"], + data["value"], + data["data"], + data["operation"], + data["safeTxGas"], + data["baseGas"], + data["gasPrice"], + data["gasToken"], + data["refundReceiver"], + safe_nonce=data["nonce"], + ) + safe_tx_hash = safe_tx.safe_tx_hash + safe_tx_hash_preimage = safe_tx.safe_tx_hash_preimage + + safe_owner_message_hash = safe_owner.get_message_hash(safe_tx_hash_preimage) + safe_owner_signature = account.signHash(safe_owner_message_hash)["signature"] + signature_1271 = ( + signature_to_bytes( + 0, int.from_bytes(HexBytes(safe_owner.address), byteorder="big"), 65 + ) + + eth_abi.encode(["bytes"], [safe_owner_signature])[32:] + ) + + data["contractTransactionHash"] = safe_tx_hash.hex() + data["signature"] = signature_1271.hex() + + response = self.client.post( + reverse("v1:history:multisig-transactions", args=(safe.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + multisig_transaction_db = MultisigTransaction.objects.get( + safe_tx_hash=safe_tx_hash + ) + self.assertTrue(multisig_transaction_db.trusted) + self.assertEqual(MultisigConfirmation.objects.count(), 1) + + # Test MultisigConfirmation endpoint + confirmation_data = {"signature": data["signature"]} + MultisigConfirmation.objects.all().delete() + response = self.client.post( + reverse( + "v1:history:multisig-transaction-confirmations", + args=(safe_tx_hash.hex(),), + ), + format="json", + data=confirmation_data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(MultisigConfirmation.objects.count(), 1) + def test_post_multisig_transaction_with_trusted_user(self): safe_owner_1 = Account.create() safe = self.deploy_test_safe(owners=[safe_owner_1.address]) @@ -1618,12 +2021,17 @@ def test_post_multisig_transactions_with_delegate(self): response.data["non_field_errors"][0], ) - # Add delegate - SafeContractDelegateFactory( + # Add delegates (to check there's no issue with delegating twice to the same account) + safe_contract_delegate = SafeContractDelegateFactory( 
safe_contract__address=safe_address, delegate=safe_delegate.address, delegator=safe_owners[0].address, ) + SafeContractDelegateFactory( + safe_contract=safe_contract_delegate.safe_contract, + delegate=safe_delegate.address, + delegator=safe_owners[1].address, + ) response = self.client.post( reverse("v1:history:multisig-transactions", args=(safe_address,)), format="json", @@ -1632,7 +2040,11 @@ def test_post_multisig_transactions_with_delegate(self): self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(MultisigTransaction.objects.count(), 1) self.assertEqual(MultisigConfirmation.objects.count(), 0) - self.assertTrue(MultisigTransaction.objects.first().trusted) + multisig_transaction = MultisigTransaction.objects.first() + self.assertTrue(multisig_transaction.trusted) + # Proposer should be the owner address not the delegate + self.assertNotEqual(multisig_transaction.proposer, safe_delegate.address) + self.assertEqual(multisig_transaction.proposer, safe_owners[0].address) data["signature"] = data["signature"] + data["signature"][2:] response = self.client.post( @@ -1727,89 +2139,6 @@ def test_safe_balances_view(self): ], ) - @mock.patch.object(BalanceService, "get_token_info", autospec=True) - @mock.patch.object( - PriceService, "get_token_eth_value", return_value=0.4, autospec=True - ) - @mock.patch.object( - PriceService, "get_native_coin_usd_price", return_value=123.4, autospec=True - ) - @mock.patch.object(timezone, "now", return_value=timezone.now()) - def test_safe_balances_usd_view( - self, - timezone_now_mock: MagicMock, - get_native_coin_usd_price_mock: MagicMock, - get_token_eth_value_mock: MagicMock, - get_token_info_mock: MagicMock, - ): - timestamp_str = timezone_now_mock.return_value.isoformat().replace( - "+00:00", "Z" - ) - safe_address = Account.create().address - response = self.client.get( - reverse("v1:history:safe-balances-usd", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - - SafeContractFactory(address=safe_address) - value = 7 - self.send_ether(safe_address, 7) - response = self.client.get( - reverse("v1:history:safe-balances-usd", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - self.assertIsNone(response.data[0]["token_address"]) - self.assertEqual(response.data[0]["balance"], str(value)) - self.assertEqual(response.data[0]["eth_value"], "1.0") - - tokens_value = int(12 * 1e18) - erc20 = self.deploy_example_erc20(tokens_value, safe_address) - response = self.client.get( - reverse("v1:history:safe-balances-usd", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - - erc20_info = Erc20InfoWithLogo( - erc20.address, "UXIO", "UXI", 18, None, "http://logo_uri.es" - ) - get_token_info_mock.return_value = erc20_info - - ERC20TransferFactory(address=erc20.address, to=safe_address) - response = self.client.get( - reverse("v1:history:safe-balances-usd", args=(safe_address,)), format="json" - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - token_dict = asdict(erc20_info) - del token_dict["address"] - del token_dict["copy_price"] - self.assertCountEqual( - response.data, - [ - { - "token_address": None, - "token": None, - "balance": str(value), - "eth_value": "1.0", - "timestamp": timestamp_str, - "fiat_balance": "0.0", - "fiat_conversion": "123.4", - "fiat_code": "USD", - }, # 7 wei 
is rounded to 0.0 - { - "token_address": erc20.address, - "token": token_dict, - "balance": str(tokens_value), - "eth_value": "0.4", - "timestamp": timestamp_str, - "fiat_balance": str(round(123.4 * 0.4 * (tokens_value / 1e18), 4)), - "fiat_conversion": str(round(123.4 * 0.4, 4)), - "fiat_code": "USD", - }, - ], - ) - def test_delegates_post(self): url = reverse("v1:history:delegates") safe_address = Account.create().address @@ -1904,9 +2233,13 @@ def test_delegates_post(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(SafeContractDelegate.objects.count(), 2) + queryset = SafeContractDelegate.objects.get_for_safe( + safe_address, [delegator.address] + ) + self.assertEqual(len(queryset), 2) self.assertCountEqual( - SafeContractDelegate.objects.get_delegates_for_safe(safe_address), - [delegate.address], + set(safe_contract_delegate.delegate for safe_contract_delegate in queryset), + {delegate.address}, ) def test_delegates_get(self): @@ -2105,7 +2438,9 @@ def test_delete_safe_delegate(self): data=data, ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertIn("Not found", response.data["detail"]) + self.assertEqual( + "No SafeContractDelegate matches the given query.", response.data["detail"] + ) # Test previous otp hash_to_sign = DelegateSignatureHelper.calculate_hash( @@ -2118,7 +2453,9 @@ def test_delete_safe_delegate(self): data=data, ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertIn("Not found", response.data["detail"]) + self.assertEqual( + "No SafeContractDelegate matches the given query.", response.data["detail"] + ) hash_to_sign = DelegateSignatureHelper.calculate_hash(delegate_address) data["signature"] = owner_account.signHash(hash_to_sign)["signature"].hex() @@ -2128,7 +2465,9 @@ def test_delete_safe_delegate(self): data=data, ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertIn("Not found", response.data["detail"]) + self.assertEqual( + "No SafeContractDelegate matches the given query.", response.data["detail"] + ) SafeContractDelegateFactory( safe_contract=safe_contract, delegate=delegate_address @@ -2246,8 +2585,8 @@ def test_incoming_transfers_view(self): [ { "type": TransferType.ERC20_TRANSFER.name, - "executionDate": ethereum_erc_20_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_20_event.ethereum_tx.block.timestamp ), "transferId": erc20_transfer_id, "transactionHash": ethereum_erc_20_event.ethereum_tx_id, @@ -2264,12 +2603,13 @@ def test_incoming_transfers_view(self): "symbol": token.symbol, "decimals": token.decimals, "logoUri": token.get_full_logo_uri(), + "trusted": token.trusted, }, }, { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + internal_tx.ethereum_tx.block.timestamp ), "transferId": internal_tx_transfer_id, "transactionHash": internal_tx.ethereum_tx_id, @@ -2306,8 +2646,8 @@ def test_incoming_transfers_view(self): [ { "type": TransferType.ERC721_TRANSFER.name, - "executionDate": ethereum_erc_721_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_721_event.ethereum_tx.block.timestamp ), "transferId": erc721_transfer_id, "transactionHash": ethereum_erc_721_event.ethereum_tx_id, @@ -2321,8 +2661,8 @@ def test_incoming_transfers_view(self): }, { "type": 
TransferType.ERC20_TRANSFER.name, - "executionDate": ethereum_erc_20_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_20_event.ethereum_tx.block.timestamp ), "transferId": erc20_transfer_id, "transactionHash": ethereum_erc_20_event.ethereum_tx_id, @@ -2339,12 +2679,13 @@ def test_incoming_transfers_view(self): "symbol": token.symbol, "decimals": token.decimals, "logoUri": token.get_full_logo_uri(), + "trusted": token.trusted, }, }, { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + internal_tx.ethereum_tx.block.timestamp ), "transferId": internal_tx_transfer_id, "transactionHash": internal_tx.ethereum_tx_id, @@ -2475,8 +2816,8 @@ def test_transfers_view(self): expected_results = [ { "type": TransferType.ERC20_TRANSFER.name, - "executionDate": ethereum_erc_20_event_2.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_20_event_2.ethereum_tx.block.timestamp ), "blockNumber": ethereum_erc_20_event_2.ethereum_tx.block_id, "transferId": erc20_transfer_id_2, @@ -2490,8 +2831,8 @@ def test_transfers_view(self): }, { "type": TransferType.ERC20_TRANSFER.name, - "executionDate": ethereum_erc_20_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_20_event.ethereum_tx.block.timestamp ), "blockNumber": ethereum_erc_20_event.ethereum_tx.block_id, "transferId": erc20_transfer_id, @@ -2508,12 +2849,13 @@ def test_transfers_view(self): "symbol": token.symbol, "decimals": token.decimals, "logoUri": token.get_full_logo_uri(), + "trusted": token.trusted, }, }, { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx_2.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + internal_tx_2.ethereum_tx.block.timestamp ), "blockNumber": internal_tx_2.ethereum_tx.block_id, "transferId": internal_tx_2_transfer_id, @@ -2527,8 +2869,8 @@ def test_transfers_view(self): }, { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + internal_tx.ethereum_tx.block.timestamp ), "blockNumber": internal_tx.ethereum_tx.block_id, "transferId": internal_tx_transfer_id, @@ -2572,8 +2914,8 @@ def test_transfers_view(self): expected_results = [ { "type": TransferType.ERC721_TRANSFER.name, - "executionDate": ethereum_erc_721_event_2.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_721_event_2.ethereum_tx.block.timestamp ), "transactionHash": ethereum_erc_721_event_2.ethereum_tx_id, "transferId": erc721_transfer_id_2, @@ -2587,8 +2929,8 @@ def test_transfers_view(self): }, { "type": TransferType.ERC721_TRANSFER.name, - "executionDate": ethereum_erc_721_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_721_event.ethereum_tx.block.timestamp ), "transactionHash": ethereum_erc_721_event.ethereum_tx_id, "transferId": erc721_transfer_id, @@ -2710,9 +3052,7 @@ def test_get_transfer_view(self): self.assertEqual(response.status_code, status.HTTP_200_OK) expected_result = { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - 
"+00:00", "Z" - ), + "executionDate": datetime_to_str(internal_tx.ethereum_tx.block.timestamp), "blockNumber": internal_tx.ethereum_tx.block_id, "transferId": transfer_id, "transactionHash": internal_tx.ethereum_tx_id, @@ -2743,8 +3083,8 @@ def test_get_transfer_view(self): self.assertEqual(response.status_code, status.HTTP_200_OK) expected_result = { "type": TransferType.ETHER_TRANSFER.name, - "executionDate": internal_tx_empty_trace_address.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + internal_tx_empty_trace_address.ethereum_tx.block.timestamp ), "blockNumber": internal_tx_empty_trace_address.ethereum_tx.block_id, "transferId": transfer_id_empty_trace_address, @@ -2777,8 +3117,8 @@ def test_get_transfer_view(self): self.assertEqual(response.status_code, status.HTTP_200_OK) expected_result = { "type": TransferType.ERC20_TRANSFER.name, - "executionDate": ethereum_erc_20_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_20_event.ethereum_tx.block.timestamp ), "blockNumber": ethereum_erc_20_event.ethereum_tx.block_id, "transferId": transfer_id, @@ -2795,6 +3135,7 @@ def test_get_transfer_view(self): "symbol": token.symbol, "decimals": token.decimals, "logoUri": token.get_full_logo_uri(), + "trusted": token.trusted, }, } self.assertEqual(response.json(), expected_result) @@ -2818,8 +3159,8 @@ def test_get_transfer_view(self): self.assertEqual(response.status_code, status.HTTP_200_OK) expected_result = { "type": TransferType.ERC721_TRANSFER.name, - "executionDate": ethereum_erc_721_event.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + "executionDate": datetime_to_str( + ethereum_erc_721_event.ethereum_tx.block.timestamp ), "transactionHash": ethereum_erc_721_event.ethereum_tx_id, "transferId": transfer_id, @@ -2840,9 +3181,9 @@ def test_safe_creation_view(self): ) self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) - owner_address = Account.create().address + safe_address = Account.create().address response = self.client.get( - reverse("v1:history:safe-creation", args=(owner_address,)) + reverse("v1:history:safe-creation", args=(safe_address,)) ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -2851,18 +3192,16 @@ def test_safe_creation_view(self): ): # Insert create contract internal tx internal_tx = InternalTxFactory( - contract_address=owner_address, + contract_address=safe_address, trace_address="0,0", ethereum_tx__status=1, ) response = self.client.get( - reverse("v1:history:safe-creation", args=(owner_address,)), + reverse("v1:history:safe-creation", args=(safe_address,)), format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - created_iso = internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" - ) + created_iso = datetime_to_str(internal_tx.ethereum_tx.block.timestamp) expected = { "created": created_iso, "creator": internal_tx.ethereum_tx._from, @@ -2871,8 +3210,9 @@ def test_safe_creation_view(self): "setup_data": None, "data_decoded": None, "transaction_hash": internal_tx.ethereum_tx_id, + "user_operation": None, } - self.assertEqual(response.data, expected) + self.assertDictEqual(response.data, expected) # Next children internal_tx should not alter the result another_trace = dict(call_trace) @@ -2884,11 +3224,54 @@ def test_safe_creation_view(self): return_value=[another_trace], ): response = self.client.get( - reverse("v1:history:safe-creation", 
args=(owner_address,)), + reverse("v1:history:safe-creation", args=(safe_address,)), format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, expected) + self.assertDictEqual(response.data, expected) + + # Test 4337 SafeOperation showing in the creation + safe_operation = aa_factories.SafeOperationFactory( + user_operation__ethereum_tx_id=internal_tx.ethereum_tx_id, + user_operation__sender=safe_address, + user_operation__init_code=HexBytes("0x1234"), + ) + response = self.client.get( + reverse("v1:history:safe-creation", args=(safe_address,)), + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + expected["user_operation"] = { + "sender": safe_operation.user_operation.sender, + "nonce": safe_operation.user_operation.nonce, + "user_operation_hash": safe_operation.user_operation.hash, + "ethereum_tx_hash": internal_tx.ethereum_tx_id, + "init_code": "0x1234", + "call_data": "0x", + "call_data_gas_limit": safe_operation.user_operation.call_data_gas_limit, + "verification_gas_limit": safe_operation.user_operation.verification_gas_limit, + "pre_verification_gas": safe_operation.user_operation.pre_verification_gas, + "max_fee_per_gas": safe_operation.user_operation.max_fee_per_gas, + "max_priority_fee_per_gas": safe_operation.user_operation.max_priority_fee_per_gas, + "paymaster": safe_operation.user_operation.paymaster, + "paymaster_data": "0x", + "signature": "0x", + "entry_point": safe_operation.user_operation.entry_point, + "safe_operation": { + "created": datetime_to_str(safe_operation.created), + "modified": datetime_to_str(safe_operation.created), + "safe_operation_hash": safe_operation.hash, + "valid_after": datetime_to_str(safe_operation.valid_after), + "valid_until": datetime_to_str(safe_operation.valid_until), + "module_address": safe_operation.module_address, + "confirmations": [], + "prepared_signature": None, + }, + } + + self.assertIsNotNone(response.data["user_operation"]) + self.assertDictEqual(response.data, expected) + safe_operation.user_operation.delete() another_trace_2 = dict(call_trace) another_trace_2["traceAddress"] = [0] @@ -2899,123 +3282,36 @@ def test_safe_creation_view(self): return_value=[another_trace, another_trace_2], ): # `another_trace_2` should change the `creator` and `master_copy` and `setup_data` should appear - # Taken from rinkeby - create_test_data = { - "master_copy": "0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", - "setup_data": "0xa97ab18a00000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000006e45d69a383ceca3d54688e833bd0e1388747e6b00000000000000000000000061a0c717d18232711bc788f19c9cd56a43cc88720000000000000000000000007724b234c9099c205f03b458944942bceba134080000000000000000000000000000000000000000000000000000000000000000", - "data": 
"0x61b69abd000000000000000000000000b6029ea3b2c51d09a50b53ca8012feeb05bda35a00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000184a97ab18a00000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000006e45d69a383ceca3d54688e833bd0e1388747e6b00000000000000000000000061a0c717d18232711bc788f19c9cd56a43cc88720000000000000000000000007724b234c9099c205f03b458944942bceba13408000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - } - data_decoded_1 = { - "method": "setup", - "parameters": [ - { - "name": "_owners", - "type": "address[]", - "value": [ - "0x6E45d69a383CECa3d54688e833Bd0e1388747e6B", - "0x61a0c717d18232711bC788F19C9Cd56a43cc8872", - "0x7724b234c9099C205F03b458944942bcEBA13408", - ], - }, - {"name": "_threshold", "type": "uint256", "value": "1"}, - { - "name": "to", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - {"name": "data", "type": "bytes", "value": "0x"}, - { - "name": "paymentToken", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - {"name": "payment", "type": "uint256", "value": "0"}, - { - "name": "paymentReceiver", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - ], - } - - create_test_data_2 = { - "master_copy": "0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", - "setup_data": "0xb63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000180000000000000000000000000d5d82b6addc9027b22dca772aa68d5d74cdbdf440000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ac9b6dd409ff10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000085c26101f353f38e45c72d414b44972831f07be3000000000000000000000000235518798770d7336c5c4908dd1019457fea43a10000000000000000000000007f63c25665ea7e85500eaeb806e552e651b07b9d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "data": 
"0x1688f0b900000000000000000000000034cfac646f301356faa8b21e94227e3583fe3f5f0000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000002cecc9e861200000000000000000000000000000000000000000000000000000000000001c4b63e800d0000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000180000000000000000000000000d5d82b6addc9027b22dca772aa68d5d74cdbdf440000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ac9b6dd409ff10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000085c26101f353f38e45c72d414b44972831f07be3000000000000000000000000235518798770d7336c5c4908dd1019457fea43a10000000000000000000000007f63c25665ea7e85500eaeb806e552e651b07b9d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - } - data_decoded_2 = { - "method": "setup", - "parameters": [ - { - "name": "_owners", - "type": "address[]", - "value": [ - "0x85C26101f353f38E45c72d414b44972831f07BE3", - "0x235518798770D7336c5c4908dd1019457FEa43a1", - "0x7F63c25665EA7e85500eAEB806E552e651B07b9d", - ], - }, - {"name": "_threshold", "type": "uint256", "value": "1"}, - { - "name": "to", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - {"name": "data", "type": "bytes", "value": "0x"}, - { - "name": "fallbackHandler", - "type": "address", - "value": "0xd5D82B6aDDc9027B22dCA772Aa68D5d74cdBdF44", - }, - { - "name": "paymentToken", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - {"name": "payment", "type": "uint256", "value": "3036537000337393"}, - { - "name": "paymentReceiver", - "type": "address", - "value": "0x0000000000000000000000000000000000000000", - }, - ], - } - - create_cpk_test_data = { - "master_copy": "0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", - "setup_data": "0x5714713d000000000000000000000000ff54516a7bc1c1ea952a688e72d5b93a80620074", - "data": "0x460868ca00000000000000000000000034cfac646f301356faa8b21e94227e3583fe3f5fcfe33a586323e7325be6aa6ecd8b4600d232a9037e83c8ece69413b777dabe6500000000000000000000000040a930851bd2e590bd5a5c981b436de25742e9800000000000000000000000005ef44de4b98f2bce0e29c344e7b2fb8f0282a0cf000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000245714713d000000000000000000000000ff54516a7bc1c1ea952a688e72d5b93a8062007400000000000000000000000000000000000000000000000000000000", - } - data_decoded_cpk = None for test_data, data_decoded in [ - (create_test_data, data_decoded_1), - (create_test_data_2, data_decoded_2), + (create_test_data_v1_0_0, data_decoded_v1_0_0), + (create_test_data_v1_1_1, data_decoded_v1_1_1), (create_cpk_test_data, data_decoded_cpk), + (create_v1_4_1_test_data, data_decoded_v1_4_1), ]: - another_trace_2["action"]["input"] = HexBytes(test_data["data"]) - response = self.client.get( - reverse("v1:history:safe-creation", args=(owner_address,)), - format="json", - ) - self.assertEqual(response.status_code, 
status.HTTP_200_OK) - created_iso = ( - internal_tx.ethereum_tx.block.timestamp.isoformat().replace( - "+00:00", "Z" + with self.subTest(test_data=test_data, data_decoded=data_decoded): + another_trace_2["action"]["input"] = HexBytes(test_data["data"]) + response = self.client.get( + reverse("v1:history:safe-creation", args=(safe_address,)), + format="json", + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + created_iso = datetime_to_str( + internal_tx.ethereum_tx.block.timestamp + ) + self.assertDictEqual( + response.data, + { + "created": created_iso, + "creator": another_trace_2["action"]["from"], + "transaction_hash": internal_tx.ethereum_tx_id, + "factory_address": internal_tx._from, + "master_copy": test_data["master_copy"], + "setup_data": test_data["setup_data"], + "data_decoded": data_decoded, + "user_operation": None, + }, ) - ) - self.assertEqual( - response.data, - { - "created": created_iso, - "creator": another_trace_2["action"]["from"], - "transaction_hash": internal_tx.ethereum_tx_id, - "factory_address": internal_tx._from, - "master_copy": test_data["master_copy"], - "setup_data": test_data["setup_data"], - "data_decoded": data_decoded, - }, - ) def test_safe_info_view(self): invalid_address = "0x2A" @@ -3221,10 +3517,6 @@ def test_singletons_view(self): url = reverse("v1:history:singletons") return self._test_singletons_view(url) - def test_master_copies_view(self): - url = reverse("v1:history:master-copies") - return self._test_singletons_view(url) - def test_modules_view(self): invalid_address = "0x2A" response = self.client.get( @@ -3344,6 +3636,19 @@ def test_estimate_multisig_tx_view(self, estimate_tx_gas_mock: MagicMock): self.assertEqual( response.data, {"safe_tx_gas": str(estimate_tx_gas_mock.return_value)} ) + with mock.patch( + "safe_transaction_service.history.views.settings.ETH_L2_NETWORK", + return_value=True, + ): + response = self.client.post( + reverse( + "v1:history:multisig-transaction-estimate", args=(safe_address,) + ), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, {"safe_tx_gas": "0"}) estimate_tx_gas_mock.side_effect = CannotEstimateGas response = self.client.post( diff --git a/safe_transaction_service/history/tests/test_views_v2.py b/safe_transaction_service/history/tests/test_views_v2.py index 56caac28..0b6e945f 100644 --- a/safe_transaction_service/history/tests/test_views_v2.py +++ b/safe_transaction_service/history/tests/test_views_v2.py @@ -1,16 +1,27 @@ +from unittest import mock + from django.urls import reverse from eth_account import Account +from hexbytes import HexBytes from rest_framework import status from rest_framework.test import APITestCase from gnosis.eth.constants import NULL_ADDRESS +from gnosis.safe.signatures import signature_to_bytes from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin -from .factories import ERC721TransferFactory, SafeContractFactory +from ..helpers import DelegateSignatureHelperV2 +from ..models import SafeContractDelegate +from .factories import ( + ERC721TransferFactory, + SafeContractDelegateFactory, + SafeContractFactory, +) class TestViewsV2(SafeTestCaseMixin, APITestCase): + def test_safe_collectibles_paginated(self): safe_address = Account.create().address @@ -145,3 +156,324 @@ def test_safe_collectibles_paginated(self): ) self.assertEqual(response.data["results"][1]["address"], erc721_full.address) self.assertEqual(int(response.data["results"][1]["id"]), erc721_full.token_id) + + def 
test_delegates_post(self): + url = reverse("v2:history:delegates") + safe_address = Account.create().address + delegate = Account.create() + delegator = Account.create() + label = "Saul Goodman" + data = { + "delegate": delegate.address, + "delegator": delegator.address, + "label": label, + "signature": "0x" + "1" * 130, + } + response = self.client.post(url, format="json", data=data) + self.assertIn( + "Signature does not match provided delegator", + response.data["non_field_errors"][0], + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + data["safe"] = safe_address + response = self.client.post(url, format="json", data=data) + self.assertIn( + f"Safe={safe_address} does not exist", response.data["non_field_errors"][0] + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + SafeContractFactory(address=safe_address) + with mock.patch( + "safe_transaction_service.history.serializers.get_safe_owners", + return_value=[Account.create().address], + ) as get_safe_owners_mock: + response = self.client.post(url, format="json", data=data) + self.assertIn( + f"Provided delegator={delegator.address} is not an owner of Safe={safe_address}", + response.data["non_field_errors"][0], + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + get_safe_owners_mock.return_value = [delegator.address] + response = self.client.post(url, format="json", data=data) + self.assertIn( + f"Signature does not match provided delegator={delegator.address}", + response.data["non_field_errors"][0], + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + # Create delegate + self.assertEqual(SafeContractDelegate.objects.count(), 0) + chain_id = self.ethereum_client.get_chain_id() + hash_to_sign = DelegateSignatureHelperV2.calculate_hash( + delegate.address, chain_id, False + ) + data["signature"] = delegator.signHash(hash_to_sign)["signature"].hex() + response = self.client.post(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + safe_contract_delegate = SafeContractDelegate.objects.get() + self.assertEqual(safe_contract_delegate.delegate, delegate.address) + self.assertEqual(safe_contract_delegate.delegator, delegator.address) + self.assertEqual(safe_contract_delegate.label, label) + self.assertEqual(safe_contract_delegate.safe_contract_id, safe_address) + + # Update label + label = "Jimmy McGill" + data["label"] = label + response = self.client.post(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(SafeContractDelegate.objects.count(), 1) + safe_contract_delegate = SafeContractDelegate.objects.get() + self.assertEqual(safe_contract_delegate.label, label) + + # Create delegate without a Safe + hash_to_sign = DelegateSignatureHelperV2.calculate_hash( + delegate.address, chain_id, False + ) + data = { + "label": "Kim Wexler", + "delegate": delegate.address, + "delegator": delegator.address, + "signature": delegator.signHash(hash_to_sign)["signature"].hex(), + } + response = self.client.post(url, format="json", data=data) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(SafeContractDelegate.objects.count(), 2) + + # Test bad request with an invalid signature + signature = signature_to_bytes(0, int(delegator.address, 16), 65) + HexBytes( + "0" * 65 + ) + data["signature"] = signature.hex() + response = self.client.post(url, format="json", data=data) + self.assertIn( + f"Signature of 
type=CONTRACT_SIGNATURE for signer={delegator.address} is not valid", + response.data["non_field_errors"][0], + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delegates_get(self): + url = reverse("v2:history:delegates") + response = self.client.get(url, format="json") + self.assertEqual(response.data[0], "At least one query param must be provided") + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + delegator = Account.create().address + + # Add 2 delegates for the same Safe and delegator and another for a different Safe + safe_contract_delegate_1 = SafeContractDelegateFactory(delegator=delegator) + safe_contract = safe_contract_delegate_1.safe_contract + safe_contract_delegate_2 = SafeContractDelegateFactory( + safe_contract=safe_contract, delegator=delegator + ) + safe_contract_delegate_3 = SafeContractDelegateFactory( + delegate=safe_contract_delegate_1.delegate + ) + + expected = [ + { + "delegate": safe_contract_delegate_1.delegate, + "delegator": safe_contract_delegate_1.delegator, + "label": safe_contract_delegate_1.label, + "safe": safe_contract.address, + }, + { + "delegate": safe_contract_delegate_2.delegate, + "delegator": safe_contract_delegate_2.delegator, + "label": safe_contract_delegate_2.label, + "safe": safe_contract.address, + }, + ] + response = self.client.get( + url + f"?safe={safe_contract.address}", format="json" + ) + self.assertCountEqual(response.data["results"], expected) + self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self.client.get(url + f"?delegator={delegator}", format="json") + self.assertCountEqual(response.data["results"], expected) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + expected = [ + { + "delegate": safe_contract_delegate_1.delegate, + "delegator": safe_contract_delegate_1.delegator, + "label": safe_contract_delegate_1.label, + "safe": safe_contract.address, + }, + { + "delegate": safe_contract_delegate_3.delegate, + "delegator": safe_contract_delegate_3.delegator, + "label": safe_contract_delegate_3.label, + "safe": safe_contract_delegate_3.safe_contract_id, + }, + ] + response = self.client.get( + url + f"?delegate={safe_contract_delegate_1.delegate}", format="json" + ) + self.assertCountEqual(response.data["results"], expected) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Test not found delegate address + response = self.client.get( + url + f"?delegate={Account.create().address}", format="json" + ) + self.assertCountEqual(response.data["results"], []) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delegate_delete(self): + url_name = "v2:history:delegate" + safe_address = Account.create().address + delegate = Account.create() + delegator = Account.create() + chain_id = self.ethereum_client.get_chain_id() + hash_to_sign = DelegateSignatureHelperV2.calculate_hash( + delegate.address, chain_id, False + ) + # Test delete using delegate signature and then delegator signature + for signer in (delegate, delegator): + with self.subTest(signer=signer): + SafeContractDelegateFactory( + delegate=delegate.address, delegator=delegator.address + ) # Expected to be deleted + SafeContractDelegateFactory( + safe_contract=None, + delegate=delegate.address, + delegator=delegator.address, + ) # Expected to be deleted + SafeContractDelegateFactory( + delegate=delegate.address, # random delegator, should not be deleted + ) + self.assertEqual(SafeContractDelegate.objects.count(), 3) + + data = { + "signature": 
signer.signHash(hash_to_sign)["signature"].hex(), + "delegator": delegator.address, + } + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(SafeContractDelegate.objects.count(), 1) + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + SafeContractDelegate.objects.all().delete() + + # Try to delete delegate for a specific safe + with mock.patch( + "safe_transaction_service.history.serializers.get_safe_owners", + return_value=[Account.create().address], + ) as get_safe_owners_mock: + SafeContractDelegateFactory( + delegate=delegate.address, delegator=delegator.address + ) # Should not be deleted + SafeContractDelegateFactory( + safe_contract__address=safe_address, + delegate=delegate.address, + delegator=delegator.address, + ) # Expected to be deleted + self.assertEqual(SafeContractDelegate.objects.count(), 2) + data = { + "safe": safe_address, + "signature": delegator.signHash(hash_to_sign)["signature"].hex(), + "delegator": delegator.address, + } + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertIn( + f"Provided delegator={delegator.address} is not an owner of Safe={safe_address}", + response.data["non_field_errors"][0], + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(SafeContractDelegate.objects.count(), 2) + # Mock safe owners + get_safe_owners_mock.return_value = [delegator.address] + data = { + "safe": safe_address, + "signature": delegator.signHash(hash_to_sign)["signature"].hex(), + "delegator": delegator.address, + } + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(SafeContractDelegate.objects.count(), 1) + + # Try an invalid signer + signer = Account.create() + data = { + "signature": signer.signHash(hash_to_sign)["signature"].hex(), + "delegator": delegator.address, + } + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), format="json", data=data + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn( + "Signature does not match provided delegate", + response.data["non_field_errors"][0], + ) + self.assertEqual(SafeContractDelegate.objects.count(), 1) + data = { + "signature": delegator.signHash(hash_to_sign)["signature"].hex(), + "delegator": delegator.address, + } + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(SafeContractDelegate.objects.count(), 0) + + # Try an invalid delegate_address + response = self.client.delete( + reverse( + url_name, args=("0x00000000000000000000000000000000000000000000000",) + ), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) + self.assertIn( + "Checksum address validation failed", + response.data["message"], + ) + + # Try an invalid signature + with mock.patch( + "gnosis.safe.safe_signature.SafeSignature.parse_signature", + return_value=[], + ) as parse_signature_mock: + # No signatures + response = self.client.delete( + 
reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn( + "Signature is not valid", + response.data["non_field_errors"][0], + ) + + # More than 1 signature + parse_signature_mock.return_value = [None, None] + response = self.client.delete( + reverse(url_name, args=(delegate.address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn( + "More than one signatures detected, just one is expected", + response.data["non_field_errors"][0], + ) diff --git a/safe_transaction_service/history/urls.py b/safe_transaction_service/history/urls.py index e95ab111..2f2de02e 100644 --- a/safe_transaction_service/history/urls.py +++ b/safe_transaction_service/history/urls.py @@ -16,28 +16,19 @@ views.AboutEthereumTracingRPCView.as_view(), name="about-ethereum-tracing-rpc", ), - path( - "about/master-copies/", views.MasterCopiesView.as_view(), name="master-copies" - ), # Deprecated path("about/singletons/", views.SingletonsView.as_view(), name="singletons"), path( "about/indexing/", views.IndexingView.as_view(), name="indexing", ), - path("data-decoder/", views.DataDecoderView.as_view(), name="data-decoder"), - path("delegates/", views.DelegateListView.as_view(), name="delegates"), path( - "delegates//", - views.DelegateDeleteView.as_view(), - name="delegate", + "about/deployments/", + views.SafeDeploymentsView.as_view(), + name="deployments", ), + path("data-decoder/", views.DataDecoderView.as_view(), name="data-decoder"), path("safes//", views.SafeInfoView.as_view(), name="safe-info"), - path( - "safes//transactions/", - views.SafeMultisigTransactionDeprecatedListView.as_view(), - name="multisig-transactions-alias", - ), path( "safes//multisig-transactions/", views.SafeMultisigTransactionListView.as_view(), @@ -88,16 +79,6 @@ views.SafeBalanceView.as_view(), name="safe-balances", ), - path( - "safes//balances/usd/", - views.SafeBalanceUsdView.as_view(), - name="safe-balances-usd", - ), - path( - "safes//delegates//", - views.SafeDelegateDestroyView.as_view(), - name="safe-delegate", - ), path( "multisig-transactions//", views.SafeMultisigTransactionDetailView.as_view(), @@ -111,8 +92,16 @@ path("modules//safes/", views.ModulesView.as_view(), name="modules"), path("owners//safes/", views.OwnersView.as_view(), name="owners"), path( - "transactions//", - views.SafeMultisigTransactionDeprecatedDetailView.as_view(), - name="multisig-transaction-alias", - ), + "delegates/", views.DelegateListView.as_view(), name="delegates" + ), # Deprecated + path( + "delegates//", + views.DelegateDeleteView.as_view(), + name="delegate", + ), # Deprecated + path( + "safes//delegates//", + views.SafeDelegateDestroyView.as_view(), + name="safe-delegate", + ), # Deprecated ] diff --git a/safe_transaction_service/history/urls_v2.py b/safe_transaction_service/history/urls_v2.py index 3f6a7f61..93c217f5 100644 --- a/safe_transaction_service/history/urls_v2.py +++ b/safe_transaction_service/history/urls_v2.py @@ -10,4 +10,10 @@ views_v2.SafeCollectiblesView.as_view(), name="safe-collectibles", ), + path("delegates/", views_v2.DelegateListView.as_view(), name="delegates"), + path( + "delegates//", + views_v2.DelegateDeleteView.as_view(), + name="delegate", + ), ] diff --git a/safe_transaction_service/history/utils.py b/safe_transaction_service/history/utils.py index 90a11793..f503f325 100644 --- a/safe_transaction_service/history/utils.py +++ 
b/safe_transaction_service/history/utils.py @@ -5,6 +5,7 @@ from django.utils.translation import gettext as _ from hexbytes import HexBytes +from web3.types import LogReceipt class HexField(forms.CharField): @@ -36,7 +37,7 @@ def prepare_value(self, value: memoryview) -> str: return "0x" + bytes(value).hex() if value else "" -def clean_receipt_log(receipt_log: Dict[str, Any]) -> Optional[Dict[str, Any]]: +def clean_receipt_log(receipt_log: LogReceipt) -> Optional[Dict[str, Any]]: """ Clean receipt log and make them JSON compliant diff --git a/safe_transaction_service/history/views.py b/safe_transaction_service/history/views.py index 947e898b..be642359 100644 --- a/safe_transaction_service/history/views.py +++ b/safe_transaction_service/history/views.py @@ -29,6 +29,7 @@ from gnosis.eth.constants import NULL_ADDRESS from gnosis.eth.utils import fast_is_checksum_address from gnosis.safe import CannotEstimateGas +from gnosis.safe.safe_deployments import safe_deployments from safe_transaction_service import __version__ from safe_transaction_service.utils.ethereum import get_chain_id @@ -50,7 +51,7 @@ SafeMasterCopy, TransferDict, ) -from .pagination import ListPagination +from .pagination import DummyPagination from .serializers import get_data_decoded_from_data from .services import ( BalanceServiceProvider, @@ -175,14 +176,72 @@ def get_queryset(self): return SafeMasterCopy.objects.relevant() -class MasterCopiesView(SingletonsView): +class SafeDeploymentsView(ListAPIView): + """ + Returns a list of safe deployments by version. + """ + + serializer_class = serializers.SafeDeploymentSerializer + pagination_class = None # Don't show limit/offset in swagger + + _schema_version_param = openapi.Parameter( + "version", + openapi.IN_QUERY, + type=openapi.TYPE_STRING, + default=None, + description="Filter by Safe version", + ) + _schema_contract_param = openapi.Parameter( + "contract", + openapi.IN_QUERY, + type=openapi.TYPE_STRING, + default=None, + description="Filter by Safe contract name", + ) + @swagger_auto_schema( - deprecated=True, - operation_description="Use `singletons` instead of `master-copies`", - responses={200: "Ok"}, + responses={404: "Provided version does not exist"}, + manual_parameters=[ + _schema_version_param, + _schema_contract_param, + ], ) - def get(self, *args, **kwargs): - return super().get(*args, **kwargs) + @method_decorator(cache_page(60)) # 60 seconds + def get(self, request): + filter_version = self.request.query_params.get("version") + filter_contract = self.request.query_params.get("contract") + + if filter_version and filter_version not in safe_deployments.keys(): + return Response(status=status.HTTP_404_NOT_FOUND) + + versions = [filter_version] if filter_version else list(safe_deployments.keys()) + chain_id = str(get_chain_id()) + data_response = [] + for version in versions: + contracts = [] + if filter_contract: + # Filter by contract name + if addresses := safe_deployments[version].get(filter_contract): + contracts.append( + { + "contract_name": filter_contract, + "address": addresses.get(str(chain_id)), + } + ) + else: + for contract_name, addresses in safe_deployments[version].items(): + contracts.append( + { + "contract_name": contract_name, + "address": addresses.get(chain_id), + } + ) + + data_response.append({"version": version, "contracts": contracts}) + + serializer = self.serializer_class(data=data_response, many=True) + serializer.is_valid(raise_exception=True) + return Response(status=status.HTTP_200_OK, data=serializer.data) class 
AllTransactionsListView(ListAPIView): @@ -334,10 +393,21 @@ def get_cached_page_tx_identifiers( redis = get_redis() # Get all relevant elements for a Safe to be cached - relevant_elements = transaction_service.get_count_relevant_txs_for_safe(safe) - cache_key = f"all-txs:{safe}:{int(executed)}{int(queued)}{int(trusted)}:{limit}:{offset}:{ordering}:{relevant_elements}" - lock_key = f"locks:{cache_key}" + cache_hash_key = transaction_service.get_all_txs_cache_hash_key(safe) + cache_query_field = ( + f"{int(executed)}{int(queued)}{int(trusted)}:{limit}:{offset}:{ordering}" + ) + lock_key = f"locks:{cache_hash_key}:{cache_query_field}" + logger.debug( + "%s: All txs from identifiers for Safe=%s executed=%s queued=%s trusted=%s lock-key=%s", + self.__class__.__name__, + safe, + executed, + queued, + trusted, + lock_key, + ) if not cache_timeout: # Cache disabled return self.get_page_tx_identifiers( @@ -348,7 +418,7 @@ def get_cached_page_tx_identifiers( lock_key, timeout=settings.GUNICORN_REQUEST_TIMEOUT, # This prevents a service restart to leave a lock forever ): - if result := redis.get(cache_key): + if result := redis.hget(cache_hash_key, cache_query_field): # Count needs to be retrieved to set it up the paginator page, count = pickle.loads(result) # Setting the paginator like this is not very elegant and needs to be tested really well @@ -357,13 +427,16 @@ def get_cached_page_tx_identifiers( self.paginator.offset = offset self.paginator.request = self.request return page + page = self.get_page_tx_identifiers( safe, executed, queued, trusted, ordering, limit, offset ) - redis.set( - cache_key, pickle.dumps((page, self.paginator.count)), ex=cache_timeout + redis.hset( + cache_hash_key, + cache_query_field, + pickle.dumps((page, self.paginator.count)), ) - + redis.expire(cache_hash_key, cache_timeout) return page def list(self, request, *args, **kwargs): @@ -372,7 +445,7 @@ def list(self, request, *args, **kwargs): executed, queued, trusted = self.get_parameters() ordering = self.get_ordering_parameter() # Trick to get limit and offset - list_pagination = ListPagination(self.request) + list_pagination = DummyPagination(self.request) limit, offset = list_pagination.limit, list_pagination.offset tx_identifiers_page = self.get_cached_page_tx_identifiers( @@ -406,7 +479,17 @@ def list(self, request, *args, **kwargs): queued, trusted, ) - return self.get_paginated_response(all_txs_serialized) + paginated_response = self.get_paginated_response(all_txs_serialized) + logger.debug( + "%s: All txs from identifiers for Safe=%s executed=%s queued=%s trusted=%s: %s", + self.__class__.__name__, + safe, + executed, + queued, + trusted, + paginated_response.data["results"], + ) + return paginated_response @swagger_auto_schema( responses={ @@ -420,7 +503,7 @@ def list(self, request, *args, **kwargs): ], ) def get(self, request, *args, **kwargs): - """ + f""" Returns a paginated list of transactions for a Safe. The list has different structures depending on the transaction type: - Multisig Transactions for a Safe. `tx_type=MULTISIG_TRANSACTION`. If the query parameter `queued=False` is @@ -429,6 +512,7 @@ def get(self, request, *args, **kwargs): by a delegate). If you need that behaviour to be disabled set the query parameter `trusted=False` - Module Transactions for a Safe. `tx_type=MODULE_TRANSACTION` - Incoming Transfers of Ether/ERC20 Tokens/ERC721 Tokens. `tx_type=ETHEREUM_TRANSACTION` + Only `{settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS}` newest transfers will be returned. 
Ordering_fields: ["execution_date"] eg: `execution_date` or `-execution_date` """ address = kwargs["address"] @@ -586,15 +670,59 @@ def get_queryset(self): .select_related("ethereum_tx__block") ) - -class SafeMultisigTransactionDeprecatedDetailView(SafeMultisigTransactionDetailView): @swagger_auto_schema( - deprecated=True, - operation_description="Use `multisig-transactions` instead of `transactions`", - responses={200: "Ok", 404: "Not found"}, + request_body=serializers.SafeMultisigTransactionDeleteSerializer(), + responses={ + 204: "Deleted", + 404: "Transaction not found", + 400: "Error processing data", + }, ) - def get(self, *args, **kwargs): - return super().get(*args, **kwargs) + def delete(self, request, safe_tx_hash: HexStr): + """ + Delete a queued but not executed multisig transaction. Only the proposer can delete the transaction. + Delegates are not valid, if the transaction was proposed by a delegator the owner who delegated to + the delegate must be used. + An EOA is required to sign the following EIP712 data: + + ```python + { + "types": { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + {"name": "verifyingContract", "type": "address"}, + ], + "DeleteRequest": [ + {"name": "safeTxHash", "type": "bytes32"}, + {"name": "totp", "type": "uint256"}, + ], + }, + "primaryType": "DeleteRequest", + "domain": { + "name": "Safe Transaction Service", + "version": "1.0", + "chainId": chain_id, + "verifyingContract": safe_address, + }, + "message": { + "safeTxHash": safe_tx_hash, + "totp": totp, + }, + } + ``` + + `totp` parameter is calculated with `T0=0` and `Tx=3600`. `totp` is calculated by taking the + Unix UTC epoch time (no milliseconds) and dividing by 3600 (natural division, no decimals) + """ + request.data["safe_tx_hash"] = safe_tx_hash + serializer = serializers.SafeMultisigTransactionDeleteSerializer( + data=request.data + ) + serializer.is_valid(raise_exception=True) + MultisigTransaction.objects.filter(safe_tx_hash=safe_tx_hash).delete() + return Response(status=status.HTTP_204_NO_CONTENT) class SafeMultisigTransactionListView(ListAPIView): @@ -616,9 +744,17 @@ def get_queryset(self): ) def get_unique_nonce(self, address: str): - return ( - MultisigTransaction.objects.filter(safe=address).distinct("nonce").count() + """ + :param address: + :return: Number of Multisig Transactions with different nonce + """ + only_trusted = parse_boolean_query_param( + self.request.query_params.get("trusted", True) ) + queryset = MultisigTransaction.objects.filter(safe=address) + if only_trusted: + queryset = queryset.filter(trusted=True) + return queryset.distinct("nonce").count() def get_serializer_class(self): """ @@ -634,7 +770,8 @@ def get_serializer_class(self): ) def get(self, request, *args, **kwargs): """ - Returns the history of a multisig tx (safe) + Returns a paginated list of Multisig Transactions for a Safe. + By default only ``trusted`` multisig transactions are returned. 
""" address = kwargs["address"] if not fast_is_checksum_address(address): @@ -685,22 +822,6 @@ def post(self, request, address, format=None): return Response(status=status.HTTP_201_CREATED) -class SafeMultisigTransactionDeprecatedListView(SafeMultisigTransactionListView): - @swagger_auto_schema( - deprecated=True, - operation_description="Use `multisig-transactions` instead of `transactions`", - ) - def get(self, *args, **kwargs): - return super().get(*args, **kwargs) - - @swagger_auto_schema( - deprecated=True, - operation_description="Use `multisig-transactions` instead of `transactions`", - ) - def post(self, *args, **kwargs): - return super().post(*args, **kwargs) - - def swagger_safe_balance_schema(serializer_class, deprecated: bool = False): _schema_token_trusted_param = openapi.Parameter( "trusted", @@ -778,152 +899,6 @@ def get(self, request, address): return Response(status=status.HTTP_200_OK, data=serializer.data) -class SafeBalanceUsdView(SafeBalanceView): - serializer_class = serializers.SafeBalanceUsdResponseSerializer - - def get_result(self, *args, **kwargs): - return BalanceServiceProvider().get_usd_balances(*args, **kwargs) - - @swagger_safe_balance_schema(serializer_class) - def get(self, *args, **kwargs): - """ - Get balance for Ether and ERC20 tokens with USD fiat conversion - """ - return super().get(*args, **kwargs) - - -class SafeDelegateDestroyView(DestroyAPIView): - serializer_class = serializers.SafeDelegateDeleteSerializer - - def get_object(self): - return get_object_or_404( - SafeContractDelegate, - safe_contract_id=self.kwargs["address"], - delegate=self.kwargs["delegate_address"], - ) - - @swagger_auto_schema( - request_body=serializer_class(), - responses={ - 204: "Deleted", - 400: "Malformed data", - 404: "Delegate not found", - 422: "Invalid Ethereum address/Error processing data", - }, - ) - def delete(self, request, address, delegate_address, *args, **kwargs): - """ - Delete a delegate for a Safe. Signature is built the same way that for adding a delegate. 
- Check `POST /delegates/` - """ - if not fast_is_checksum_address(address) or not fast_is_checksum_address( - delegate_address - ): - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Checksum address validation failed", - "arguments": [address, delegate_address], - }, - ) - - body_delegate = request.data.get("delegate", delegate_address) - if ( - body_delegate != delegate_address - ): # Check delegate in body matches the one in url - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 2, - "message": "Delegate address in body should match the one in the url", - "arguments": [body_delegate, delegate_address], - }, - ) - - request.data["safe"] = address - request.data["delegate"] = delegate_address - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - return super().delete(request, address, delegate_address, *args, **kwargs) - - -class DelegateListView(ListCreateAPIView): - filter_backends = (django_filters.rest_framework.DjangoFilterBackend,) - filterset_class = filters.DelegateListFilter - pagination_class = pagination.DefaultPagination - queryset = SafeContractDelegate.objects.all() - - def get_serializer_class(self): - if self.request.method == "GET": - return serializers.SafeDelegateResponseSerializer - elif self.request.method == "POST": - return serializers.DelegateSerializer - - @swagger_auto_schema(responses={400: "Invalid data"}) - def get(self, request, **kwargs): - """ - Get list of delegates - """ - return super().get(request, **kwargs) - - @swagger_auto_schema(responses={202: "Accepted", 400: "Malformed data"}) - def post(self, request, **kwargs): - """ - Create a delegate for a Safe address with a custom label. Calls with same delegate but different label or - signer will update the label or delegator if different. - For the signature we are using TOTP with `T0=0` and `Tx=3600`. TOTP is calculated by taking the - Unix UTC epoch time (no milliseconds) and dividing by 3600 (natural division, no decimals) - For signature this hash need to be signed: keccak(checksummed address + str(int(current_epoch // 3600))) - For example: - - We want to add the delegate `0x132512f995866CcE1b0092384A6118EDaF4508Ff` and `epoch=1586779140`. - - `TOTP = epoch // 3600 = 1586779140 // 3600 = 440771` - - The hash to sign by a Safe owner would be `keccak("0x132512f995866CcE1b0092384A6118EDaF4508Ff440771")` - """ - return super().post(request, **kwargs) - - -class DelegateDeleteView(GenericAPIView): - serializer_class = serializers.DelegateDeleteSerializer - - @swagger_auto_schema( - request_body=serializer_class(), - responses={ - 204: "Deleted", - 400: "Malformed data", - 404: "Delegate not found", - 422: "Invalid Ethereum address/Error processing data", - }, - ) - def delete(self, request, delegate_address, *args, **kwargs): - """ - Delete every pair delegate/delegator found. Signature is built the same way as for adding a delegate, - but in this case the signer can be either the `delegator` (owner) or the `delegate` itself. 
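Editor note: the views being relocated above document the legacy (v1) delegate signature, `keccak(checksummed_delegate_address + str(epoch // 3600))`, signed by a Safe owner (or by the delegate itself when deleting). A small sketch of how a client could produce that signature is below; it assumes `eth-utils` and `eth-account` are available, mirrors the `signHash` pattern already used in the test suite, and uses a hypothetical owner key.

```python
# Hedged sketch of the legacy v1 delegate signature described above:
# hash = keccak(checksummed_delegate_address + str(epoch // 3600)),
# signed by a Safe owner (or by the delegate itself when deleting).
# Assumptions: eth-utils and eth-account; OWNER_PRIVATE_KEY is hypothetical.
import time

from eth_account import Account
from eth_utils import keccak

OWNER_PRIVATE_KEY = "0x..."  # hypothetical Safe owner (delegator) key


def legacy_delegate_signature(delegate_address: str) -> str:
    totp = int(time.time()) // 3600  # T0=0, Tx=3600
    hash_to_sign = keccak(text=f"{delegate_address}{totp}")
    owner = Account.from_key(OWNER_PRIVATE_KEY)
    # Same signHash access pattern used throughout the tests in this diff
    return owner.signHash(hash_to_sign)["signature"].hex()
```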
- Check `POST /delegates/` - """ - if not fast_is_checksum_address(delegate_address): - return Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Checksum address validation failed", - "arguments": [delegate_address], - }, - ) - - request.data["delegate"] = delegate_address - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - deleted, _ = SafeContractDelegate.objects.filter( - delegate=serializer.validated_data["delegate"], - delegator=serializer.validated_data["delegator"], - ).delete() - if deleted: - return Response(status=status.HTTP_204_NO_CONTENT) - else: - return Response(status=status.HTTP_404_NOT_FOUND) - - class TransferView(RetrieveAPIView): serializer_class = serializers.TransferWithTokenInfoResponseSerializer pagination_class = None @@ -1020,13 +995,13 @@ class SafeTransferListView(ListAPIView): def get_transfers(self, address: str): erc20_queryset = self.filter_queryset( ERC20Transfer.objects.to_or_from(address).token_txs() - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] erc721_queryset = self.filter_queryset( ERC721Transfer.objects.to_or_from(address).token_txs() - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] ether_queryset = self.filter_queryset( InternalTx.objects.ether_txs_for_address(address) - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] return InternalTx.objects.union_ether_and_token_txs( erc20_queryset, erc721_queryset, ether_queryset ) @@ -1055,8 +1030,9 @@ def list(self, request, *args, **kwargs): } ) def get(self, request, address, format=None): - """ - Returns ether/tokens transfers for a Safe + f""" + Returns ether/tokens transfers for a Safe. + Only `{settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS}` newest transfers will be returned. """ if not fast_is_checksum_address(address): return Response( @@ -1079,21 +1055,23 @@ class SafeIncomingTransferListView(SafeTransferListView): } ) def get(self, *args, **kwargs): - """ - Returns incoming ether/tokens transfers for a Safe + f""" + Returns incoming ether/tokens transfers for a Safe. + Only `{settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS}` newest transfers will be returned. 
""" return super().get(*args, **kwargs) def get_transfers(self, address: str): erc20_queryset = self.filter_queryset( ERC20Transfer.objects.incoming(address).token_txs() - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] erc721_queryset = self.filter_queryset( ERC721Transfer.objects.incoming(address).token_txs() - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] ether_queryset = self.filter_queryset( InternalTx.objects.ether_incoming_txs_for_address(address) - ) + )[: settings.TX_SERVICE_ALL_TXS_ENDPOINT_LIMIT_TRANSFERS] + return InternalTx.objects.union_ether_and_token_txs( erc20_queryset, erc721_queryset, ether_queryset ) @@ -1313,6 +1291,13 @@ def post(self, request, address, *args, **kwargs): if not SafeContract.objects.filter(address=address).exists(): return Response(status=status.HTTP_404_NOT_FOUND) + # This endpoint is only needed for Safes < 1.3.0, so it should be disabled for L2 chains as they + # don't support Safes below that version + if settings.ETH_L2_NETWORK: + response_serializer = self.response_serializer(data={"safe_tx_gas": 0}) + response_serializer.is_valid() + return Response(status=status.HTTP_200_OK, data=response_serializer.data) + serializer = self.get_serializer(data=request.data) if serializer.is_valid(): try: @@ -1337,3 +1322,160 @@ def post(self, request, address, *args, **kwargs): ) else: return Response(status=status.HTTP_400_BAD_REQUEST, data=serializer.errors) + + +# Deprecated --------------------------------------------------------------- + + +class DelegateListView(ListCreateAPIView): + """ + + .. deprecated:: 4.38.0 + Deprecated in favor of V2 view supporting EIP712 signatures + """ + + filter_backends = (django_filters.rest_framework.DjangoFilterBackend,) + filterset_class = filters.DelegateListFilter + pagination_class = pagination.DefaultPagination + queryset = SafeContractDelegate.objects.all() + + def get_serializer_class(self): + if self.request.method == "GET": + return serializers.SafeDelegateResponseSerializer + elif self.request.method == "POST": + return serializers.DelegateSerializer + + @swagger_auto_schema(deprecated=True, responses={400: "Invalid data"}) + def get(self, request, **kwargs): + """ + Get list of delegates + """ + return super().get(request, **kwargs) + + @swagger_auto_schema( + deprecated=True, responses={202: "Accepted", 400: "Malformed data"} + ) + def post(self, request, **kwargs): + """ + Create a delegate for a Safe address with a custom label. Calls with same delegate but different label or + signer will update the label or delegator if different. + For the signature we are using TOTP with `T0=0` and `Tx=3600`. TOTP is calculated by taking the + Unix UTC epoch time (no milliseconds) and dividing by 3600 (natural division, no decimals) + For signature this hash need to be signed: keccak(checksummed address + str(int(current_epoch // 3600))) + For example: + - We want to add the delegate `0x132512f995866CcE1b0092384A6118EDaF4508Ff` and `epoch=1586779140`. + - `TOTP = epoch // 3600 = 1586779140 // 3600 = 440771` + - The hash to sign by a Safe owner would be `keccak("0x132512f995866CcE1b0092384A6118EDaF4508Ff440771")` + """ + return super().post(request, **kwargs) + + +class DelegateDeleteView(GenericAPIView): + """ + + .. 
deprecated:: 4.38.0 + Deprecated in favor of V2 view supporting EIP712 signatures + """ + + serializer_class = serializers.DelegateDeleteSerializer + + @swagger_auto_schema( + deprecated=True, + request_body=serializer_class(), + responses={ + 204: "Deleted", + 400: "Malformed data", + 404: "Delegate not found", + 422: "Invalid Ethereum address/Error processing data", + }, + ) + def delete(self, request, delegate_address, *args, **kwargs): + """ + Delete every pair delegate/delegator found. Signature is built the same way as for adding a delegate, + but in this case the signer can be either the `delegator` (owner) or the `delegate` itself. + Check `POST /delegates/` + """ + if not fast_is_checksum_address(delegate_address): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [delegate_address], + }, + ) + + request.data["delegate"] = delegate_address + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + deleted, _ = SafeContractDelegate.objects.filter( + delegate=serializer.validated_data["delegate"], + delegator=serializer.validated_data["delegator"], + ).delete() + if deleted: + return Response(status=status.HTTP_204_NO_CONTENT) + else: + return Response(status=status.HTTP_404_NOT_FOUND) + + +class SafeDelegateDestroyView(DestroyAPIView): + """ + + .. deprecated:: 4.38.0 + Deprecated in favor of V2 view supporting EIP712 signatures + """ + + serializer_class = serializers.SafeDelegateDeleteSerializer + + def get_object(self): + return get_object_or_404( + SafeContractDelegate, + safe_contract_id=self.kwargs["address"], + delegate=self.kwargs["delegate_address"], + ) + + @swagger_auto_schema( + deprecated=True, + request_body=serializer_class(), + responses={ + 204: "Deleted", + 400: "Malformed data", + 404: "Delegate not found", + 422: "Invalid Ethereum address/Error processing data", + }, + ) + def delete(self, request, address, delegate_address, *args, **kwargs): + """ + Delete a delegate for a Safe. Signature is built the same way that for adding a delegate. 
+ Check `POST /delegates/` + """ + if not fast_is_checksum_address(address) or not fast_is_checksum_address( + delegate_address + ): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [address, delegate_address], + }, + ) + + body_delegate = request.data.get("delegate", delegate_address) + if ( + body_delegate != delegate_address + ): # Check delegate in body matches the one in url + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 2, + "message": "Delegate address in body should match the one in the url", + "arguments": [body_delegate, delegate_address], + }, + ) + + request.data["safe"] = address + request.data["delegate"] = delegate_address + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + return super().delete(request, address, delegate_address, *args, **kwargs) diff --git a/safe_transaction_service/history/views_v2.py b/safe_transaction_service/history/views_v2.py index 55ddc862..f9d34a4d 100644 --- a/safe_transaction_service/history/views_v2.py +++ b/safe_transaction_service/history/views_v2.py @@ -1,15 +1,19 @@ import logging +from django.db.models import Q + +import django_filters +from drf_yasg.utils import swagger_auto_schema from rest_framework import status -from rest_framework.generics import GenericAPIView +from rest_framework.generics import GenericAPIView, ListCreateAPIView from rest_framework.response import Response from gnosis.eth.utils import fast_is_checksum_address from safe_transaction_service.utils.utils import parse_boolean_query_param -from . import pagination, serializers -from .models import SafeContract +from . import filters, pagination, serializers +from .models import SafeContract, SafeContractDelegate from .services.collectibles_service import CollectiblesServiceProvider from .views import swagger_safe_balance_schema @@ -58,3 +62,108 @@ def get(self, request, address): paginator.set_count(count) serializer = self.get_serializer(safe_collectibles, many=True) return paginator.get_paginated_response(serializer.data) + + +class DelegateListView(ListCreateAPIView): + filter_backends = (django_filters.rest_framework.DjangoFilterBackend,) + filterset_class = filters.DelegateListFilter + pagination_class = pagination.DefaultPagination + queryset = SafeContractDelegate.objects.all() + + def get_serializer_class(self): + if self.request.method == "GET": + return serializers.SafeDelegateResponseSerializer + elif self.request.method == "POST": + return serializers.DelegateSerializerV2 + + @swagger_auto_schema(responses={400: "Invalid data"}) + def get(self, request, **kwargs): + """ + Get list of delegates + """ + return super().get(request, **kwargs) + + @swagger_auto_schema(responses={202: "Accepted", 400: "Malformed data"}) + def post(self, request, **kwargs): + """ + Create a delegate for a Safe address with a custom label. Calls with same delegate but different label or + signer will update the label or delegator if different. 
+ An EOA is required to sign the following EIP712 data: + + ```python + { + "types": { + "EIP712Domain": [ + {"name": "name", "type": "string"}, + {"name": "version", "type": "string"}, + {"name": "chainId", "type": "uint256"}, + ], + "AddDelegate": [ + {"name": "delegateAddress", "type": "bytes32"}, + {"name": "totp", "type": "uint256"}, + ], + }, + "primaryType": "AddDelegate", + "domain": { + "name": "Safe Transaction Service", + "version": "1.0", + "chainId": chain_id, + }, + "message": { + "delegateAddress": delegate_address, + "totp": totp, + }, + } + ``` + + `totp` parameter is calculated with `T0=0` and `Tx=3600`. `totp` is calculated by taking the + Unix UTC epoch time (no milliseconds) and dividing by 3600 (natural division, no decimals) + """ + return super().post(request, **kwargs) + + +class DelegateDeleteView(GenericAPIView): + serializer_class = serializers.DelegateDeleteSerializerV2 + + @swagger_auto_schema( + request_body=serializer_class(), + responses={ + 204: "Deleted", + 400: "Malformed data", + 404: "Delegate not found", + 422: "Invalid Ethereum address/Error processing data", + }, + ) + def delete(self, request, delegate_address, *args, **kwargs): + """ + Removes all delegate/delegator pairs found or combinations of safe/delegate/delegator/delegate. The signature + is constructed in the same way as for adding a delegate, but in this case the signer can be either the + `delegator` (owner) or the `delegate` itself. Check `POST /delegates/`. + """ + if not fast_is_checksum_address(delegate_address): + return Response( + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + data={ + "code": 1, + "message": "Checksum address validation failed", + "arguments": [delegate_address], + }, + ) + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + deleted, _ = SafeContractDelegate.objects.filter( + Q( + safe_contract_id=serializer.validated_data["safe"], + delegate=delegate_address, + delegator=serializer.validated_data["delegator"], + ) + if serializer.validated_data.get("safe", None) + else Q( + delegate=delegate_address, + delegator=serializer.validated_data["delegator"], + ) + ).delete() + if deleted: + return Response(status=status.HTTP_204_NO_CONTENT) + else: + return Response(status=status.HTTP_404_NOT_FOUND) diff --git a/safe_transaction_service/notifications/admin.py b/safe_transaction_service/notifications/admin.py index 51a649ae..76fb74dc 100644 --- a/safe_transaction_service/notifications/admin.py +++ b/safe_transaction_service/notifications/admin.py @@ -2,13 +2,13 @@ from django.contrib import admin -from gnosis.eth.django.admin import BinarySearchAdmin +from safe_transaction_service.utils.admin import AdvancedAdminSearchMixin from .models import FirebaseDevice, FirebaseDeviceOwner @admin.register(FirebaseDevice) -class FirebaseDeviceAdmin(BinarySearchAdmin): +class FirebaseDeviceAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ( "uuid", "cloud_messaging_token", @@ -20,7 +20,7 @@ class FirebaseDeviceAdmin(BinarySearchAdmin): ordering = ["uuid"] raw_id_fields = ("safes",) readonly_fields = ("owners",) - search_fields = ["uuid", "cloud_messaging_token", "=safes__address"] + search_fields = ["==uuid", "==cloud_messaging_token", "==safes__address"] def get_queryset(self, request): qs = super().get_queryset(request) @@ -34,7 +34,10 @@ def safe_addresses(self, obj: FirebaseDevice) -> List[str]: @admin.register(FirebaseDeviceOwner) -class FirebaseDeviceOwnerAdmin(BinarySearchAdmin): +class 
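
As a rough client-side sketch, the EIP-712 payload documented in the v2 docstring above could be hashed and signed as below. It reuses `eip712_encode_hash` and `signHash`, both of which appear elsewhere in this changeset; the chain id and delegate address are placeholders, and it assumes `eip712_encode_hash` packs the 20-byte address into the `bytes32` `delegateAddress` field the same way the service-side serializer does.

```python
# Hypothetical client-side sketch for the v2 delegates endpoint (not service code).
import time

from eth_account import Account

from gnosis.eth.eip712 import eip712_encode_hash

chain_id = 1                                                     # placeholder
delegate_address = "0x132512f995866CcE1b0092384A6118EDaF4508Ff"  # placeholder
totp = int(time.time()) // 3600                                  # T0=0, Tx=3600

payload = {
    "types": {
        "EIP712Domain": [
            {"name": "name", "type": "string"},
            {"name": "version", "type": "string"},
            {"name": "chainId", "type": "uint256"},
        ],
        "AddDelegate": [
            {"name": "delegateAddress", "type": "bytes32"},
            {"name": "totp", "type": "uint256"},
        ],
    },
    "primaryType": "AddDelegate",
    "domain": {
        "name": "Safe Transaction Service",
        "version": "1.0",
        "chainId": chain_id,
    },
    "message": {"delegateAddress": delegate_address, "totp": totp},
}

delegator_account = Account.create()  # in practice, a Safe owner or the delegate itself
signature = delegator_account.signHash(eip712_encode_hash(payload))["signature"].hex()
```
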
FirebaseDeviceOwnerAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ("firebase_device_id", "owner") ordering = ["firebase_device_id"] - search_fields = ["firebase_device_id__uuid", "=owner"] + search_fields = [ + "==firebase_device_id__uuid", + "==owner", + ] diff --git a/safe_transaction_service/notifications/migrations/0001_initial.py b/safe_transaction_service/notifications/migrations/0001_initial.py index ec9ca567..24687173 100644 --- a/safe_transaction_service/notifications/migrations/0001_initial.py +++ b/safe_transaction_service/notifications/migrations/0001_initial.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ diff --git a/safe_transaction_service/notifications/migrations/0002_auto_20200806_1534.py b/safe_transaction_service/notifications/migrations/0002_auto_20200806_1534.py index 2218cb73..b0836c0a 100644 --- a/safe_transaction_service/notifications/migrations/0002_auto_20200806_1534.py +++ b/safe_transaction_service/notifications/migrations/0002_auto_20200806_1534.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("notifications", "0001_initial"), ] diff --git a/safe_transaction_service/notifications/migrations/0003_firebasedeviceowner.py b/safe_transaction_service/notifications/migrations/0003_firebasedeviceowner.py index aabff7e5..b22deaea 100644 --- a/safe_transaction_service/notifications/migrations/0003_firebasedeviceowner.py +++ b/safe_transaction_service/notifications/migrations/0003_firebasedeviceowner.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("notifications", "0002_auto_20200806_1534"), ] diff --git a/safe_transaction_service/notifications/migrations/0004_alter_firebasedeviceowner_id.py b/safe_transaction_service/notifications/migrations/0004_alter_firebasedeviceowner_id.py index d814f914..70d4bd52 100644 --- a/safe_transaction_service/notifications/migrations/0004_alter_firebasedeviceowner_id.py +++ b/safe_transaction_service/notifications/migrations/0004_alter_firebasedeviceowner_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("notifications", "0003_firebasedeviceowner"), ] diff --git a/safe_transaction_service/notifications/migrations/0005_ethereum_address_field_v2.py b/safe_transaction_service/notifications/migrations/0005_ethereum_address_field_v2.py index d3a52950..a291bbb9 100644 --- a/safe_transaction_service/notifications/migrations/0005_ethereum_address_field_v2.py +++ b/safe_transaction_service/notifications/migrations/0005_ethereum_address_field_v2.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("notifications", "0004_alter_firebasedeviceowner_id"), ] diff --git a/safe_transaction_service/notifications/migrations/0006_alter_firebasedeviceowner_unique_together_and_more.py b/safe_transaction_service/notifications/migrations/0006_alter_firebasedeviceowner_unique_together_and_more.py new file mode 100644 index 00000000..1b3a182a --- /dev/null +++ b/safe_transaction_service/notifications/migrations/0006_alter_firebasedeviceowner_unique_together_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.10 on 2024-02-27 15:58 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("notifications", "0005_ethereum_address_field_v2"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="firebasedeviceowner", + unique_together=set(), + ), + migrations.AddConstraint( + model_name="firebasedeviceowner", + 
constraint=models.UniqueConstraint( + fields=("firebase_device", "owner"), name="unique_firebase_device_owner" + ), + ), + ] diff --git a/safe_transaction_service/notifications/models.py b/safe_transaction_service/notifications/models.py index c50f5bd2..4d2ad341 100644 --- a/safe_transaction_service/notifications/models.py +++ b/safe_transaction_service/notifications/models.py @@ -70,7 +70,11 @@ class FirebaseDeviceOwner(models.Model): class Meta: verbose_name = "Firebase Device Owner" verbose_name_plural = "Firebase Device Owners" - unique_together = (("firebase_device", "owner"),) + constraints = [ + models.UniqueConstraint( + fields=["firebase_device", "owner"], name="unique_firebase_device_owner" + ) + ] def __str__(self): return f"{self.owner} for device {self.firebase_device_id}" diff --git a/safe_transaction_service/notifications/tests/test_tasks.py b/safe_transaction_service/notifications/tests/test_tasks.py index 40747a0a..6f2a44e6 100644 --- a/safe_transaction_service/notifications/tests/test_tasks.py +++ b/safe_transaction_service/notifications/tests/test_tasks.py @@ -3,7 +3,8 @@ from django.test import TestCase from eth_account import Account -from web3 import Web3 + +from gnosis.eth.utils import fast_keccak_text from safe_transaction_service.history.models import ( EthereumTxCallType, @@ -91,7 +92,7 @@ def test_send_notification_owner_task(self): safe_address = safe_contract.address threshold = 2 owners = [Account.create().address for _ in range(2)] - safe_tx_hash = Web3.keccak(text="hola").hex() + safe_tx_hash = fast_keccak_text("hola").hex() with self.assertLogs(logger=task_logger) as cm: self.assertEqual( send_notification_owner_task.delay(safe_address, safe_tx_hash).result, @@ -183,7 +184,7 @@ def test_send_notification_owner_task(self): self.assertIn("does not require more confirmations", cm.output[0]) def test_send_notification_owner_delegate_task(self): - safe_tx_hash = Web3.keccak(text="aloha").hex() + safe_tx_hash = fast_keccak_text("aloha").hex() safe_contract = SafeContractFactory() safe_address = safe_contract.address safe_status = SafeLastStatusFactory(address=safe_address, threshold=3) @@ -218,7 +219,7 @@ def test_send_notification_owner_delegate_task(self): def test_send_notification_owner_task_called(self): safe_address = Account.create().address - safe_tx_hash = Web3.keccak(text="hola").hex() + safe_tx_hash = fast_keccak_text("hola").hex() payload = { "address": safe_address, "type": WebHookType.PENDING_MULTISIG_TRANSACTION.name, diff --git a/safe_transaction_service/safe_messages/admin.py b/safe_transaction_service/safe_messages/admin.py index 8ca928dc..8e441397 100644 --- a/safe_transaction_service/safe_messages/admin.py +++ b/safe_transaction_service/safe_messages/admin.py @@ -1,21 +1,21 @@ from django.contrib import admin -from gnosis.eth.django.admin import BinarySearchAdmin +from safe_transaction_service.utils.admin import AdvancedAdminSearchMixin from .models import SafeMessage, SafeMessageConfirmation @admin.register(SafeMessage) -class SafeMessageAdmin(BinarySearchAdmin): +class SafeMessageAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "created" list_display = ("safe", "message_hash", "proposed_by", "message") ordering = ["-created"] readonly_fields = ("message_hash",) - search_fields = ["=safe", "=message_hash", "=proposed_by", "message"] + search_fields = ["==safe", "==message_hash", "==proposed_by", "message"] @admin.register(SafeMessageConfirmation) -class SafeMessageConfirmationAdmin(BinarySearchAdmin): +class 
SafeMessageConfirmationAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): date_hierarchy = "created" list_display = ( "safe_message", @@ -26,7 +26,6 @@ class SafeMessageConfirmationAdmin(BinarySearchAdmin): list_select_related = ("safe_message",) ordering = ["-created"] search_fields = [ - "=safe_message__safe", - "=owner", - "safe_message__description", + "==safe_message__safe", + "==owner", ] diff --git a/safe_transaction_service/safe_messages/migrations/0001_initial.py b/safe_transaction_service/safe_messages/migrations/0001_initial.py index b2416ad1..01c68ac8 100644 --- a/safe_transaction_service/safe_messages/migrations/0001_initial.py +++ b/safe_transaction_service/safe_messages/migrations/0001_initial.py @@ -10,7 +10,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/safe_transaction_service/safe_messages/migrations/0002_alter_safemessageconfirmation_unique_together_and_more.py b/safe_transaction_service/safe_messages/migrations/0002_alter_safemessageconfirmation_unique_together_and_more.py new file mode 100644 index 00000000..4e359a18 --- /dev/null +++ b/safe_transaction_service/safe_messages/migrations/0002_alter_safemessageconfirmation_unique_together_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.10 on 2024-02-27 15:58 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("safe_messages", "0001_initial"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="safemessageconfirmation", + unique_together=set(), + ), + migrations.AddConstraint( + model_name="safemessageconfirmation", + constraint=models.UniqueConstraint( + fields=("safe_message", "owner"), + name="unique_safe_message_confirmation_owner", + ), + ), + ] diff --git a/safe_transaction_service/safe_messages/migrations/0003_alter_safemessageconfirmation_signature.py b/safe_transaction_service/safe_messages/migrations/0003_alter_safemessageconfirmation_signature.py new file mode 100644 index 00000000..be8c1476 --- /dev/null +++ b/safe_transaction_service/safe_messages/migrations/0003_alter_safemessageconfirmation_signature.py @@ -0,0 +1,31 @@ +# Generated by Django 5.0.3 on 2024-03-20 12:54 + +from django.db import migrations + +import gnosis.eth.django.models + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "safe_messages", + "0002_alter_safemessageconfirmation_unique_together_and_more", + ), + ] + + operations = [ + migrations.RunSQL( + """ + ALTER TABLE "safe_messages_safemessageconfirmation" ALTER COLUMN "signature" TYPE bytea USING DECODE("signature", 'hex'); + """, + reverse_sql=""" + ALTER TABLE "safe_messages_safemessageconfirmation" ALTER COLUMN "signature" TYPE text USING ENCODE("signature"::bytea, 'hex'); + """, + ), + migrations.AlterField( + model_name="safemessageconfirmation", + name="signature", + field=gnosis.eth.django.models.HexV2Field(max_length=5000), + ), + ] diff --git a/safe_transaction_service/safe_messages/models.py b/safe_transaction_service/safe_messages/models.py index 84f0dfc5..0dfe90f1 100644 --- a/safe_transaction_service/safe_messages/models.py +++ b/safe_transaction_service/safe_messages/models.py @@ -6,9 +6,11 @@ from hexbytes import HexBytes from model_utils.models import TimeStampedModel -from gnosis.eth.django.models import EthereumAddressV2Field, HexField, Keccak256Field +from gnosis.eth.django.models import EthereumAddressV2Field, HexV2Field, Keccak256Field from gnosis.safe.safe_signature import SafeSignatureType +from 
safe_transaction_service.utils.constants import SIGNATURE_LENGTH + logger = getLogger(__name__) @@ -61,13 +63,18 @@ class SafeMessageConfirmation(TimeStampedModel): related_name="confirmations", ) owner = EthereumAddressV2Field(db_index=True) - signature = HexField(max_length=5000) + signature = HexV2Field(max_length=SIGNATURE_LENGTH) signature_type = models.PositiveSmallIntegerField( choices=[(tag.value, tag.name) for tag in SafeSignatureType], db_index=True ) class Meta: - unique_together = (("safe_message", "owner"),) + constraints = [ + models.UniqueConstraint( + fields=["safe_message", "owner"], + name="unique_safe_message_confirmation_owner", + ) + ] ordering = ["created"] def __str__(self): diff --git a/safe_transaction_service/safe_messages/serializers.py b/safe_transaction_service/safe_messages/serializers.py index 2eab8cbb..47a04d22 100644 --- a/safe_transaction_service/safe_messages/serializers.py +++ b/safe_transaction_service/safe_messages/serializers.py @@ -1,6 +1,6 @@ -from typing import Any, Dict, Optional, Union +from typing import Any, Dict, Optional, Sequence, Tuple, Union -from eth_typing import HexStr +from eth_typing import ChecksumAddress, HexStr from hexbytes import HexBytes from rest_framework import serializers from rest_framework.exceptions import ValidationError @@ -12,15 +12,59 @@ from safe_transaction_service.utils.serializers import get_safe_owners -from .models import SafeMessage, SafeMessageConfirmation -from .utils import get_safe_message_hash_for_message +from .models import SIGNATURE_LENGTH, SafeMessage, SafeMessageConfirmation +from .utils import get_hash_for_message, get_safe_message_hash_for_message # Request serializers -class SafeMessageSerializer(serializers.Serializer): +class SafeMessageSignatureParserMixin: + def get_valid_owner_from_signatures( + self, + safe_signatures: Sequence[SafeSignature], + safe_address: ChecksumAddress, + safe_message: Optional[SafeMessage], + ) -> Tuple[ChecksumAddress, SafeSignatureType]: + """ + :param safe_signatures: + :param safe_address: + :param safe_message: Safe message database object (if already created) + :return: + :raises ValidationError: + """ + if len(safe_signatures) != 1: + raise ValidationError( + f"1 owner signature was expected, {len(safe_signatures)} received" + ) + + ethereum_client = EthereumClientProvider() + for safe_signature in safe_signatures: + if not safe_signature.is_valid(ethereum_client, safe_address): + raise ValidationError( + f"Signature={safe_signature.signature.hex()} for owner={safe_signature.owner} is not valid" + ) + + owner = safe_signatures[0].owner + signature_type = safe_signatures[0].signature_type + if safe_message: + # Check signature is not already in database + if SafeMessageConfirmation.objects.filter( + safe_message=safe_message, owner=owner + ).exists(): + raise ValidationError(f"Signature for owner {owner} already exists") + + owners = get_safe_owners(safe_address) + if owner not in owners: + raise ValidationError(f"{owner} is not an owner of the Safe") + + return owner, signature_type + + +class SafeMessageSerializer(SafeMessageSignatureParserMixin, serializers.Serializer): message = serializers.JSONField() - safe_app_id = serializers.IntegerField(allow_null=True, default=None) - signature = eth_serializers.HexadecimalField(max_length=65) + safe_app_id = serializers.IntegerField(allow_null=True, default=None, min_value=0) + signature = eth_serializers.HexadecimalField( + min_length=65, max_length=SIGNATURE_LENGTH + ) def validate_message(self, value: Union[str, 
Dict[str, Any]]): if isinstance(value, str): @@ -39,11 +83,14 @@ def validate_message(self, value: Union[str, Dict[str, Any]]): def validate(self, attrs): attrs = super().validate(attrs) - message = attrs["message"] safe_address = self.context["safe_address"] + message = attrs["message"] signature = attrs["signature"] attrs["safe"] = safe_address - safe_message_hash = get_safe_message_hash_for_message(safe_address, message) + message_hash = get_hash_for_message(message) + safe_message_hash = get_safe_message_hash_for_message( + safe_address, message_hash + ) attrs["message_hash"] = safe_message_hash if SafeMessage.objects.filter(message_hash=safe_message_hash).exists(): @@ -51,26 +98,15 @@ def validate(self, attrs): f"Message with hash {safe_message_hash.hex()} for safe {safe_address} already exists in DB" ) - safe_signatures = SafeSignature.parse_signature(signature, safe_message_hash) - if len(safe_signatures) != 1: - raise ValidationError( - f"1 owner signature was expected, {len(safe_signatures)} received" - ) - - ethereum_client = EthereumClientProvider() - for safe_signature in safe_signatures: - if not safe_signature.is_valid(ethereum_client, safe_address): - raise ValidationError( - f"Signature={safe_signature.signature.hex()} for owner={safe_signature.owner} is not valid" - ) - - owners = get_safe_owners(safe_address) - proposed_by = safe_signatures[0].owner - if proposed_by not in owners: - raise ValidationError(f"{proposed_by} is not an owner of the Safe") + safe_signatures = SafeSignature.parse_signature( + signature, safe_message_hash, message_hash + ) + owner, signature_type = self.get_valid_owner_from_signatures( + safe_signatures, safe_address, None + ) - attrs["proposed_by"] = proposed_by - attrs["signature_type"] = safe_signatures[0].signature_type.value + attrs["proposed_by"] = owner + attrs["signature_type"] = signature_type.value return attrs def create(self, validated_data): @@ -87,8 +123,12 @@ def create(self, validated_data): return safe_message -class SafeMessageSignatureSerializer(serializers.Serializer): - signature = eth_serializers.HexadecimalField(max_length=65) +class SafeMessageSignatureSerializer( + SafeMessageSignatureParserMixin, serializers.Serializer +): + signature = eth_serializers.HexadecimalField( + min_length=65, max_length=SIGNATURE_LENGTH + ) def validate(self, attrs): attrs = super().validate(attrs) @@ -97,33 +137,18 @@ def validate(self, attrs): attrs["safe_message"] = safe_message signature: HexStr = attrs["signature"] safe_address = safe_message.safe + message_hash = get_hash_for_message(safe_message.message) safe_message_hash = safe_message.message_hash - safe_signatures = SafeSignature.parse_signature(signature, safe_message_hash) - if len(safe_signatures) != 1: - raise ValidationError( - f"1 owner signature was expected, {len(safe_signatures)} received" - ) - - ethereum_client = EthereumClientProvider() - for safe_signature in safe_signatures: - if not safe_signature.is_valid(ethereum_client, safe_address): - raise ValidationError( - f"Signature={safe_signature.signature.hex()} for owner={safe_signature.owner} is not valid" - ) - - owner = safe_signatures[0].owner - if SafeMessageConfirmation.objects.filter( - safe_message=safe_message, owner=owner - ).exists(): - raise ValidationError(f"Signature for owner {owner} already exists") - - owners = get_safe_owners(safe_address) - if owner not in owners: - raise ValidationError(f"{owner} is not an owner of the Safe") + safe_signatures = SafeSignature.parse_signature( + signature, 
safe_message_hash, message_hash + ) + owner, signature_type = self.get_valid_owner_from_signatures( + safe_signatures, safe_address, safe_message + ) attrs["owner"] = owner - attrs["signature_type"] = safe_signatures[0].signature_type.value + attrs["signature_type"] = signature_type.value return attrs def create(self, validated_data): @@ -137,7 +162,7 @@ def create(self, validated_data): return safe_message_confirmation -# Reponse serializers +# Response serializers class SafeMessageConfirmationResponseSerializer(serializers.Serializer): created = serializers.DateTimeField() modified = serializers.DateTimeField() diff --git a/safe_transaction_service/safe_messages/signals.py b/safe_transaction_service/safe_messages/signals.py index 6248debf..18aa01be 100644 --- a/safe_transaction_service/safe_messages/signals.py +++ b/safe_transaction_service/safe_messages/signals.py @@ -5,7 +5,7 @@ from django.db.models.signals import post_save from django.dispatch import receiver -from safe_transaction_service.events.tasks import send_event_to_queue_task +from safe_transaction_service.events.services.queue_service import get_queue_service from safe_transaction_service.history.services.webhooks import build_webhook_payload from safe_transaction_service.history.tasks import send_webhook_task from safe_transaction_service.safe_messages.models import ( @@ -46,7 +46,8 @@ def process_webhook( send_webhook_task.apply_async( args=(address, payload), priority=2 # Almost lowest priority ) # Almost the lowest priority - send_event_to_queue_task.delay(payload) + queue_service = get_queue_service() + queue_service.send_event(payload) else: logger.debug( "Notification will not be sent for created=%s object=%s", diff --git a/safe_transaction_service/safe_messages/tests/factories.py b/safe_transaction_service/safe_messages/tests/factories.py index 4443305f..08921b1a 100644 --- a/safe_transaction_service/safe_messages/tests/factories.py +++ b/safe_transaction_service/safe_messages/tests/factories.py @@ -2,8 +2,10 @@ from eth_account import Account from factory.django import DjangoModelFactory +from gnosis.safe.safe_signature import SafeSignatureType + from ..models import SafeMessage, SafeMessageConfirmation -from ..utils import get_safe_message_hash_for_message +from ..utils import get_hash_for_message, get_safe_message_hash_for_message class SafeMessageFactory(DjangoModelFactory): @@ -17,7 +19,9 @@ class Meta: @factory.lazy_attribute def message_hash(self): - return get_safe_message_hash_for_message(self.safe, self.message).hex() + return get_safe_message_hash_for_message( + self.safe, get_hash_for_message(self.message) + ).hex() class SafeMessageConfirmationFactory(DjangoModelFactory): @@ -30,8 +34,6 @@ class Params: safe_message = factory.SubFactory(SafeMessageFactory) owner = factory.LazyAttribute(lambda o: o.signing_owner.address) signature = factory.LazyAttribute( - lambda o: o.signing_owner.signHash(o.safe_message.message_hash)[ - "signature" - ].hex() + lambda o: o.signing_owner.signHash(o.safe_message.message_hash)["signature"] ) - signature_type = 2 + signature_type = SafeSignatureType.EOA.value diff --git a/safe_transaction_service/safe_messages/tests/test_migrations.py b/safe_transaction_service/safe_messages/tests/test_migrations.py new file mode 100644 index 00000000..85eb686b --- /dev/null +++ b/safe_transaction_service/safe_messages/tests/test_migrations.py @@ -0,0 +1,91 @@ +from django.test import TestCase + +from django_test_migrations.migrator import Migrator +from eth_account import Account +from 
hexbytes import HexBytes + +from gnosis.eth.utils import fast_keccak_text +from gnosis.safe.safe_signature import SafeSignatureApprovedHash + + +class TestMigrations(TestCase): + def setUp(self) -> None: + self.migrator = Migrator(database="default") + + def test_migration_forward_0003_alter_safemessageconfirmation_signature(self): + old_state = self.migrator.apply_initial_migration( + ( + "safe_messages", + "0002_alter_safemessageconfirmation_unique_together_and_more", + ), + ) + + SafeMessageConfirmation = old_state.apps.get_model( + "safe_messages", "SafeMessageConfirmation" + ) + + owner = Account.create().address + safe_tx_hash = fast_keccak_text("tx-hash") + safe_signature = SafeSignatureApprovedHash.build_for_owner(owner, safe_tx_hash) + + SafeMessageConfirmation.objects.create( + owner=Account.create().address, + signature=safe_signature.export_signature(), + signature_type=safe_signature.signature_type.value, + ) + + self.assertEqual( + HexBytes(SafeMessageConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + new_state = self.migrator.apply_tested_migration( + ("safe_messages", "0003_alter_safemessageconfirmation_signature"), + ) + + SafeMessageConfirmation = new_state.apps.get_model( + "safe_messages", "SafeMessageConfirmation" + ) + self.assertEqual( + HexBytes(SafeMessageConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + def test_migration_backward_0003_alter_safemessageconfirmation_signature(self): + new_state = self.migrator.apply_initial_migration( + ("safe_messages", "0003_alter_safemessageconfirmation_signature"), + ) + + SafeMessageConfirmation = new_state.apps.get_model( + "safe_messages", "SafeMessageConfirmation" + ) + + owner = Account.create().address + safe_tx_hash = fast_keccak_text("tx-hash") + safe_signature = SafeSignatureApprovedHash.build_for_owner(owner, safe_tx_hash) + + SafeMessageConfirmation.objects.create( + owner=Account.create().address, + signature=safe_signature.export_signature(), + signature_type=safe_signature.signature_type.value, + ) + + self.assertEqual( + HexBytes(SafeMessageConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) + + old_state = self.migrator.apply_tested_migration( + ( + "safe_messages", + "0002_alter_safemessageconfirmation_unique_together_and_more", + ), + ) + + SafeMessageConfirmation = old_state.apps.get_model( + "safe_messages", "SafeMessageConfirmation" + ) + self.assertEqual( + HexBytes(SafeMessageConfirmation.objects.get().signature), + safe_signature.export_signature(), + ) diff --git a/safe_transaction_service/safe_messages/tests/test_models.py b/safe_transaction_service/safe_messages/tests/test_models.py index 02b30219..b3b4335b 100644 --- a/safe_transaction_service/safe_messages/tests/test_models.py +++ b/safe_transaction_service/safe_messages/tests/test_models.py @@ -8,7 +8,7 @@ from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin -from ..utils import get_safe_message_hash_for_message +from ..utils import get_hash_for_message, get_safe_message_hash_for_message from .factories import SafeMessageConfirmationFactory, SafeMessageFactory from .mocks import get_eip712_payload_mock @@ -35,7 +35,7 @@ def test_str(self): ]: with self.subTest(input=input): with mock.patch( - "gnosis.safe.Safe.domain_separator", + "gnosis.safe.safe.Safe.domain_separator", return_value=mock_domain_separator, new_callable=PropertyMock, ): @@ -60,7 +60,9 @@ def test_factory(self): message_hash = safe_message.message_hash self.assertEqual( 
message_hash, - get_safe_message_hash_for_message(safe_message.safe, message).hex(), + get_safe_message_hash_for_message( + safe_message.safe, get_hash_for_message(message) + ).hex(), ) recovered_owner = Account._recover_hash( safe_message.message_hash, diff --git a/safe_transaction_service/safe_messages/tests/test_signals.py b/safe_transaction_service/safe_messages/tests/test_signals.py index f4351f94..f823b2c7 100644 --- a/safe_transaction_service/safe_messages/tests/test_signals.py +++ b/safe_transaction_service/safe_messages/tests/test_signals.py @@ -1,4 +1,5 @@ from unittest import mock +from unittest.mock import MagicMock from django.db.models.signals import post_save from django.test import TestCase @@ -8,7 +9,7 @@ from gnosis.eth import EthereumNetwork from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin -from safe_transaction_service.events.tasks import send_event_to_queue_task +from safe_transaction_service.events.services.queue_service import QueueService from safe_transaction_service.history.models import WebHookType from safe_transaction_service.history.tasks import send_webhook_task from safe_transaction_service.safe_messages.models import ( @@ -25,11 +26,11 @@ class TestSafeMessageSignals(SafeTestCaseMixin, TestCase): @factory.django.mute_signals(post_save) @mock.patch.object(send_webhook_task, "apply_async") - @mock.patch.object(send_event_to_queue_task, "delay") + @mock.patch.object(QueueService, "send_event") def test_process_webhook( self, - send_event_to_queue_task_mock, - webhook_task_mock, + send_event_to_queue_task_mock: MagicMock, + webhook_task_mock: MagicMock, ): safe_address = self.deploy_test_safe().address safe_message = SafeMessageFactory(safe=safe_address) @@ -58,14 +59,13 @@ def test_process_webhook( webhook_task_mock.assert_called_with( args=(safe_address, message_confirmation_payload), priority=2 ) - send_event_to_queue_task_mock.assert_called_with(message_confirmation_payload) @mock.patch.object(send_webhook_task, "apply_async") - @mock.patch.object(send_event_to_queue_task, "delay") + @mock.patch.object(QueueService, "send_event") def test_signals_are_correctly_fired( self, - send_event_to_queue_task_mock, - webhook_task_mock, + send_event_mock: MagicMock, + webhook_task_mock: MagicMock, ): safe_address = self.deploy_test_safe().address # Create a confirmation should fire a signal and webhooks should be sended @@ -79,8 +79,7 @@ def test_signals_are_correctly_fired( webhook_task_mock.assert_called_with( args=(safe_address, message_created_payload), priority=2 ) - send_event_to_queue_task_mock.assert_called_with(message_created_payload) - + send_event_mock.assert_called_with(message_created_payload) message_confirmation_payload = { "address": safe_address, "type": WebHookType.MESSAGE_CONFIRMATION.name, @@ -92,4 +91,4 @@ def test_signals_are_correctly_fired( webhook_task_mock.assert_called_with( args=(safe_address, message_confirmation_payload), priority=2 ) - send_event_to_queue_task_mock.assert_called_with(message_confirmation_payload) + send_event_mock.assert_called_with(message_confirmation_payload) diff --git a/safe_transaction_service/safe_messages/tests/test_views.py b/safe_transaction_service/safe_messages/tests/test_views.py index 73feca1b..466b2eb5 100644 --- a/safe_transaction_service/safe_messages/tests/test_views.py +++ b/safe_transaction_service/safe_messages/tests/test_views.py @@ -1,18 +1,20 @@ -import datetime import logging from unittest import mock from unittest.mock import MagicMock from django.urls import reverse +import 
eth_abi from eth_account import Account from eth_account.messages import defunct_hash_message +from hexbytes import HexBytes from rest_framework import status from rest_framework.exceptions import ErrorDetail from rest_framework.test import APITestCase from gnosis.eth.eip712 import eip712_encode_hash from gnosis.safe.safe_signature import SafeSignatureEOA +from gnosis.safe.signatures import signature_to_bytes from gnosis.safe.tests.safe_test_case import SafeTestCaseMixin from safe_transaction_service.safe_messages.models import ( @@ -23,19 +25,13 @@ SafeMessageConfirmationFactory, SafeMessageFactory, ) +from safe_transaction_service.utils.utils import datetime_to_str from .mocks import get_eip712_payload_mock logger = logging.getLogger(__name__) -def datetime_to_str(value: datetime.datetime) -> str: - value = value.isoformat() - if value.endswith("+00:00"): - value = value[:-6] + "Z" - return value - - class TestMessageViews(SafeTestCaseMixin, APITestCase): def test_safe_message_view(self): random_safe_message_hash = ( @@ -45,7 +41,9 @@ def test_safe_message_view(self): reverse("v1:safe_messages:message", args=(random_safe_message_hash,)) ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), {"detail": "Not found."}) + self.assertEqual( + response.json(), {"detail": "No SafeMessage matches the given query."} + ) safe_message = SafeMessageFactory(safe=self.deploy_test_safe().address) response = self.client.get( reverse("v1:safe_messages:message", args=(safe_message.message_hash,)) @@ -84,13 +82,13 @@ def test_safe_message_view(self): "message": safe_message.message, "proposedBy": safe_message.proposed_by, "safeAppId": safe_message.safe_app_id, - "preparedSignature": safe_message_confirmation.signature, + "preparedSignature": safe_message_confirmation.signature.hex(), "confirmations": [ { "created": datetime_to_str(safe_message_confirmation.created), "modified": datetime_to_str(safe_message_confirmation.modified), "owner": safe_message_confirmation.owner, - "signature": safe_message_confirmation.signature, + "signature": safe_message_confirmation.signature.hex(), "signatureType": "EOA", } ], @@ -120,13 +118,13 @@ def test_safe_message_not_camel_case_view(self): "message": safe_message.message, "proposedBy": safe_message.proposed_by, "safeAppId": safe_message.safe_app_id, - "preparedSignature": safe_message_confirmation.signature, + "preparedSignature": safe_message_confirmation.signature.hex(), "confirmations": [ { "created": datetime_to_str(safe_message_confirmation.created), "modified": datetime_to_str(safe_message_confirmation.modified), "owner": safe_message_confirmation.owner, - "signature": safe_message_confirmation.signature, + "signature": safe_message_confirmation.signature.hex(), "signatureType": "EOA", } ], @@ -172,6 +170,7 @@ def test_safe_messages_create_view(self, get_owners_mock: MagicMock): "message": message, "description": description, "signature": signature, + "safeAppId": -1, } response = self.client.post( reverse("v1:safe_messages:safe-messages", args=(safe_address,)), @@ -179,6 +178,25 @@ def test_safe_messages_create_view(self, get_owners_mock: MagicMock): data=data, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data, + { + "safe_app_id": [ + ErrorDetail( + string="Ensure this value is greater than or equal to 0.", + code="min_value", + ) + ] + }, + ) + + data.pop("safeAppId") + response = self.client.post( + reverse("v1:safe_messages:safe-messages", 
args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.data, { @@ -241,6 +259,40 @@ def test_safe_messages_create_view(self, get_owners_mock: MagicMock): }, ) + def test_safe_messages_create_using_1271_signature_view(self): + account = Account.create() + safe_owner = self.deploy_test_safe(owners=[account.address]) + safe = self.deploy_test_safe(owners=[safe_owner.address]) + + safe_address = safe.address + message = get_eip712_payload_mock() + description = "Testing EIP712 message signing" + message_hash = eip712_encode_hash(message) + safe_owner_message_hash = safe_owner.get_message_hash(message_hash) + safe_owner_signature = account.signHash(safe_owner_message_hash)["signature"] + + # Build EIP1271 signature v=0 r=safe v=dynamic_part dynamic_part=size+owner_signature + signature_1271 = ( + signature_to_bytes( + 0, int.from_bytes(HexBytes(safe_owner.address), byteorder="big"), 65 + ) + + eth_abi.encode(["bytes"], [safe_owner_signature])[32:] + ) + + data = { + "message": message, + "description": description, + "signature": HexBytes(signature_1271).hex(), + } + response = self.client.post( + reverse("v1:safe_messages:safe-messages", args=(safe_address,)), + format="json", + data=data, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(SafeMessage.objects.count(), 1) + self.assertEqual(SafeMessageConfirmation.objects.count(), 1) + @mock.patch( "safe_transaction_service.safe_messages.serializers.get_safe_owners", return_value=[], @@ -269,20 +321,20 @@ def test_safe_message_add_signature_view(self, get_owners_mock: MagicMock): data=data, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( + self.assertDictEqual( response.data, { - "non_field_errors": [ + "signature": [ ErrorDetail( - string="1 owner signature was expected, 0 received", - code="invalid", + string="Ensure this field has at least 65 hexadecimal chars (not counting 0x).", + code="min_length", ) ] }, ) # Test same signature - data["signature"] = safe_message_confirmation.signature + data["signature"] = safe_message_confirmation.signature.hex() response = self.client.post( reverse("v1:safe_messages:signatures", args=(safe_message.message_hash,)), format="json", @@ -413,7 +465,7 @@ def test_safe_messages_list_view(self): "message": safe_message.message, "proposedBy": safe_message.proposed_by, "safeAppId": safe_message.safe_app_id, - "preparedSignature": safe_message_confirmation.signature, + "preparedSignature": safe_message_confirmation.signature.hex(), "confirmations": [ { "created": datetime_to_str( @@ -423,7 +475,7 @@ def test_safe_messages_list_view(self): safe_message_confirmation.modified ), "owner": safe_message_confirmation.owner, - "signature": safe_message_confirmation.signature, + "signature": safe_message_confirmation.signature.hex(), "signatureType": "EOA", } ], @@ -460,7 +512,7 @@ def test_safe_messages_list_not_camel_case_view(self): "message": safe_message.message, "proposedBy": safe_message.proposed_by, "safeAppId": safe_message.safe_app_id, - "preparedSignature": safe_message_confirmation.signature, + "preparedSignature": safe_message_confirmation.signature.hex(), "confirmations": [ { "created": datetime_to_str( @@ -470,7 +522,7 @@ def test_safe_messages_list_not_camel_case_view(self): safe_message_confirmation.modified ), "owner": safe_message_confirmation.owner, - "signature": safe_message_confirmation.signature, + "signature": 
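
The EIP-1271 test above packs a Safe contract signature by hand; a hedged breakdown of that layout, using the same helpers the test imports (`signature_to_bytes`, `eth_abi.encode`), is sketched below. The helper name is hypothetical, and the 65-byte offset assumes the contract signature is the only one in the blob.

```python
# Hedged sketch of the Safe EIP-1271 contract-signature layout used in the test above.
import eth_abi
from hexbytes import HexBytes

from gnosis.safe.signatures import signature_to_bytes


def build_contract_signature(owner_safe_address: str, inner_signature: bytes) -> bytes:
    # Static 65-byte part: r = verifying contract (the owner Safe),
    # s = offset of the dynamic part inside the signature blob,
    # v = 0 marks a contract (EIP-1271) signature.
    static_part = signature_to_bytes(
        0,
        int.from_bytes(HexBytes(owner_safe_address), byteorder="big"),
        65,  # offset: only one signature in the blob
    )
    # Dynamic part: ABI-encoded `bytes` without the leading offset word,
    # i.e. a 32-byte length followed by the padded inner signature.
    dynamic_part = eth_abi.encode(["bytes"], [inner_signature])[32:]
    return static_part + dynamic_part
```
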
safe_message_confirmation.signature.hex(), "signatureType": "EOA", } ], @@ -487,7 +539,9 @@ def test_safe_message_view_v1_1_1(self): reverse("v1:safe_messages:message", args=(random_safe_message_hash,)) ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), {"detail": "Not found."}) + self.assertEqual( + response.json(), {"detail": "No SafeMessage matches the given query."} + ) safe_message = SafeMessageFactory(safe=self.deploy_test_safe_v1_1_1().address) response = self.client.get( reverse("v1:safe_messages:message", args=(safe_message.message_hash,)) @@ -526,13 +580,13 @@ def test_safe_message_view_v1_1_1(self): "message": safe_message.message, "proposedBy": safe_message.proposed_by, "safeAppId": safe_message.safe_app_id, - "preparedSignature": safe_message_confirmation.signature, + "preparedSignature": safe_message_confirmation.signature.hex(), "confirmations": [ { "created": datetime_to_str(safe_message_confirmation.created), "modified": datetime_to_str(safe_message_confirmation.modified), "owner": safe_message_confirmation.owner, - "signature": safe_message_confirmation.signature, + "signature": safe_message_confirmation.signature.hex(), "signatureType": "EOA", } ], diff --git a/safe_transaction_service/safe_messages/utils.py b/safe_transaction_service/safe_messages/utils.py index dce72d8f..894cf3cc 100644 --- a/safe_transaction_service/safe_messages/utils.py +++ b/safe_transaction_service/safe_messages/utils.py @@ -8,13 +8,16 @@ from gnosis.safe import Safe -def get_safe_message_hash_for_message( - safe_address: ChecksumAddress, message: str | Dict[str, Any] -) -> Hash32: - message_hash: Hash32 = ( +def get_hash_for_message(message: str | Dict[str, Any]) -> Hash32: + return ( defunct_hash_message(text=message) if isinstance(message, str) else eip712_encode_hash(message) ) + + +def get_safe_message_hash_for_message( + safe_address: ChecksumAddress, message_hash: Hash32 +) -> Hash32: safe = Safe(safe_address, EthereumClientProvider()) return safe.get_message_hash(message_hash) diff --git a/safe_transaction_service/static/safe/safe_contract_logo.png b/safe_transaction_service/static/safe/safe_contract_logo.png new file mode 100644 index 00000000..14eb5bb0 Binary files /dev/null and b/safe_transaction_service/static/safe/safe_contract_logo.png differ diff --git a/safe_transaction_service/tokens/admin.py b/safe_transaction_service/tokens/admin.py index 9edbf071..37c6858d 100644 --- a/safe_transaction_service/tokens/admin.py +++ b/safe_transaction_service/tokens/admin.py @@ -1,14 +1,15 @@ from django.contrib import admin -from gnosis.eth.django.admin import BinarySearchAdmin - -from safe_transaction_service.utils.admin import HasLogoFilterAdmin +from safe_transaction_service.utils.admin import ( + AdvancedAdminSearchMixin, + HasLogoFilterAdmin, +) from .models import Token, TokenList @admin.register(Token) -class TokenAdmin(BinarySearchAdmin): +class TokenAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin): list_display = ( "address", "trusted", @@ -21,7 +22,7 @@ class TokenAdmin(BinarySearchAdmin): ) list_filter = ("trusted", "spam", "events_bugged", "decimals", HasLogoFilterAdmin) ordering = ("address",) - search_fields = ["=address", "symbol", "name", "=copy_price"] + search_fields = ["==address", "symbol", "name", "==copy_price"] @admin.register(TokenList) @@ -32,4 +33,4 @@ class TokenListAdmin(admin.ModelAdmin): "description", ) ordering = ("pk",) - search_fields = ["description"] + search_fields = ["url", "description"] diff --git 
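
With `get_hash_for_message` and `get_safe_message_hash_for_message` split as in the utils.py hunk above, the hashing flow for a plain-string message looks roughly like the sketch below. The Safe address and RPC URL are placeholders, and `EthereumClient` stands in for the Django-settings-backed `EthereumClientProvider` so the snippet stands alone.

```python
# Rough sketch of the message-hash flow after the utils.py refactor above.
# Assumptions: placeholder Safe address and RPC endpoint; a dict (EIP-712)
# message would go through eip712_encode_hash instead of defunct_hash_message.
from eth_account.messages import defunct_hash_message

from gnosis.eth import EthereumClient
from gnosis.safe import Safe

safe_address = "0x..."                                        # placeholder Safe address
ethereum_client = EthereumClient("https://rpc.example.org")   # placeholder RPC endpoint
safe = Safe(safe_address, ethereum_client)

message = "I agree to the terms"
message_hash = defunct_hash_message(text=message)         # get_hash_for_message(message)
safe_message_hash = safe.get_message_hash(message_hash)   # get_safe_message_hash_for_message(...)
```
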
a/safe_transaction_service/tokens/clients/__init__.py b/safe_transaction_service/tokens/clients/__init__.py index 7fddb40b..494386c2 100644 --- a/safe_transaction_service/tokens/clients/__init__.py +++ b/safe_transaction_service/tokens/clients/__init__.py @@ -1,8 +1,4 @@ # flake8: noqa F401 -from .binance_client import BinanceClient from .coingecko_client import CoingeckoClient from .coinmarketcap_client import CoinMarketCapClient, CoinMarketCapToken -from .exceptions import CannotGetPrice from .kleros_client import KlerosClient, KlerosToken -from .kraken_client import KrakenClient -from .kucoin_client import KucoinClient diff --git a/safe_transaction_service/tokens/clients/binance_client.py b/safe_transaction_service/tokens/clients/binance_client.py deleted file mode 100644 index 7581036e..00000000 --- a/safe_transaction_service/tokens/clients/binance_client.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging - -from .base_client import BaseHTTPClient -from .exceptions import CannotGetPrice - -logger = logging.getLogger(__name__) - - -class BinanceClient(BaseHTTPClient): # pragma: no cover - def _get_price(self, symbol: str) -> float: - url = f"https://api.binance.com/api/v3/avgPrice?symbol={symbol}" - try: - response = self.http_session.get(url, timeout=self.request_timeout) - api_json = response.json() - if not response.ok: - logger.warning("Cannot get price from url=%s", url) - raise CannotGetPrice(api_json.get("msg")) - - price = float(api_json["price"]) - if not price: - raise CannotGetPrice(f"Price from url={url} is {price}") - return price - except (ValueError, IOError) as e: - raise CannotGetPrice from e - - def get_ada_usd_price(self) -> float: - return self._get_price("ADAUSDT") - - def get_aurora_usd_price(self): - return self._get_price("NEARUSDT") - - def get_bnb_usd_price(self) -> float: - return self._get_price("BNBUSDT") - - def get_ether_usd_price(self) -> float: - """ - :return: current USD price for Ethereum - :raises: CannotGetPrice - """ - return self._get_price("ETHUSDT") - - def get_matic_usd_price(self) -> float: - """ - :return: current USD price for MATIC - :raises: CannotGetPrice - """ - return self._get_price("MATICUSDT") diff --git a/safe_transaction_service/tokens/clients/coingecko_client.py b/safe_transaction_service/tokens/clients/coingecko_client.py index 9b5c6ce7..7c26ec42 100644 --- a/safe_transaction_service/tokens/clients/coingecko_client.py +++ b/safe_transaction_service/tokens/clients/coingecko_client.py @@ -9,7 +9,6 @@ from safe_transaction_service.tokens.clients.base_client import BaseHTTPClient from safe_transaction_service.tokens.clients.exceptions import ( - CannotGetPrice, Coingecko404, CoingeckoRateLimitError, CoingeckoRequestError, @@ -23,7 +22,7 @@ class CoingeckoClient(BaseHTTPClient): EthereumNetwork.ARBITRUM_ONE: "arbitrum-one", EthereumNetwork.AURORA_MAINNET: "aurora", EthereumNetwork.AVALANCHE_C_CHAIN: "avalanche", - EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET: "binance-smart-chain", + EthereumNetwork.BNB_SMART_CHAIN_MAINNET: "binance-smart-chain", EthereumNetwork.FUSE_MAINNET: "fuse", EthereumNetwork.GNOSIS: "xdai", EthereumNetwork.KCC_MAINNET: "kucoin-community-chain", @@ -61,44 +60,6 @@ def _do_request(self, url: str) -> Dict[str, Any]: logger.warning("Problem fetching %s", url) raise CoingeckoRequestError from e - def _get_price(self, url: str, name: str): - try: - result = self._do_request(url) - - # Result is returned with lowercased `name` (if querying by contract address, then `token_address`) - price = result.get(name) - if price 
and price.get("usd"): - return price["usd"] - else: - raise CannotGetPrice(f"Price from url={url} is {price}") - except CoingeckoRequestError as e: - raise CannotGetPrice( - f"Cannot get price from Coingecko for token={name}" - ) from e - - def get_price(self, name: str) -> float: - """ - :param name: coin name - :return: usd price for token name, 0. if not found - """ - name = name.lower() - url = urljoin( - self.base_url, f"/api/v3/simple/price?ids={name}&vs_currencies=usd" - ) - return self._get_price(url, name) - - def get_token_price(self, token_address: ChecksumAddress) -> float: - """ - :param token_address: - :return: usd price for token address, 0. if not found - """ - token_address = token_address.lower() - url = urljoin( - self.base_url, - f"api/v3/simple/token_price/{self.asset_platform}?contract_addresses={token_address}&vs_currencies=usd", - ) - return self._get_price(url, token_address) - @lru_cache(maxsize=128) def get_token_info( self, token_address: ChecksumAddress @@ -153,4 +114,3 @@ def get_btc_usd_price(self) -> float: def get_mtr_usd_price(self) -> float: return self.get_price("meter-stable") - diff --git a/safe_transaction_service/tokens/clients/exceptions.py b/safe_transaction_service/tokens/clients/exceptions.py index 15046cf3..ca0d1e29 100644 --- a/safe_transaction_service/tokens/clients/exceptions.py +++ b/safe_transaction_service/tokens/clients/exceptions.py @@ -15,7 +15,3 @@ class CoingeckoRateLimitError(CoingeckoRequestError): } } """ - - -class CannotGetPrice(CoingeckoRequestError): - pass diff --git a/safe_transaction_service/tokens/clients/kraken_client.py b/safe_transaction_service/tokens/clients/kraken_client.py deleted file mode 100644 index 8e593c95..00000000 --- a/safe_transaction_service/tokens/clients/kraken_client.py +++ /dev/null @@ -1,72 +0,0 @@ -import logging - -from .base_client import BaseHTTPClient -from .exceptions import CannotGetPrice - -logger = logging.getLogger(__name__) - - -class KrakenClient(BaseHTTPClient): - def _get_price(self, symbol: str) -> float: - url = f"https://api.kraken.com/0/public/Ticker?pair={symbol}" - try: - response = self.http_session.get(url, timeout=self.request_timeout) - api_json = response.json() - error = api_json.get("error") - if not response.ok or error: - logger.warning("Cannot get price from url=%s", url) - raise CannotGetPrice(str(api_json["error"])) - - result = api_json["result"] - for new_ticker in result: - price = float(result[new_ticker]["c"][0]) - if not price: - raise CannotGetPrice(f"Price from url={url} is {price}") - return price - except (ValueError, IOError) as e: - raise CannotGetPrice from e - - def get_ada_usd_price(self) -> float: - return self._get_price("ADAUSD") - - def get_avax_usd_price(self) -> float: - """ - :return: current USD price for AVAX - :raises: CannotGetPrice - """ - return self._get_price("AVAXUSD") - - def get_dai_usd_price(self) -> float: - """ - :return: current USD price for DAI - :raises: CannotGetPrice - """ - return self._get_price("DAIUSD") - - def get_ether_usd_price(self) -> float: - """ - :return: current USD price for Ethereum - :raises: CannotGetPrice - """ - return self._get_price("ETHUSD") - - def get_matic_usd_price(self): - """ - :return: current USD price for MATIC - :raises: CannotGetPrice - """ - return self._get_price("MATICUSD") - - def get_ewt_usd_price(self) -> float: - """ - :return: current USD price for Energy Web Token - :raises: CannotGetPrice - """ - return self._get_price("EWTUSD") - - def get_algo_usd_price(self): - """ - :return: 
current USD price for Algorand - :raises: CannotGetPrice - """ - return self._get_price("ALGOUSD") diff --git a/safe_transaction_service/tokens/clients/kucoin_client.py b/safe_transaction_service/tokens/clients/kucoin_client.py deleted file mode 100644 index b4aa4a46..00000000 --- a/safe_transaction_service/tokens/clients/kucoin_client.py +++ /dev/null @@ -1,89 +0,0 @@ -import logging - -from .base_client import BaseHTTPClient -from .exceptions import CannotGetPrice - -logger = logging.getLogger(__name__) - - -class KucoinClient(BaseHTTPClient): - def _get_price(self, symbol: str): - url = f"https://api.kucoin.com/api/v1/market/orderbook/level1?symbol={symbol}" - - try: - response = self.http_session.get(url, timeout=self.request_timeout) - result = response.json() - return float(result["data"]["price"]) - except (ValueError, IOError) as e: - logger.warning("Cannot get price from url=%s", url) - raise CannotGetPrice from e - - def get_ether_usd_price(self) -> float: - """ - :return: current USD price for ETH Coin - :raises: CannotGetPrice - """ - return self._get_price("ETH-USDT") - - def get_aurora_usd_price(self) -> float: - """ - :return: current USD price for Aurora Coin - :raises: CannotGetPrice - """ - return self._get_price("AURORA-USDT") - - def get_bnb_usd_price(self) -> float: - """ - :return: current USD price for Binance Coin - :raises: CannotGetPrice - """ - return self._get_price("BNB-USDT") - - def get_celo_usd_price(self) -> float: - """ - :return: current USD price for Celo - :raises: CannotGetPrice - """ - return self._get_price("CELO-USDT") - - def get_cro_usd_price(self) -> float: - """ - :return: current USD price for Cronos - :raises: CannotGetPrice - """ - return self._get_price("CRO-USDT") - - def get_ewt_usd_price(self) -> float: - """ - :return: current USD price for Energy Web Token - :raises: CannotGetPrice - """ - return self._get_price("EWT-USDT") - - def get_kcs_usd_price(self) -> float: - """ - :return: current USD price for KuCoin Token - :raises: CannotGetPrice - """ - return self._get_price("KCS-USDT") - - def get_matic_usd_price(self) -> float: - """ - :return: current USD price for MATIC Token - :raises: CannotGetPrice - """ - return self._get_price("MATIC-USDT") - - def get_xdc_usd_price(self) -> float: - """ - :return: current USD price for XDC Token - :raises: CannotGetPrice - """ - return self._get_price("XDC-USDT") - - def get_ftm_usd_price(self) -> float: - """ - :return: current USD price for FTM Token - :raises: CannotGetPrice - """ - return self._get_price("FTM-USDT") diff --git a/safe_transaction_service/tokens/constants.py b/safe_transaction_service/tokens/constants.py index 855eb602..17dc4b40 100644 --- a/safe_transaction_service/tokens/constants.py +++ b/safe_transaction_service/tokens/constants.py @@ -1,6 +1,5 @@ CRYPTO_KITTIES_CONTRACT_ADDRESSES = { "0x06012c8cf97BEaD5deAe237070F9587f8E7A266d", # Mainnet - "0x16baF0dE678E52367adC69fD067E5eDd1D33e3bF", # Rinkeby } ENS_CONTRACTS_WITH_TLD = { diff --git a/safe_transaction_service/tokens/migrations/0001_initial.py b/safe_transaction_service/tokens/migrations/0001_initial.py index 50d64121..2e799222 100644 --- a/safe_transaction_service/tokens/migrations/0001_initial.py +++ b/safe_transaction_service/tokens/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/safe_transaction_service/tokens/migrations/0002_auto_20200903_1045.py b/safe_transaction_service/tokens/migrations/0002_auto_20200903_1045.py index 
d6e24cb2..ff913d6b 100644 --- a/safe_transaction_service/tokens/migrations/0002_auto_20200903_1045.py +++ b/safe_transaction_service/tokens/migrations/0002_auto_20200903_1045.py @@ -24,7 +24,6 @@ def fix_token_decimals(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("tokens", "0001_initial"), ] diff --git a/safe_transaction_service/tokens/migrations/0003_auto_20201222_1053.py b/safe_transaction_service/tokens/migrations/0003_auto_20201222_1053.py index 7dca9f56..4ae51d4a 100644 --- a/safe_transaction_service/tokens/migrations/0003_auto_20201222_1053.py +++ b/safe_transaction_service/tokens/migrations/0003_auto_20201222_1053.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0002_auto_20200903_1045"), ] diff --git a/safe_transaction_service/tokens/migrations/0004_ethereum_address_field_v2_20211201_1512.py b/safe_transaction_service/tokens/migrations/0004_ethereum_address_field_v2_20211201_1512.py index 4a092720..52285392 100644 --- a/safe_transaction_service/tokens/migrations/0004_ethereum_address_field_v2_20211201_1512.py +++ b/safe_transaction_service/tokens/migrations/0004_ethereum_address_field_v2_20211201_1512.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0003_auto_20201222_1053"), ] diff --git a/safe_transaction_service/tokens/migrations/0005_add_logo_file_20220201_1335.py b/safe_transaction_service/tokens/migrations/0005_add_logo_file_20220201_1335.py index fa35bf6e..5fe84600 100644 --- a/safe_transaction_service/tokens/migrations/0005_add_logo_file_20220201_1335.py +++ b/safe_transaction_service/tokens/migrations/0005_add_logo_file_20220201_1335.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0004_ethereum_address_field_v2_20211201_1512"), ] diff --git a/safe_transaction_service/tokens/migrations/0006_auto_20220214_1629.py b/safe_transaction_service/tokens/migrations/0006_auto_20220214_1629.py index b00b1842..eb389895 100644 --- a/safe_transaction_service/tokens/migrations/0006_auto_20220214_1629.py +++ b/safe_transaction_service/tokens/migrations/0006_auto_20220214_1629.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0005_add_logo_file_20220201_1335"), ] diff --git a/safe_transaction_service/tokens/migrations/0007_alter_token_logo.py b/safe_transaction_service/tokens/migrations/0007_alter_token_logo.py index 4a42fbd7..6e2dbdd5 100644 --- a/safe_transaction_service/tokens/migrations/0007_alter_token_logo.py +++ b/safe_transaction_service/tokens/migrations/0007_alter_token_logo.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0006_auto_20220214_1629"), ] diff --git a/safe_transaction_service/tokens/migrations/0008_alter_token_copy_price.py b/safe_transaction_service/tokens/migrations/0008_alter_token_copy_price.py index 5c3a7b62..acf0ef18 100644 --- a/safe_transaction_service/tokens/migrations/0008_alter_token_copy_price.py +++ b/safe_transaction_service/tokens/migrations/0008_alter_token_copy_price.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0007_alter_token_logo"), ] diff --git a/safe_transaction_service/tokens/migrations/0009_token_token_spam_idx.py b/safe_transaction_service/tokens/migrations/0009_token_token_spam_idx.py index a2f9deff..c9786861 100644 --- a/safe_transaction_service/tokens/migrations/0009_token_token_spam_idx.py +++ 
b/safe_transaction_service/tokens/migrations/0009_token_token_spam_idx.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("tokens", "0008_alter_token_copy_price"), ] diff --git a/safe_transaction_service/tokens/migrations/0010_tokenlist.py b/safe_transaction_service/tokens/migrations/0010_tokenlist.py index be7dd604..75a0fdba 100644 --- a/safe_transaction_service/tokens/migrations/0010_tokenlist.py +++ b/safe_transaction_service/tokens/migrations/0010_tokenlist.py @@ -19,7 +19,7 @@ "Official", ), EthereumNetwork.GNOSIS: ("https://tokens.honeyswap.org/", "HoneySwap"), - EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET: ( + EthereumNetwork.BNB_SMART_CHAIN_MAINNET: ( "https://tokens.pancakeswap.finance/pancakeswap-extended.json", "PancakeSwap", ), @@ -38,7 +38,6 @@ def add_default_token_lists(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("tokens", "0009_token_token_spam_idx"), ] diff --git a/safe_transaction_service/tokens/models.py b/safe_transaction_service/tokens/models.py index 83214e51..3c088553 100644 --- a/safe_transaction_service/tokens/models.py +++ b/safe_transaction_service/tokens/models.py @@ -1,7 +1,7 @@ import logging import os from json import JSONDecodeError -from typing import Any, Dict, List, Optional +from typing import Optional, TypedDict from urllib.parse import urljoin from django.conf import settings @@ -128,7 +128,7 @@ def create_from_blockchain( ) return None - name_and_symbol: List[str] = [] + name_and_symbol: list[str] = [] for text in (erc_info.name, erc_info.symbol): if isinstance(text, str): text = text.encode() @@ -302,6 +302,16 @@ def get_price_address(self) -> ChecksumAddress: return self.copy_price or self.address +class TokenListToken(TypedDict): + symbol: str + name: str + address: str # Can be an ENS address + decimals: int + chainId: int + logoURI: str + tags: list[str] | None + + class TokenList(models.Model): url = models.URLField(unique=True) description = models.CharField(max_length=200) @@ -309,7 +319,7 @@ class TokenList(models.Model): def __str__(self): return f"{self.description} token list" - def get_tokens(self) -> List[Dict[str, Any]]: + def get_tokens(self) -> list[TokenListToken]: try: response = requests.get(self.url, timeout=5) if response.ok: diff --git a/safe_transaction_service/tokens/serializers.py b/safe_transaction_service/tokens/serializers.py index 371b666c..035491d1 100644 --- a/safe_transaction_service/tokens/serializers.py +++ b/safe_transaction_service/tokens/serializers.py @@ -20,6 +20,7 @@ class TokenInfoResponseSerializer(serializers.Serializer): symbol = serializers.CharField() decimals = serializers.IntegerField() logo_uri = serializers.SerializerMethodField() + trusted = serializers.BooleanField() def get_type(self, obj: Token) -> str: if obj.is_erc20(): diff --git a/safe_transaction_service/tokens/services/__init__.py b/safe_transaction_service/tokens/services/__init__.py deleted file mode 100644 index c50f7533..00000000 --- a/safe_transaction_service/tokens/services/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# flake8: noqa F401 -from .price_service import PriceService, PriceServiceProvider diff --git a/safe_transaction_service/tokens/signals.py b/safe_transaction_service/tokens/signals.py index d7b01b66..3a21b786 100644 --- a/safe_transaction_service/tokens/signals.py +++ b/safe_transaction_service/tokens/signals.py @@ -11,7 +11,6 @@ ) from .models import Token -from .services import PriceServiceProvider logger = logging.getLogger(__name__) @@ -34,9 +33,3 @@ def 
clear_cache(sender: Type[Model], instance: Token, created: bool, **kwargs) - collectibles_service = CollectiblesServiceProvider() collectibles_service.cache_token_info.clear() - - price_service = PriceServiceProvider() - price_service.cache_token_eth_value.clear() - price_service.cache_token_info.clear() - price_service.cache_token_usd_value.clear() - price_service.cache_underlying_token.clear() diff --git a/safe_transaction_service/tokens/tasks.py b/safe_transaction_service/tokens/tasks.py index 051cf25c..3f56405c 100644 --- a/safe_transaction_service/tokens/tasks.py +++ b/safe_transaction_service/tokens/tasks.py @@ -1,25 +1,22 @@ -import random from dataclasses import dataclass from datetime import datetime from typing import Optional -from django.conf import settings from django.db import transaction from django.utils import timezone from celery import app from celery.utils.log import get_task_logger -from eth_typing import ChecksumAddress +from web3.exceptions import Web3Exception -from gnosis.eth.ethereum_client import EthereumNetwork +from gnosis.eth.ethereum_client import EthereumClientProvider, EthereumNetwork from gnosis.eth.utils import fast_to_checksum_address from safe_transaction_service.utils.ethereum import get_ethereum_network -from safe_transaction_service.utils.redis import get_redis from safe_transaction_service.utils.utils import close_gevent_db_connection_decorator from .exceptions import TokenListRetrievalException -from .models import Token, TokenList +from .models import Token, TokenList, TokenListToken logger = get_task_logger(__name__) @@ -46,70 +43,6 @@ def __str__(self): return f"{self.eth_value}:{self.timestamp.timestamp()}" -@app.shared_task(soft_time_limit=TASK_SOFT_TIME_LIMIT, time_limit=TASK_TIME_LIMIT) -@close_gevent_db_connection_decorator -def calculate_token_eth_price_task( - token_address: ChecksumAddress, redis_key: str, force_recalculation: bool = False -) -> Optional[EthValueWithTimestamp]: - """ - Do price calculation for token in an async way and store it with its timestamp on redis - - :param token_address: Token address - :param redis_key: Redis key for token price - :param force_recalculation: Force a new calculation even if an old one is on cache - :return: token price (in ether) when calculated - """ - from .services.price_service import PriceServiceProvider - - redis = get_redis() - now = timezone.now() - current_timestamp = int(now.timestamp()) - key_was_set = redis.set( - redis_key, f"0:{current_timestamp}", ex=60 * 15, nx=True - ) # Expire in 15 minutes - # Only calculate the price if key was not set previously or if `force_recalculation` is `True` - if key_was_set or force_recalculation: - price_service = PriceServiceProvider() - eth_price = price_service.get_token_eth_price_from_oracles(token_address) - if not eth_price: - eth_price = price_service.get_token_eth_price_from_composed_oracles( - token_address - ) - - logger.debug("Calculated eth-price=%f for token=%s", eth_price, token_address) - if not eth_price: - logger.warning( - "Cannot calculate eth price for token=%s - Trying to use previous price", - token_address, - ) - last_redis_value = redis.get(redis_key) - if last_redis_value: - logger.warning("Using previous eth price for token=%s", token_address) - eth_price = EthValueWithTimestamp.from_string( - last_redis_value.decode() - ).eth_value - else: - logger.warning("Cannot calculate eth price for token=%s", token_address) - return EthValueWithTimestamp(eth_price, now) - - eth_value_with_timestamp = 
EthValueWithTimestamp(eth_price, now) - redis.setex( - redis_key, settings.TOKEN_ETH_PRICE_TTL, str(eth_value_with_timestamp) - ) - if not getattr(settings, "CELERY_ALWAYS_EAGER", False): - # Recalculate price before cache expires and prevents recursion checking Celery Eager property - # Use randint to prevent triggering all the tasks at the same time - calculate_token_eth_price_task.apply_async( - (token_address, redis_key), - {"force_recalculation": True}, - countdown=settings.TOKEN_ETH_PRICE_TTL - random.randint(60, 300), - ) - - return EthValueWithTimestamp(eth_price, now) - else: - return EthValueWithTimestamp.from_string(redis.get(redis_key).decode()) - - @app.shared_task(soft_time_limit=TASK_SOFT_TIME_LIMIT, time_limit=TASK_TIME_LIMIT) @close_gevent_db_connection_decorator def fix_pool_tokens_task() -> Optional[int]: @@ -134,7 +67,7 @@ def update_token_info_from_token_list_task() -> int: :return: Number of tokens marked as `trusted` """ - tokens = [] + tokens: list[TokenListToken] = [] for token_list in TokenList.objects.all(): try: tokens += token_list.get_tokens() @@ -145,13 +78,24 @@ def update_token_info_from_token_list_task() -> int: return 0 # Make sure current chainId matches the one in the list - ethereum_network = get_ethereum_network() - - token_addresses = [ - fast_to_checksum_address(token["address"]) - for token in tokens - if token["chainId"] == ethereum_network.value - ] + current_chain_id = get_ethereum_network().value + ethereum_client = EthereumClientProvider() + + token_addresses = [] + for token in tokens: + if token.get("chainId") == current_chain_id: + token_address = token["address"] + if token_address.startswith("0x"): + token_addresses.append(fast_to_checksum_address(token_address)) + else: + # Try ENS resolve + try: + if resolved_address := ethereum_client.w3.ens.address( + token_address + ): + token_addresses.append(resolved_address) + except (ValueError, Web3Exception): + logger.warning("Cannot resolve %s ENS address", token_address) with transaction.atomic(): Token.objects.update(trusted=False) diff --git a/safe_transaction_service/tokens/tests/clients/test_clients.py b/safe_transaction_service/tokens/tests/clients/test_clients.py deleted file mode 100644 index d83b3482..00000000 --- a/safe_transaction_service/tokens/tests/clients/test_clients.py +++ /dev/null @@ -1,88 +0,0 @@ -from unittest import mock - -from django.test import TestCase - -from requests import Session - -from gnosis.eth.tests.utils import just_test_if_mainnet_node - -from ...clients import CannotGetPrice, CoingeckoClient, KrakenClient, KucoinClient - - -class TestClients(TestCase): - def test_get_bnb_usd_price(self) -> float: - just_test_if_mainnet_node() - kucoin_client = KucoinClient() - coingecko_client = CoingeckoClient() - - price = kucoin_client.get_bnb_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - price = coingecko_client.get_bnb_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - def test_get_dai_usd_price_kraken(self) -> float: - just_test_if_mainnet_node() - kraken_client = KrakenClient() - - # Kraken is used - price = kraken_client.get_dai_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - def test_get_ether_usd_price_kraken(self): - just_test_if_mainnet_node() - kraken_client = KrakenClient() - - # Kraken is used - eth_usd_price = kraken_client.get_ether_usd_price() - self.assertIsInstance(eth_usd_price, float) - self.assertGreater(eth_usd_price, 0) - - def 
test_get_ewt_usd_price_kraken(self) -> float: - just_test_if_mainnet_node() - kraken_client = KrakenClient() - - # Kraken is used - price = kraken_client.get_ewt_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - def test_get_ether_usd_price_kucoin(self): - just_test_if_mainnet_node() - kucoin_client = KucoinClient() - - eth_usd_price = kucoin_client.get_ether_usd_price() - self.assertIsInstance(eth_usd_price, float) - self.assertGreater(eth_usd_price, 0) - - def test_get_matic_usd_price(self) -> float: - just_test_if_mainnet_node() - - for provider in [KucoinClient(), KrakenClient(), CoingeckoClient()]: - with self.subTest(provider=provider): - price = provider.get_matic_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - def test_get_ewt_usd_price_coingecko(self) -> float: - just_test_if_mainnet_node() - coingecko_client = CoingeckoClient() - - price = coingecko_client.get_ewt_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - def test_get_ewt_usd_price_kucoin(self) -> float: - just_test_if_mainnet_node() - kucoin_client = KucoinClient() - - price = kucoin_client.get_ewt_usd_price() - self.assertIsInstance(price, float) - self.assertGreater(price, 0) - - with mock.patch.object(Session, "get", side_effect=IOError("Connection Error")): - with self.assertRaises(CannotGetPrice): - kucoin_client.get_ewt_usd_price() diff --git a/safe_transaction_service/tokens/tests/clients/test_coingecko_client.py b/safe_transaction_service/tokens/tests/clients/test_coingecko_client.py index a08ae820..00a799d0 100644 --- a/safe_transaction_service/tokens/tests/clients/test_coingecko_client.py +++ b/safe_transaction_service/tokens/tests/clients/test_coingecko_client.py @@ -4,7 +4,6 @@ from safe_transaction_service.history.tests.utils import skip_on -from ...clients import CannotGetPrice from ...clients.coingecko_client import CoingeckoClient from ...clients.exceptions import CoingeckoRateLimitError @@ -13,38 +12,6 @@ class TestCoingeckoClient(TestCase): GNO_TOKEN_ADDRESS = "0x6810e776880C02933D47DB1b9fc05908e5386b96" GNO_GNOSIS_CHAIN_ADDRESS = "0x9C58BAcC331c9aa871AFD802DB6379a98e80CEdb" - @skip_on(CannotGetPrice, reason="Cannot get price from Coingecko") - def test_coingecko_client(self): - self.assertTrue(CoingeckoClient.supports_network(EthereumNetwork.MAINNET)) - self.assertTrue( - CoingeckoClient.supports_network( - EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET - ) - ) - self.assertTrue(CoingeckoClient.supports_network(EthereumNetwork.POLYGON)) - self.assertTrue(CoingeckoClient.supports_network(EthereumNetwork.GNOSIS)) - - # Test Mainnet - coingecko_client = CoingeckoClient() - non_existing_token_address = "0xda2f8b8386302C354a90DB670E40beA3563AF454" - self.assertGreater(coingecko_client.get_token_price(self.GNO_TOKEN_ADDRESS), 0) - with self.assertRaises(CannotGetPrice): - coingecko_client.get_token_price(non_existing_token_address) - - # Test Binance - bsc_coingecko_client = CoingeckoClient( - EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET - ) - binance_peg_ethereum_address = "0x2170Ed0880ac9A755fd29B2688956BD959F933F8" - self.assertGreater( - bsc_coingecko_client.get_token_price(binance_peg_ethereum_address), 0 - ) - - # Test Polygon - polygon_coingecko_client = CoingeckoClient(EthereumNetwork.POLYGON) - bnb_pos_address = "0xb33EaAd8d922B1083446DC23f610c2567fB5180f" - self.assertGreater(polygon_coingecko_client.get_token_price(bnb_pos_address), 0) - @skip_on(CoingeckoRateLimitError, reason="Coingecko rate 
limit reached") def test_get_logo_url(self): # Test Mainnet diff --git a/safe_transaction_service/tokens/tests/test_commands.py b/safe_transaction_service/tokens/tests/test_commands.py index 9bb5654a..b6b487fb 100644 --- a/safe_transaction_service/tokens/tests/test_commands.py +++ b/safe_transaction_service/tokens/tests/test_commands.py @@ -7,9 +7,9 @@ from eth_account import Account -from gnosis.eth import EthereumClientProvider from gnosis.eth.ethereum_client import Erc20Info, Erc20Manager -from gnosis.eth.tests.utils import deploy_example_erc20 +from gnosis.eth.tests.ethereum_test_case import EthereumTestCaseMixin +from gnosis.eth.tests.utils import deploy_erc20 from ..clients import CoinMarketCapClient, CoinMarketCapToken from ..models import Token @@ -51,7 +51,7 @@ ] -class TestCommands(TestCase): +class TestCommands(EthereumTestCaseMixin, TestCase): def test_add_token(self): command = "add_token" buf = StringIO() @@ -63,8 +63,14 @@ def test_add_token(self): token.refresh_from_db() self.assertTrue(token.trusted) - ethereum_client = EthereumClientProvider() - erc20 = deploy_example_erc20(ethereum_client.w3, 10, Account.create().address) + erc20 = deploy_erc20( + self.ethereum_client.w3, + self.ethereum_test_account, + "Uxio", + "UXI", + Account.create().address, + 10, + ) call_command(command, erc20.address, "--no-prompt", stdout=buf) self.assertIn("Created token", buf.getvalue()) self.assertTrue(Token.objects.get(address=erc20.address).trusted) diff --git a/safe_transaction_service/tokens/tests/test_price_service.py b/safe_transaction_service/tokens/tests/test_price_service.py deleted file mode 100644 index 8e4c519e..00000000 --- a/safe_transaction_service/tokens/tests/test_price_service.py +++ /dev/null @@ -1,305 +0,0 @@ -from unittest import mock -from unittest.mock import MagicMock - -from django.test import TestCase - -from eth_account import Account - -from gnosis.eth import EthereumClient, EthereumClientProvider, EthereumNetwork -from gnosis.eth.oracles import KyberOracle, OracleException, UnderlyingToken - -from safe_transaction_service.history.tests.utils import just_test_if_mainnet_node -from safe_transaction_service.utils.redis import get_redis - -from ..clients import CannotGetPrice, CoingeckoClient, KrakenClient, KucoinClient -from ..services.price_service import PriceService, PriceServiceProvider - - -class TestPriceService(TestCase): - @classmethod - def setUpClass(cls) -> None: - cls.redis = get_redis() - cls.ethereum_client = EthereumClientProvider() - - @classmethod - def tearDownClass(cls) -> None: - PriceServiceProvider.del_singleton() - - def setUp(self) -> None: - self.price_service = PriceServiceProvider() - - def tearDown(self) -> None: - PriceServiceProvider.del_singleton() - - def test_available_price_oracles(self): - # Ganache should have no oracle enabled - self.assertEqual(len(self.price_service.enabled_price_oracles), 0) - self.assertEqual(len(self.price_service.enabled_price_pool_oracles), 0) - self.assertEqual(len(self.price_service.enabled_composed_price_oracles), 0) - - def test_available_price_oracles_mainnet(self): - # Mainnet should have every oracle enabled - mainnet_node = just_test_if_mainnet_node() - price_service = PriceService(EthereumClient(mainnet_node), self.redis) - self.assertEqual(len(price_service.enabled_price_oracles), 6) - self.assertEqual(len(price_service.enabled_price_pool_oracles), 3) - self.assertEqual(len(price_service.enabled_composed_price_oracles), 4) - - @mock.patch.object(KrakenClient, "get_ether_usd_price", 
return_value=0.4) - @mock.patch.object(KucoinClient, "get_ether_usd_price", return_value=0.5) - def test_get_ether_usd_price(self, kucoin_mock: MagicMock, kraken_mock: MagicMock): - price_service = self.price_service - eth_usd_price = price_service.get_ether_usd_price() - self.assertEqual(eth_usd_price, kraken_mock.return_value) - kucoin_mock.assert_not_called() - - kraken_mock.side_effect = CannotGetPrice - - # cache_ether_usd_price is working - eth_usd_price = price_service.get_native_coin_usd_price() - self.assertEqual(eth_usd_price, kraken_mock.return_value) - - # Clear cache_ether_usd_price - price_service.cache_ether_usd_price.clear() - self.assertEqual(eth_usd_price, kraken_mock.return_value) - kucoin_mock.assert_not_called() - - def test_get_native_coin_usd_price(self): - price_service = self.price_service - - # Unsupported network (Ganache) - with mock.patch.object( - KrakenClient, "get_ether_usd_price", return_value=1_600 - ) as kraken_mock: - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1_600) - - # Test cache is working - kraken_mock.side_effect = CannotGetPrice - self.assertEqual(price_service.get_native_coin_usd_price(), 1_600) - - # Gnosis Chain - price_service.ethereum_network = EthereumNetwork.GNOSIS - with mock.patch.object(KrakenClient, "get_dai_usd_price", return_value=1.5): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1.5) - - with mock.patch.object( - KrakenClient, "get_dai_usd_price", side_effect=CannotGetPrice - ): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1) - - # POLYGON - price_service.ethereum_network = EthereumNetwork.POLYGON - with mock.patch.object(KrakenClient, "get_matic_usd_price", return_value=0.7): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 0.7) - - # EWT - price_service.ethereum_network = EthereumNetwork.ENERGY_WEB_CHAIN - with mock.patch.object(KrakenClient, "get_ewt_usd_price", return_value=0.9): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 0.9) - - # BINANCE - price_service.ethereum_network = EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET - with mock.patch.object(KucoinClient, "get_bnb_usd_price", return_value=1.2): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1.2) - - # Gather - price_service.ethereum_network = EthereumNetwork.GATHER_MAINNET_NETWORK - with mock.patch.object( - CoingeckoClient, "get_gather_usd_price", return_value=1.7 - ): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1.7) - - # Avalanche - price_service.ethereum_network = EthereumNetwork.AVALANCHE_C_CHAIN - with mock.patch.object(KrakenClient, "get_avax_usd_price", return_value=6.5): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 6.5) - - # Aurora - price_service.ethereum_network = EthereumNetwork.AURORA_MAINNET - with mock.patch.object(KucoinClient, "get_aurora_usd_price", return_value=1.3): - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 1.3) - - # Cronos - with mock.patch.object(KucoinClient, "get_cro_usd_price", return_value=4.4): - price_service.ethereum_network = 
EthereumNetwork.CRONOS_MAINNET_BETA - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 4.4) - - # KuCoin - with mock.patch.object(KucoinClient, "get_kcs_usd_price", return_value=4.4): - price_service.ethereum_network = EthereumNetwork.KCC_MAINNET - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 4.4) - - # Milkomeda Cardano - with mock.patch.object(KrakenClient, "get_ada_usd_price", return_value=5.5): - price_service.ethereum_network = EthereumNetwork.MILKOMEDA_C1_MAINNET - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_native_coin_usd_price(), 5.5) - - # Milkomeda Algorand - with mock.patch.object(KrakenClient, "get_algo_usd_price", return_value=6.6): - price_service.ethereum_network = EthereumNetwork.MILKOMEDA_A1_MAINNET - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_algorand_usd_price(), 6.6) - - # XDC - with mock.patch.object(KucoinClient, "get_xdc_usd_price", return_value=7.7): - price_service.ethereum_network = EthereumNetwork.XINFIN_XDC_NETWORK - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_xdc_usd_price(), 7.7) - - price_service.ethereum_network = EthereumNetwork.XDC_APOTHEM_NETWORK - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_xdc_usd_price(), 7.7) - - # Meter - with mock.patch.object(CoingeckoClient, "get_mtr_usd_price", return_value=8.0): - price_service.ethereum_network = EthereumNetwork.METER_MAINNET - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_mtr_usd_price(), 8.0) - - price_service.ethereum_network = EthereumNetwork.METER_TESTNET - price_service.cache_native_coin_usd_price.clear() - self.assertEqual(price_service.get_mtr_usd_price(), 8.0) - - @mock.patch.object(CoingeckoClient, "get_bnb_usd_price", return_value=3.0) - @mock.patch.object(KucoinClient, "get_bnb_usd_price", return_value=5.0) - def test_get_binance_usd_price( - self, - get_bnb_usd_price_binance_mock: MagicMock, - get_bnb_usd_price_coingecko: MagicMock, - ): - price_service = self.price_service - - price = price_service.get_binance_usd_price() - self.assertEqual(price, 5.0) - - get_bnb_usd_price_binance_mock.side_effect = CannotGetPrice - price = price_service.get_binance_usd_price() - self.assertEqual(price, 3.0) - - @mock.patch.object(CoingeckoClient, "get_ewt_usd_price", return_value=3.0) - @mock.patch.object(KucoinClient, "get_ewt_usd_price", return_value=7.0) - @mock.patch.object(KrakenClient, "get_ewt_usd_price", return_value=5.0) - def test_get_ewt_usd_price( - self, - get_ewt_usd_price_kraken_mock: MagicMock, - get_ewt_usd_price_kucoin_mock: MagicMock, - get_ewt_usd_price_coingecko_mock: MagicMock, - ): - price_service = self.price_service - - price = price_service.get_ewt_usd_price() - self.assertEqual(price, 5.0) - - get_ewt_usd_price_kraken_mock.side_effect = CannotGetPrice - price = price_service.get_ewt_usd_price() - self.assertEqual(price, 7.0) - - get_ewt_usd_price_kucoin_mock.side_effect = CannotGetPrice - price = price_service.get_ewt_usd_price() - self.assertEqual(price, 3.0) - - @mock.patch.object(CoingeckoClient, "get_matic_usd_price", return_value=3.0) - @mock.patch.object(KucoinClient, "get_matic_usd_price", return_value=7.0) - @mock.patch.object(KrakenClient, "get_matic_usd_price", return_value=5.0) - def test_get_matic_usd_price( - self, - 
get_matic_usd_price_kraken_mock: MagicMock, - get_matic_usd_price_binance_mock: MagicMock, - get_matic_usd_price_coingecko_mock: MagicMock, - ): - price_service = self.price_service - - price = price_service.get_matic_usd_price() - self.assertEqual(price, 5.0) - - get_matic_usd_price_kraken_mock.side_effect = CannotGetPrice - price = price_service.get_matic_usd_price() - self.assertEqual(price, 7.0) - - get_matic_usd_price_binance_mock.side_effect = CannotGetPrice - price = price_service.get_matic_usd_price() - self.assertEqual(price, 3.0) - - def test_get_token_eth_value(self): - mainnet_node = just_test_if_mainnet_node() - price_service = PriceService(EthereumClient(mainnet_node), self.redis) - gno_token_address = "0x6810e776880C02933D47DB1b9fc05908e5386b96" - token_eth_value = price_service.get_token_eth_value(gno_token_address) - self.assertIsInstance(token_eth_value, float) - self.assertGreater(token_eth_value, 0) - - @mock.patch.object(KyberOracle, "get_price", return_value=1.23, autospec=True) - def test_get_token_eth_value_mocked(self, kyber_get_price_mock: MagicMock): - price_service = self.price_service - oracle_1 = mock.MagicMock() - oracle_1.get_price.return_value = 1.23 - oracle_2 = mock.MagicMock() - oracle_3 = mock.MagicMock() - price_service.enabled_price_oracles = (oracle_1, oracle_2, oracle_3) - self.assertEqual(len(price_service.enabled_price_oracles), 3) - random_address = Account.create().address - self.assertEqual(len(price_service.cache_token_eth_value), 0) - - self.assertEqual(price_service.get_token_eth_value(random_address), 1.23) - self.assertEqual(price_service.cache_token_eth_value[(random_address,)], 1.23) - - # Make every oracle fail - oracle_1.get_price.side_effect = OracleException - oracle_2.get_price.side_effect = OracleException - oracle_3.get_price.side_effect = OracleException - - # Check cache - self.assertEqual(price_service.get_token_eth_value(random_address), 1.23) - random_address_2 = Account.create().address - self.assertEqual(price_service.get_token_eth_value(random_address_2), 0.0) - self.assertEqual(price_service.cache_token_eth_value[(random_address,)], 1.23) - self.assertEqual(price_service.cache_token_eth_value[(random_address_2,)], 0.0) - - @mock.patch.object( - PriceService, "get_underlying_tokens", return_value=[], autospec=True - ) - @mock.patch.object( - PriceService, "get_token_eth_value", autospec=True, return_value=1.0 - ) - def test_get_token_eth_price_from_composed_oracles( - self, get_token_eth_value_mock: MagicMock, price_service_mock: MagicMock - ): - price_service = self.price_service - token_one = UnderlyingToken("0x48f07301E9E29c3C38a80ae8d9ae771F224f1054", 0.482) - token_two = UnderlyingToken("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", 0.376) - token_three = UnderlyingToken("0xA0b86991c6218b36c1d19D4a2e9Eb0cE360", 0.142) - price_service_mock.return_value = [token_one, token_two, token_three] - curve_price = "0xe7ce624c00381b4b7abb03e633fb4acac4537dd6" - eth_price = price_service.get_token_eth_price_from_composed_oracles(curve_price) - self.assertEqual(eth_price, 1.0) - - def test_get_token_eth_price_from_oracles(self): - mainnet_node = just_test_if_mainnet_node() - price_service = PriceService(EthereumClient(mainnet_node), self.redis) - gno_token_address = "0x6810e776880C02933D47DB1b9fc05908e5386b96" - token_eth_value = price_service.get_token_eth_price_from_oracles( - gno_token_address - ) - self.assertIsInstance(token_eth_value, float) - self.assertGreater(token_eth_value, 0) - with mock.patch.object( - PriceService, 
"get_token_eth_value", autospec=True, return_value=0 - ): - token_eth_value_from_coingecko = ( - price_service.get_token_eth_price_from_oracles(gno_token_address) - ) - self.assertAlmostEqual( - token_eth_value, token_eth_value_from_coingecko, delta=0.1 - ) diff --git a/safe_transaction_service/tokens/tests/test_tasks.py b/safe_transaction_service/tokens/tests/test_tasks.py index 96d099a2..88f63165 100644 --- a/safe_transaction_service/tokens/tests/test_tasks.py +++ b/safe_transaction_service/tokens/tests/test_tasks.py @@ -3,26 +3,14 @@ from unittest.mock import MagicMock from django.test import TestCase -from django.utils import timezone -from eth_account import Account +from ens import ENS -from gnosis.eth.ethereum_client import ( - EthereumClient, - EthereumClientProvider, - EthereumNetwork, -) +from gnosis.eth.ethereum_client import EthereumNetwork -from ...history.tests.utils import just_test_if_mainnet_node from ...utils.redis import get_redis from ..models import TokenList -from ..services import PriceService, PriceServiceProvider -from ..tasks import ( - EthValueWithTimestamp, - calculate_token_eth_price_task, - fix_pool_tokens_task, - update_token_info_from_token_list_task, -) +from ..tasks import fix_pool_tokens_task, update_token_info_from_token_list_task from .factories import TokenFactory, TokenListFactory from .mocks import token_list_mock @@ -31,7 +19,6 @@ class TestTasks(TestCase): def setUp(self) -> None: - PriceServiceProvider.del_singleton() get_redis().flushall() def tearDown(self) -> None: @@ -44,102 +31,9 @@ def tearDown(self) -> None: def test_fix_pool_tokens_task(self, get_network_mock: MagicMock): self.assertEqual(fix_pool_tokens_task.delay().result, 0) - get_network_mock.return_value = EthereumNetwork.RINKEBY + get_network_mock.return_value = EthereumNetwork.SEPOLIA self.assertIsNone(fix_pool_tokens_task.delay().result) - @mock.patch.object( - PriceService, "get_token_eth_value", autospec=True, return_value=4815 - ) - @mock.patch.object(timezone, "now", return_value=timezone.now()) - def test_calculate_token_eth_price_task( - self, timezone_now_mock: MagicMock, get_token_eth_value_mock: MagicMock - ): - random_token_address = Account.create().address - random_redis_key = Account.create().address - expected = EthValueWithTimestamp( - get_token_eth_value_mock.return_value, timezone_now_mock.return_value - ) - self.assertEqual( - calculate_token_eth_price_task.delay( - random_token_address, random_redis_key - ).result, - expected, - ) - - # Check caching works even if we change the token_address - another_token_address = Account.create().address - self.assertEqual( - calculate_token_eth_price_task.delay( - another_token_address, random_redis_key - ).result, - expected, - ) - - with self.settings(CELERY_ALWAYS_EAGER=False): - random_token_address = Account.create().address - random_redis_key = Account.create().address - calculate_token_eth_price_task.delay(random_token_address, random_redis_key) - - def test_calculate_token_eth_price_task_without_mock(self): - mainnet_node_url = just_test_if_mainnet_node() - EthereumClientProvider.instance = EthereumClient(mainnet_node_url) - - dai_address = "0x6B175474E89094C44Da98b954EedeAC495271d0F" - random_redis_key = Account.create().address - eth_value_with_timestamp = calculate_token_eth_price_task( - dai_address, random_redis_key - ) - self.assertGreater(eth_value_with_timestamp.eth_value, 0.0) - - pool_together_address = "0x334cBb5858417Aee161B53Ee0D5349cCF54514CF" - random_redis_key = Account.create().address - 
eth_value_with_timestamp = calculate_token_eth_price_task( - pool_together_address, random_redis_key - ) - self.assertGreater(eth_value_with_timestamp.eth_value, 0.0) - - random_token_address = Account.create().address - random_redis_key = Account.create().address - eth_value_with_timestamp = calculate_token_eth_price_task( - random_token_address, random_redis_key - ) - self.assertEqual(eth_value_with_timestamp.eth_value, 0.0) - del EthereumClientProvider.instance - - @mock.patch.object( - PriceService, "get_token_eth_value", autospec=True, return_value=4815 - ) - @mock.patch.object( - PriceService, "get_token_usd_price", autospec=True, return_value=0.0 - ) - @mock.patch.object(timezone, "now", return_value=timezone.now()) - def test_return_last_valid_token_price( - self, - timezone_now_mock: MagicMock, - get_token_usd_price: MagicMock, - get_token_eth_value_mock: MagicMock, - ): - random_token_address = Account.create().address - random_redis_key = Account.create().address - expected = EthValueWithTimestamp( - get_token_eth_value_mock.return_value, timezone_now_mock.return_value - ) - self.assertEqual( - calculate_token_eth_price_task.delay( - random_token_address, random_redis_key - ).result, - expected, - ) - - get_token_eth_value_mock.return_value = 0.0 - - self.assertEqual( - calculate_token_eth_price_task.delay( - random_token_address, random_redis_key, True - ).result, - expected, - ) - @mock.patch( "safe_transaction_service.tokens.tasks.get_ethereum_network", return_value=EthereumNetwork.MAINNET, @@ -159,9 +53,34 @@ def test_update_token_info_from_token_list_task( self.assertEqual(update_token_info_from_token_list_task.delay().result, 0) # Create a token in the list, it should be updated - TokenFactory(address="0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2") + token = TokenFactory(address="0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2") + self.assertFalse(token.trusted) self.assertEqual(update_token_info_from_token_list_task.delay().result, 1) # Create another token in the list, both should be updated - TokenFactory(address="0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599") + token_2 = TokenFactory(address="0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599") + self.assertFalse(token_2.trusted) + self.assertEqual(update_token_info_from_token_list_task.delay().result, 2) + + # Test ENS + get_tokens_mock.return_value.append( + { + "chainId": 1, + "address": "safe.eth", + "symbol": "SAFE", + "name": "Safe Token", + "decimals": 18, + } + ) + + safe_token_address = "0x5aFE3855358E112B5647B952709E6165e1c1eEEe" + token_ens = TokenFactory(address=safe_token_address) + self.assertFalse(token_ens.trusted) + + # Ens cannot be resolved self.assertEqual(update_token_info_from_token_list_task.delay().result, 2) + + with mock.patch.object(ENS, "address", return_value=safe_token_address): + self.assertEqual(update_token_info_from_token_list_task.delay().result, 3) + token_ens.refresh_from_db() + self.assertTrue(token_ens.trusted) diff --git a/safe_transaction_service/tokens/tests/test_views.py b/safe_transaction_service/tokens/tests/test_views.py index 7976ec5a..3d3f61f6 100644 --- a/safe_transaction_service/tokens/tests/test_views.py +++ b/safe_transaction_service/tokens/tests/test_views.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock from django.urls import reverse -from django.utils import timezone from eth_account import Account from rest_framework import status @@ -13,10 +12,7 @@ from gnosis.eth.ethereum_client import Erc20Manager, InvalidERC20Info from gnosis.safe.tests.safe_test_case import 
SafeTestCaseMixin -from ..clients import CannotGetPrice from ..models import Token -from ..services import PriceService -from ..services.price_service import FiatCode, FiatPriceWithTimestamp from .factories import TokenFactory logger = logging.getLogger(__name__) @@ -33,7 +29,11 @@ def test_token_view(self): self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual( response.data, - {"detail": ErrorDetail(string="Not found.", code="not_found")}, + { + "detail": ErrorDetail( + string="No Token matches the given query.", code="not_found" + ) + }, ) token = TokenFactory(address=random_address, decimals=18) # ERC20 @@ -48,6 +48,7 @@ def test_token_view(self): "name": token.name, "symbol": token.symbol, "decimals": token.decimals, + "trusted": token.trusted, }, ) @@ -63,6 +64,7 @@ def test_token_view(self): "name": token.name, "symbol": token.symbol, "decimals": token.decimals, + "trusted": token.trusted, }, ) @@ -99,107 +101,7 @@ def test_tokens_view(self): "name": token.name, "symbol": token.symbol, "decimals": token.decimals, + "trusted": token.trusted, } ], ) - - def test_token_price_view(self): - invalid_address = "0x1234" - response = self.client.get( - reverse("v1:tokens:price-usd", args=(invalid_address,)) - ) - self.assertEqual(response.status_code, status.HTTP_422_UNPROCESSABLE_ENTITY) - - random_address = Account.create().address - response = self.client.get( - reverse("v1:tokens:price-usd", args=(random_address,)) - ) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual( - response.data, - {"detail": ErrorDetail(string="Not found.", code="not_found")}, - ) - - token = TokenFactory(address=random_address, decimals=18) # ERC20 - response = self.client.get( - reverse("v1:tokens:price-usd", args=(token.address,)) - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["fiat_code"], "USD") - self.assertEqual(response.data["fiat_price"], "0.0") - self.assertTrue(response.data["timestamp"]) - - fiat_price_with_timestamp = FiatPriceWithTimestamp( - 48.1516, FiatCode.USD, timezone.now() - ) - with mock.patch.object( - PriceService, - "get_token_cached_usd_values", - autospec=True, - return_value=iter([fiat_price_with_timestamp]), - ) as get_token_cached_usd_values_mock: - response = self.client.get( - reverse("v1:tokens:price-usd", args=(token.address,)) - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["fiat_code"], "USD") - self.assertEqual( - response.data["fiat_price"], str(fiat_price_with_timestamp.fiat_price) - ) - self.assertTrue(response.data["timestamp"]) - self.assertEqual( - get_token_cached_usd_values_mock.call_args.args[1], [token.address] - ) - - # Test copy price address - get_token_cached_usd_values_mock.return_value = iter( - [fiat_price_with_timestamp] - ) - token.copy_price = Account.create().address - token.save(update_fields=["copy_price"]) - response = self.client.get( - reverse("v1:tokens:price-usd", args=(token.address,)) - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["fiat_code"], "USD") - self.assertEqual( - response.data["fiat_price"], str(fiat_price_with_timestamp.fiat_price) - ) - self.assertTrue(response.data["timestamp"]) - self.assertEqual( - get_token_cached_usd_values_mock.call_args.args[1], [token.copy_price] - ) - - @mock.patch.object( - PriceService, "get_native_coin_usd_price", return_value=321.2, autospec=True - ) - def test_token_price_view_address_0( - 
self, get_native_coin_usd_price_mock: MagicMock - ): - token_address = "0x0000000000000000000000000000000000000000" - - response = self.client.get( - reverse("v1:tokens:price-usd", args=(token_address,)) - ) - - # Native token should be retrieved even if it is not part of the Token table - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["fiat_code"], "USD") - self.assertEqual(response.data["fiat_price"], "321.2") - self.assertTrue(response.data["timestamp"]) - - @mock.patch.object( - PriceService, - "get_native_coin_usd_price", - side_effect=CannotGetPrice(), - ) - def test_token_price_view_error(self, get_native_coin_usd_price_mock: MagicMock): - token_address = "0x0000000000000000000000000000000000000000" - - response = self.client.get( - reverse("v1:tokens:price-usd", args=(token_address,)) - ) - - self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE) - self.assertEqual(response.data["message"], "Price retrieval failed") - self.assertEqual(response.data["arguments"], [token_address]) diff --git a/safe_transaction_service/tokens/urls.py b/safe_transaction_service/tokens/urls.py index 5b636d17..60e46f12 100644 --- a/safe_transaction_service/tokens/urls.py +++ b/safe_transaction_service/tokens/urls.py @@ -7,5 +7,4 @@ urlpatterns = [ path("", views.TokensView.as_view(), name="list"), path("/", views.TokenView.as_view(), name="detail"), - path("/prices/usd/", views.TokenPriceView.as_view(), name="price-usd"), ] diff --git a/safe_transaction_service/tokens/views.py b/safe_transaction_service/tokens/views.py index 85cd683d..006fface 100644 --- a/safe_transaction_service/tokens/views.py +++ b/safe_transaction_service/tokens/views.py @@ -1,4 +1,3 @@ -from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page @@ -6,15 +5,11 @@ from rest_framework import response, status from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView, RetrieveAPIView -from rest_framework.response import Response -from gnosis.eth.constants import NULL_ADDRESS from gnosis.eth.utils import fast_is_checksum_address from . 
import filters, serializers -from .clients import CannotGetPrice from .models import Token -from .services import PriceServiceProvider class TokenView(RetrieveAPIView): @@ -54,55 +49,3 @@ class TokensView(ListAPIView): @method_decorator(cache_page(60 * 15)) # Cache 15 minutes def get(self, request, *args, **kwargs): return super().get(request, *args, **kwargs) - - -class TokenPriceView(RetrieveAPIView): - serializer_class = serializers.TokenPriceResponseSerializer - lookup_field = "address" - queryset = Token.objects.all() - - @method_decorator(cache_page(60 * 10)) # Cache 10 minutes - def get(self, request, *args, **kwargs): - address = self.kwargs["address"] - if not fast_is_checksum_address(address): - return response.Response( - status=status.HTTP_422_UNPROCESSABLE_ENTITY, - data={ - "code": 1, - "message": "Invalid ethereum address", - "arguments": [address], - }, - ) - try: - price_service = PriceServiceProvider() - if address == NULL_ADDRESS: - data = { - "fiat_code": "USD", - "fiat_price": str(price_service.get_native_coin_usd_price()), - "timestamp": timezone.now(), - } - else: - token = self.get_object() # Raises 404 if not found - fiat_price_with_timestamp = next( - price_service.get_token_cached_usd_values( - [token.get_price_address()] - ) - ) - data = { - "fiat_code": fiat_price_with_timestamp.fiat_code.name, - "fiat_price": str(fiat_price_with_timestamp.fiat_price), - "timestamp": fiat_price_with_timestamp.timestamp, - } - serializer = self.get_serializer(data=data) - assert serializer.is_valid() - return Response(status=status.HTTP_200_OK, data=serializer.data) - - except CannotGetPrice: - return Response( - status=status.HTTP_503_SERVICE_UNAVAILABLE, - data={ - "code": 10, - "message": "Price retrieval failed", - "arguments": [address], - }, - ) diff --git a/safe_transaction_service/utils/admin.py b/safe_transaction_service/utils/admin.py index 29105134..d248d674 100644 --- a/safe_transaction_service/utils/admin.py +++ b/safe_transaction_service/utils/admin.py @@ -1,4 +1,9 @@ from django.contrib import admin +from django.contrib.admin.utils import lookup_spawns_duplicates +from django.core.exceptions import FieldDoesNotExist, ValidationError +from django.db import models +from django.db.models.constants import LOOKUP_SEP +from django.utils.text import smart_split, unescape_string_literal class HasLogoFilterAdmin(admin.SimpleListFilter): @@ -18,3 +23,83 @@ def queryset(self, request, queryset): return queryset.with_logo() else: return queryset + + +# TODO Use the class in safe-eth-py +class AdvancedAdminSearchMixin: + """ + Use database indexes when using exact search instead + of converting everything to text before searching + """ + + def get_search_results(self, request, queryset, search_term): + """ + Return a tuple containing a queryset to implement the search + and a boolean indicating if the results may contain duplicates. + + This function was modified from Django original get_search_results + to allow `exact` search that uses database indexes + """ + + def construct_search(field_name): + if field_name.startswith("^"): + return "%s__istartswith" % field_name[1:] + elif field_name.startswith("=="): + return "%s__exact" % field_name[2:] + elif field_name.startswith("="): + return "%s__iexact" % field_name[1:] + elif field_name.startswith("@"): + return "%s__search" % field_name[1:] + # Use field_name if it includes a lookup. + opts = queryset.model._meta + lookup_fields = field_name.split(LOOKUP_SEP) + # Go through the fields, following all relations. 
+ prev_field = None + for path_part in lookup_fields: + if path_part == "pk": + path_part = opts.pk.name + try: + field = opts.get_field(path_part) + except FieldDoesNotExist: + # Use valid query lookups. + if prev_field and prev_field.get_lookup(path_part): + return field_name + else: + prev_field = field + if hasattr(field, "path_infos"): + # Update opts to follow the relation. + opts = field.path_infos[-1].to_opts + # Otherwise, use the field with icontains. + return "%s__icontains" % field_name + + may_have_duplicates = False + search_fields = self.get_search_fields(request) + if search_fields and search_term: + orm_lookups = [ + construct_search(str(search_field)) for search_field in search_fields + ] + term_queries = [] + for bit in smart_split(search_term): + if bit.startswith(('"', "'")) and bit[0] == bit[-1]: + bit = unescape_string_literal(bit) + + valid_queries = [] + for orm_lookup in orm_lookups: + try: + # Check if query is valid (for example, not a number provided for an integer exact query) + # This is the main difference comparing to Django official implementation + queryset.filter(**{orm_lookup: bit}) + valid_queries.append((orm_lookup, bit)) + except (ValueError, ValidationError): + pass + or_queries = models.Q.create( + [valid_query for valid_query in valid_queries], + connector=models.Q.OR, + ) + term_queries.append(or_queries) + queryset = queryset.filter(models.Q.create(term_queries)) + may_have_duplicates |= any( + lookup_spawns_duplicates(self.opts, search_spec) + for search_spec in orm_lookups + ) + return queryset, may_have_duplicates diff --git a/safe_transaction_service/utils/constants.py b/safe_transaction_service/utils/constants.py new file mode 100644 index 00000000..57916716 --- /dev/null +++ b/safe_transaction_service/utils/constants.py @@ -0,0 +1 @@ +SIGNATURE_LENGTH = 5_000 diff --git a/safe_transaction_service/utils/ethereum.py b/safe_transaction_service/utils/ethereum.py index a0360cd0..facae383 100644 --- a/safe_transaction_service/utils/ethereum.py +++ b/safe_transaction_service/utils/ethereum.py @@ -1,9 +1,6 @@ -from functools import cache - from gnosis.eth import EthereumClientProvider, EthereumNetwork -@cache def get_chain_id() -> int: return EthereumClientProvider().get_chain_id() diff --git a/safe_transaction_service/utils/serializers.py b/safe_transaction_service/utils/serializers.py index b4f38f07..a4ef76f2 100644 --- a/safe_transaction_service/utils/serializers.py +++ b/safe_transaction_service/utils/serializers.py @@ -27,25 +27,3 @@ def get_safe_owners(safe_address: ChecksumAddress) -> List[ChecksumAddress]: raise ValidationError( "Problem connecting to the ethereum node, please try again later" ) - - -def get_safe_version(safe_address: ChecksumAddress) -> str: - """ - - :param safe_address: - :return: Current version for a Safe - :raises: ValidationError - """ - ethereum_client = EthereumClientProvider() - safe = Safe(safe_address, ethereum_client) - try: - return safe.retrieve_version() - except Web3Exception as e: - raise ValidationError( - f"Could not get Safe {safe_address} version from blockchain, check contract exists on network " - f"{ethereum_client.get_network().name}" - ) from e - except IOError: - raise ValidationError( - "Problem connecting to the ethereum node, please try again later" - ) diff --git a/safe_transaction_service/utils/swagger.py b/safe_transaction_service/utils/swagger.py new file mode 100644 index 00000000..8d093dcd --- /dev/null +++ b/safe_transaction_service/utils/swagger.py @@ -0,0 +1,47 @@ +import re + +from 
drf_yasg.inspectors import SwaggerAutoSchema + + +class CustomSwaggerSchema(SwaggerAutoSchema): + VERSION_REGULAR_EXPRESSION = re.compile(r"v[\d]+") + CUSTOM_TAGS = { + "messages": ["messages"], + "owners": ["owners"], + "transaction": ["transactions"], + "transfers": ["transactions"], + "multisig-transaction": ["transactions"], + "user-operation": ["4337"], + "safe-operation": ["4337"], + } + + def get_tags(self, operation_keys=None): + """ + The method `get_tags` defined by default just gets the `operation_keys` (generated from the + url) and return the first element, for example in our case being all the tags `v1`, `v2`, etc. + + We are now defining some logic to generate `tags`: + - If they are explicitly defined in the view, we keep that (`self.overrides`). + - If the `operation_id` contains any of the words defined, we override the tag. + - Otherwise, just iterate the `operation_keys` and return + + :param operation_keys: + :return: + """ + operation_keys = operation_keys or self.operation_keys + + if tags := self.overrides.get("tags"): + return tags + + if len(operation_keys) == 1: + return list(operation_keys) + + operation_id = self.get_operation_id() + for key, tags in self.CUSTOM_TAGS.items(): + if key in operation_id: + return tags[:] + + for operation_key in operation_keys: + if not self.VERSION_REGULAR_EXPRESSION.match(operation_key): + return [operation_key] + return [] # This should never happen diff --git a/safe_transaction_service/utils/utils.py b/safe_transaction_service/utils/utils.py index abe971d6..74655628 100644 --- a/safe_transaction_service/utils/utils.py +++ b/safe_transaction_service/utils/utils.py @@ -1,3 +1,4 @@ +import datetime import socket from functools import wraps from itertools import islice @@ -80,3 +81,14 @@ def wrapper(*args, **kwargs): def parse_boolean_query_param(value: Union[bool, str, int]) -> bool: return value in (True, "True", "true", "1", 1) + + +def datetime_to_str(value: datetime.datetime) -> str: + """ + :param value: `datetime.datetime` value + :return: ``ISO 8601`` date with ``Z`` format + """ + value = value.isoformat() + if value.endswith("+00:00"): + value = value[:-6] + "Z" + return value diff --git a/scripts/benchmark_keccak.py b/scripts/benchmark_keccak.py index 1e75b253..9b8d82c3 100644 --- a/scripts/benchmark_keccak.py +++ b/scripts/benchmark_keccak.py @@ -6,32 +6,33 @@ from web3 import Web3 -def eth_hash_benchmark(): +def eth_hash_benchmark() -> str: return eth_hash_keccak(os.urandom(32)).hex() -def web3_benchmark(): +def web3_benchmark() -> str: return Web3.keccak(os.urandom(32)).hex() -def cryptodome_benchmark(): - k = crypto_keccak.new(data=os.urandom(32), digest_bits=256) - return k.hexdigest() +def cryptodome_benchmark() -> str: + return crypto_keccak.new(data=os.urandom(32), digest_bits=256).hexdigest() -def pysha3_benchmark(): +def pysha3_benchmark() -> str: return sha3.keccak_256(os.urandom(32)).hexdigest() if __name__ == "__main__": import timeit + iterations = 1_000_000 + print( "eth_hash", timeit.timeit( "eth_hash_benchmark()", setup="from __main__ import eth_hash_benchmark", - number=500000, + number=iterations, globals=globals(), ), ) @@ -40,7 +41,7 @@ def pysha3_benchmark(): timeit.timeit( "web3_benchmark()", setup="from __main__ import web3_benchmark", - number=500000, + number=iterations, globals=globals(), ), ) @@ -49,16 +50,16 @@ def pysha3_benchmark(): timeit.timeit( "cryptodome_benchmark()", setup="from __main__ import cryptodome_benchmark", - number=500000, + number=iterations, globals=globals(), ), ) 
print( - "pysha3", + "safe-pysha3", timeit.timeit( "pysha3_benchmark()", setup="from __main__ import pysha3_benchmark", - number=500000, + number=iterations, globals=globals(), ), ) diff --git a/setup.cfg b/setup.cfg index 570c062d..dcc565f4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,7 +22,7 @@ env = DJANGO_DOT_ENV_FILE=.env.test [mypy] -python_version = 3.10 +python_version = 3.12 check_untyped_defs = True ignore_missing_imports = True warn_unused_ignores = True
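Note on the token-list handling above: the TokenListToken TypedDict added to safe_transaction_service/tokens/models.py describes the entries returned by TokenList.get_tokens(), and update_token_info_from_token_list_task only keeps entries whose chainId matches the service's network. A minimal sketch of a conforming entry and that filter follows; the local TypedDict mirrors the one in the diff, and the logoURI value is illustrative, not from the repository.

from typing import TypedDict


class TokenListToken(TypedDict, total=False):
    # Mirrors the TypedDict added in tokens/models.py; "address" may also be an ENS name
    symbol: str
    name: str
    address: str
    decimals: int
    chainId: int
    logoURI: str


wrapped_ether: TokenListToken = {
    "chainId": 1,
    "address": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
    "symbol": "WETH",
    "name": "Wrapped Ether",
    "decimals": 18,
    "logoURI": "https://example.org/weth.png",  # illustrative URL
}

# Only entries whose chainId matches the service's network are processed by the task
current_chain_id = 1
matching_tokens = [t for t in [wrapped_ether] if t.get("chainId") == current_chain_id]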
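The same task now also resolves token-list addresses that are ENS names rather than hex addresses before checksumming them. A standalone sketch of that fallback, assuming web3.py v6 with a mainnet provider; the function name and RPC URL below are illustrative, while the real task goes through EthereumClientProvider and gnosis.eth.utils.fast_to_checksum_address.

from typing import Optional

from web3 import Web3
from web3.exceptions import Web3Exception


def resolve_token_address(w3: Web3, address_or_ens: str) -> Optional[str]:
    """Return a checksummed address, resolving ENS names such as 'safe.eth' when needed."""
    if address_or_ens.startswith("0x"):
        return Web3.to_checksum_address(address_or_ens)
    try:
        # ENS resolution returns None when the name does not resolve
        return w3.ens.address(address_or_ens)
    except (ValueError, Web3Exception):
        return None


w3 = Web3(Web3.HTTPProvider("https://example-mainnet-rpc.invalid"))  # illustrative RPC URL
# resolve_token_address(w3, "safe.eth") returns the resolved checksummed address or None;
# the accompanying test in tokens/tests/test_tasks.py mocks ENS.address to the Safe token address.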
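The AdvancedAdminSearchMixin added to safe_transaction_service/utils/admin.py keeps Django's admin search behaviour but lets a "==" prefix in search_fields produce an __exact lookup, so indexed columns are queried directly instead of being cast to text. A hedged usage sketch under the assumption that it is mixed into a ModelAdmin for the Token model; the TokenAdmin below is illustrative and the project's real admin registrations may differ.

from django.contrib import admin

from safe_transaction_service.tokens.models import Token
from safe_transaction_service.utils.admin import AdvancedAdminSearchMixin


class TokenAdmin(AdvancedAdminSearchMixin, admin.ModelAdmin):
    list_display = ("address", "symbol", "name", "trusted")
    # "==address" -> address__exact (uses the database index);
    # plain "name"/"symbol" fall back to the usual __icontains search
    search_fields = ("==address", "name", "symbol")


# admin.site.register(Token, TokenAdmin)  # shown for completeness; the repo already registers Token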
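Finally, datetime_to_str in safe_transaction_service/utils/utils.py serializes an aware UTC datetime to ISO 8601 with a trailing "Z". A quick illustration of the expected output, based on the implementation shown in the diff:

import datetime

from safe_transaction_service.utils.utils import datetime_to_str

dt = datetime.datetime(2024, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc)
assert datetime_to_str(dt) == "2024-01-02T03:04:05Z"  # the "+00:00" suffix from isoformat() becomes "Z"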