diff --git a/.github/workflows/ci-properties.json b/.github/workflows/ci-properties.json
new file mode 100644
index 00000000..a9e55f57
--- /dev/null
+++ b/.github/workflows/ci-properties.json
@@ -0,0 +1,26 @@
+{
+  "app_name": "rootstock-transaction-service",
+  "aws_region": "us-east-2",
+  "k8s_cluster_name": "k8-mainnet",
+  "registry": "docker.io",
+  "image_name": "sovryn/rootstock-transaction-service",
+  "prod_branch": "rootstock",
+  "dev_branch": "rootstock-stg",
+  "dockerfile_path": "./docker/web",
+  "APP_ENV_VARS": {
+    "PYTHONPATH": "/app/",
+    "C_FORCE_ROOT": "true",
+    "DEBUG": "0",
+    "ETH_L2_NETWORK": "1",
+    "ETH_INTERNAL_NO_FILTER": "1",
+    "DJANGO_SU_NAME": "sovryn",
+    "DJANGO_SU_EMAIL": "sovryn@sovryn.app",
+    "DJANGO_SETTINGS_MODULE": "config.settings.production"
+  },
+  "DEV_ENV_VARS": {
+    "INGRESS_HOSTNAME": "safe-tx.test.sovryn.app"
+  },
+  "PROD_ENV_VARS": {
+    "INGRESS_HOSTNAME": "safe-tx.sovryn.app"
+  }
+}
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
deleted file mode 100644
index e38a8f6d..00000000
--- a/.github/workflows/cla.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-name: "CLA Assistant"
-on:
-  issue_comment:
-    types: [ created ]
-  pull_request_target:
-    types: [ opened,closed,synchronize ]
-
-jobs:
-  CLAssistant:
-    runs-on: ubuntu-latest
-    steps:
-      - name: "CLA Assistant"
-        if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
-        # Beta Release
-        uses: cla-assistant/github-action@v2.4.0
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # the below token should have repo scope and must be manually added by you in the repository's secret
-          PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-        with:
-          path-to-signatures: 'signatures/version1/cla.json'
-          path-to-document: 'https://safe.global/cla'
-          # branch should not be protected
-          branch: 'cla-signatures'
-          allowlist: falvaradorodriguez,hectorgomezv,moisses89,luarx,fmrsabino,luarx,rmeissner,Uxio0,*bot # may need to update this expression if we add new bots
-
-          #below are the optional inputs - If the optional inputs are not given, then default values will be taken
-          #remote-organization-name: enter the remote organization name where the signatures should be stored (Default is storing the signatures in the same repository)
-          #remote-repository-name: enter the remote repository name where the signatures should be stored (Default is storing the signatures in the same repository)
-          #create-file-commit-message: 'For example: Creating file for storing CLA Signatures'
-          #signed-commit-message: 'For example: $contributorName has signed the CLA in #$pullRequestNo'
-          #custom-notsigned-prcomment: 'pull request comment with Introductory message to ask new contributors to sign'
-          #custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
-          #custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
-          #lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
-          #use-dco-flag: true - If you are using DCO instead of CLA
diff --git a/.github/workflows/deployment.yml b/.github/workflows/deployment.yml
new file mode 100644
index 00000000..a2ce9299
--- /dev/null
+++ b/.github/workflows/deployment.yml
@@ -0,0 +1,39 @@
+name: CI Template
+
+on:
+  push:
+    branches: [ rootstock, rootstock-stg ]
+  workflow_dispatch:
+
+jobs:
+  call-workflow-init:
+    uses: DistributedCollective/.github/.github/workflows/init.yml@master
+    with:
+      ref: ${{ github.ref }}
+      base_ref: ${{ github.base_ref }}
+  call-workflow-docker-build:
+    uses: DistributedCollective/.github/.github/workflows/docker.yml@master
+    needs: [call-workflow-init]
+    with:
+      KUBE_NAMESPACE: ${{ needs.call-workflow-init.outputs.KUBE_NAMESPACE }}
+      IMAGE_NAME: ${{ needs.call-workflow-init.outputs.image_name }}
+      event_name: ${{ github.event_name }}
+      registry: ${{ needs.call-workflow-init.outputs.registry }}
+      dockerfile_path: ${{ needs.call-workflow-init.outputs.dockerfile_path }}
+    secrets:
+      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+  call-workflow-k8s-deploy:
+    uses: DistributedCollective/.github/.github/workflows/deploy-k8s.yml@master
+    needs: [call-workflow-init, call-workflow-docker-build]
+    if: github.event_name != 'pull_request'
+    with:
+      KUBE_NAMESPACE: ${{ needs.call-workflow-init.outputs.KUBE_NAMESPACE }}
+      IMAGE_NAME: ${{ needs.call-workflow-init.outputs.image_name }}
+      ci_env: ${{ needs.call-workflow-init.outputs.ci_env }}
+      aws_region: ${{ needs.call-workflow-init.outputs.aws_region }}
+      k8s_cluster_name: ${{ needs.call-workflow-init.outputs.k8s_cluster_name }}
+      app_name: ${{ needs.call-workflow-init.outputs.app_name }}
+    secrets:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
deleted file mode 100644
index 325db662..00000000
--- a/.github/workflows/python.yml
+++ /dev/null
@@ -1,179 +0,0 @@
-name: Python CI
-on:
-  push:
-    branches:
-      - main
-      - develop
-  pull_request:
-  release:
-    types: [ released ]
-
-jobs:
-  linting:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.12"]
-
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Install dependencies
-        run: pip install pre-commit
-      - name: Run pre-commit
-        run: pre-commit run --all-files
-
-  test-app:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.12"]
-    services:
-      redis:
-        image: redis
-        options: >-
-          --health-cmd "redis-cli ping"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 6379:6379
-      postgres:
-        image: postgres:14
-        env:
-          POSTGRES_USER: postgres
-          POSTGRES_PASSWORD: postgres
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 5432:5432
-      rabbitmq:
-        image: rabbitmq:alpine
-        options: >-
-          --health-cmd "rabbitmqctl await_startup"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - "5672:5672"
-    steps:
-      - name: Setup and run ganache
-        run: |
-          docker run --detach --publish 8545:8545 --network-alias ganache -e DOCKER=true trufflesuite/ganache:latest --defaultBalanceEther 10000 --gasLimit 10000000 -a 30 --chain.chainId 1337 --chain.networkId 1337 -d
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-          cache-dependency-path: 'requirements*.txt'
-      - name: Install dependencies
-        run: |
-          pip install wheel setuptools
-          pip install -r requirements-test.txt
-        env:
-          PIP_USE_MIRRORS: true
-      - name: Run tests and coverage
-        run: |
-          python manage.py check
-          python manage.py makemigrations --check --dry-run
-          coverage run --source=$SOURCE_FOLDER -m pytest -rxXs --reruns 3
-        env:
-          SOURCE_FOLDER: safe_transaction_service
-          CELERY_BROKER_URL: redis://localhost:6379/0
-          COINMARKETCAP_API_TOKEN: ${{ secrets.COINMARKETCAP_API_TOKEN }}
-          DATABASE_URL: psql://postgres:postgres@localhost/postgres
-          DJANGO_SETTINGS_MODULE: config.settings.test
-          ETHEREUM_MAINNET_NODE: ${{ secrets.ETHEREUM_MAINNET_NODE }}
-          ETHEREUM_NODE_URL: http://localhost:8545
-          ETHEREUM_TRACING_NODE_URL: http://localhost:8545
-          ETHEREUM_4337_BUNDLER_URL: ${{ secrets.ETHEREUM_4337_BUNDLER_URL }}
-          ETH_HASH_BACKEND: pysha3
-          REDIS_URL: redis://localhost:6379/0
-          EVENTS_QUEUE_URL: amqp://guest:guest@localhost:5672/
-      - name: Coveralls
-        uses: coverallsapp/github-action@v2
-  docker-deploy:
-    runs-on: ubuntu-latest
-    needs:
-      - linting
-      - test-app
-    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop' || (github.event_name == 'release' && github.event.action == 'released')
-    steps:
-      - uses: actions/checkout@v4
-      - uses: docker/setup-qemu-action@v3
-        with:
-          platforms: arm64
-      - uses: docker/setup-buildx-action@v3
-      - name: Dockerhub login
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USER }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Deploy Master
-        if: github.ref == 'refs/heads/main'
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: docker/web/Dockerfile
-          push: true
-          tags: safeglobal/safe-transaction-service:staging
-          platforms: |
-            linux/amd64
-            linux/arm64
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-      - name: Deploy Develop
-        if: github.ref == 'refs/heads/develop'
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: docker/web/Dockerfile
-          push: true
-          tags: safeglobal/safe-transaction-service:develop
-          platforms: |
-            linux/amd64
-            linux/arm64
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-      - name: Deploy Tag
-        if: (github.event_name == 'release' && github.event.action == 'released')
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: docker/web/Dockerfile
-          push: true
-          tags: |
-            safeglobal/safe-transaction-service:${{ github.event.release.tag_name }}
-            safeglobal/safe-transaction-service:latest
-          platforms: |
-            linux/amd64
-            linux/arm64
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-  autodeploy:
-    runs-on: ubuntu-latest
-    needs: [docker-deploy]
-    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop'
-    steps:
-      - uses: actions/checkout@v4
-      - name: Deploy Staging
-        if: github.ref == 'refs/heads/main'
-        run: bash scripts/autodeploy.sh
-        env:
-          AUTODEPLOY_URL: ${{ secrets.AUTODEPLOY_URL }}
-          AUTODEPLOY_TOKEN: ${{ secrets.AUTODEPLOY_TOKEN }}
-          TARGET_ENV: "staging"
-      - name: Deploy Develop
-        if: github.ref == 'refs/heads/develop'
-        run: bash scripts/autodeploy.sh
-        env:
-          AUTODEPLOY_URL: ${{ secrets.AUTODEPLOY_URL }}
-          AUTODEPLOY_TOKEN: ${{ secrets.AUTODEPLOY_TOKEN }}
-          TARGET_ENV: "develop"
diff --git a/config/settings/production.py b/config/settings/production.py
index 361725b9..9bfce65a 100644
--- a/config/settings/production.py
+++ b/config/settings/production.py
@@ -64,6 +64,7 @@
 # ------------------------------------------------------------------------------
 # Django Admin URL regex.
 ADMIN_URL = env("DJANGO_ADMIN_URL", default="admin/")
+SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTOCOL", "https")

 # Gunicorn
 # ------------------------------------------------------------------------------
diff --git a/deployment.yaml b/deployment.yaml
new file mode 100644
index 00000000..9b73d730
--- /dev/null
+++ b/deployment.yaml
@@ -0,0 +1,153 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  annotations:
+    deployment.kubernetes.io/revision: '1'
+  labels:
+    app: rootstock-transaction-service
+  name: rootstock-transaction-service
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: rootstock-transaction-service
+  strategy:
+    type: Recreate
+  template:
+    metadata:
+      labels:
+        app: rootstock-transaction-service
+    spec:
+      containers:
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        ports:
+        - containerPort: 5555
+        env:
+        - name: WORKER_QUEUES
+          value: "default,indexing"
+        - name: RUN_MIGRATIONS
+          value: "1"
+        args: ["docker/web/celery/flower/run.sh"]
+        name: flower
+        resources:
+          requests:
+            memory: "3Gi"
+            cpu: "200m"
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        ports:
+        - containerPort: 5555
+        env:
+        - name: WORKER_QUEUES
+          value: "default,indexing"
+        - name: RUN_MIGRATIONS
+          value: "1"
+        args: ["docker/web/celery/scheduler/run.sh"]
+        name: scheduler
+        resources: {}
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        env:
+        - name: WORKER_QUEUES
+          value: "contracts,tokens"
+        - name: RUN_MIGRATIONS
+          value: "1"
+        args: ["docker/web/celery/worker/run.sh"]
+        name: contracts-tokens-worker
+        resources: {}
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        env:
+        - name: WORKER_QUEUES
+          value: "notifications,webhooks"
+        - name: RUN_MIGRATIONS
+          value: "1"
+        args: ["docker/web/celery/worker/run.sh"]
+        name: notifications-webhooks-worker
+        resources: {}
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        env:
+        - name: WORKER_QUEUES
+          value: "default,indexing"
+        - name: RUN_MIGRATIONS
+          value: "1"
+        args: ["docker/web/celery/worker/run.sh"]
+        name: indexer-worker
+        resources: {}
+      - image: sovryn/rootstock-transaction-service
+        imagePullPolicy: Always
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        name: web
+        resources: {}
+        volumeMounts:
+        - mountPath: /nginx
+          name: nginx-shared
+        args: ["docker/web/run_web.sh"]
+      - image: nginx:1.20-alpine
+        name: nginx
+        envFrom:
+        - secretRef:
+            name: rootstock-transaction-service
+        - secretRef:
+            name: rootstock-transaction-service-secret
+        ports:
+        - containerPort: 8000
+        resources: {}
+        volumeMounts:
+        - name: nginxconf
+          mountPath: /etc/nginx/nginx.conf
+          subPath: nginx.conf
+        - mountPath: /nginx
+          name: nginx-shared
+      restartPolicy: Always
+      volumes:
+      - name: nginx-shared
+        emptyDir: {}
+      - name: nginxconf
+        secret:
+          secretName: rootstock-transaction-service-nginx
+      imagePullSecrets:
+      - name: regcred
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: rootstock-transaction-service
+spec:
+  selector:
+    app: rootstock-transaction-service
+  ports:
+  - port: 443
+    targetPort: 8000
+  type: NodePort
diff --git a/docker/web/celery/worker/run.sh b/docker/web/celery/worker/run.sh
index 2df9e2cb..60e2cf0b 100755
--- a/docker/web/celery/worker/run.sh
+++ b/docker/web/celery/worker/run.sh
@@ -27,11 +27,16 @@ fi

 echo "==> $(date +%H:%M:%S) ==> Check RPC connected matches previously used RPC... "
 python manage.py check_chainid_matches
-echo "==> $(date +%H:%M:%S) ==> Running Celery worker for queues $WORKER_QUEUES with concurrency $TASK_CONCURRENCY <=="
-exec celery --no-color -A config.celery_app worker \
-    --pool=gevent \
-    --loglevel $log_level \
-    --concurrency="${TASK_CONCURRENCY}" \
-    --without-heartbeat \
-    --without-gossip \
-    --without-mingle -E -Q "$WORKER_QUEUES"
+# Run Celery as root
+export C_FORCE_ROOT=true
+
+echo "==> $(date +%H:%M:%S) ==> Running Celery worker with a max_memory_per_child of ${MAX_MEMORY_PER_CHILD} <=="
+# https://github.com/sumitasok/celery/issues/5#issuecomment-781717855
+exec celery -C -A config.celery_app worker \
+    --loglevel $log_level --pool=gevent \
+    -E \
+    --concurrency=${TASK_CONCURRENCY} \
+    --max-memory-per-child=${MAX_MEMORY_PER_CHILD} \
+    --max-tasks-per-child=${MAX_TASKS_PER_CHILD} \
+    --without-heartbeat --without-gossip \
+    --without-mingle -Q "$WORKER_QUEUES"
diff --git a/ingress.yaml b/ingress.yaml
new file mode 100644
index 00000000..4add829e
--- /dev/null
+++ b/ingress.yaml
@@ -0,0 +1,17 @@
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: rootstock-transaction-service-ingress
+spec:
+  ingressClassName: nginx
+  rules:
+  - host: ingress.hostname
+    http:
+      paths:
+      - backend:
+          service:
+            name: rootstock-transaction-service
+            port:
+              number: 443
+        path: /
+        pathType: Prefix
diff --git a/safe_transaction_service/tokens/clients/coingecko_client.py b/safe_transaction_service/tokens/clients/coingecko_client.py
index 09d72f78..7c26ec42 100644
--- a/safe_transaction_service/tokens/clients/coingecko_client.py
+++ b/safe_transaction_service/tokens/clients/coingecko_client.py
@@ -78,3 +78,39 @@ def get_token_logo_url(self, token_address: ChecksumAddress) -> Optional[str]:
         token_info = self.get_token_info(token_address)
         if token_info:
             return token_info["image"]["large"]
+
+    def get_ada_usd_price(self) -> float:
+        return self.get_price("cardano")
+
+    def get_avax_usd_price(self) -> float:
+        return self.get_price("avalanche-2")
+
+    def get_aoa_usd_price(self) -> float:
+        return self.get_price("aurora")
+
+    def get_bnb_usd_price(self) -> float:
+        return self.get_price("binancecoin")
+
+    def get_ewt_usd_price(self) -> float:
+        return self.get_price("energy-web-token")
+
+    def get_matic_usd_price(self) -> float:
+        return self.get_price("matic-network")
+
+    def get_gather_usd_price(self) -> float:
+        return self.get_price("gather")
+
+    def get_fuse_usd_price(self) -> float:
+        return self.get_price("fuse-network-token")
+
+    def get_kcs_usd_price(self) -> float:
+        return self.get_price("kucoin-shares")
+
+    def get_metis_usd_price(self) -> float:
+        return self.get_price("metis-token")
+
+    def get_btc_usd_price(self) -> float:
+        return self.get_price("bitcoin")
+
+    def get_mtr_usd_price(self) -> float:
+        return self.get_price("meter-stable")
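Annotation (not part of the patch): every helper added to CoingeckoClient above delegates to get_price() with a hard-coded Coingecko coin id. As a rough illustration of what such a lookup amounts to, the sketch below resolves a coin id against Coingecko's public simple-price endpoint. It is an assumption-laden stand-in, not the service's actual CoingeckoClient.get_price implementation.

    import requests

    COINGECKO_SIMPLE_PRICE_URL = "https://api.coingecko.com/api/v3/simple/price"


    def get_usd_price(coin_id: str) -> float:
        # coin_id is a Coingecko id such as "bitcoin" or "meter-stable",
        # matching the ids used by the new helpers above
        response = requests.get(
            COINGECKO_SIMPLE_PRICE_URL,
            params={"ids": coin_id, "vs_currencies": "usd"},
            timeout=10,
        )
        response.raise_for_status()
        return float(response.json()[coin_id]["usd"])


    print(get_usd_price("bitcoin"))  # prints the current BTC/USD price as a float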
diff --git a/safe_transaction_service/tokens/services/price_service.py b/safe_transaction_service/tokens/services/price_service.py
new file mode 100644
index 00000000..26efe033
--- /dev/null
+++ b/safe_transaction_service/tokens/services/price_service.py
@@ -0,0 +1,500 @@
+import operator
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from functools import cached_property
+from logging import getLogger
+from typing import Iterator, List, Optional, Sequence, Tuple
+
+from django.utils import timezone
+
+from cache_memoize import cache_memoize
+from cachetools import TTLCache, cachedmethod
+from eth_typing import ChecksumAddress
+from redis import Redis
+
+from gnosis.eth import EthereumClient, EthereumClientProvider
+from gnosis.eth.constants import NULL_ADDRESS
+from gnosis.eth.ethereum_client import EthereumNetwork
+from gnosis.eth.oracles import (
+    AaveOracle,
+    BalancerOracle,
+    ComposedPriceOracle,
+    CowswapOracle,
+    CurveOracle,
+    EnzymeOracle,
+    KyberOracle,
+    MooniswapOracle,
+    OracleException,
+    PoolTogetherOracle,
+    PriceOracle,
+    PricePoolOracle,
+    SuperfluidOracle,
+    SushiswapOracle,
+    UnderlyingToken,
+    UniswapV2Oracle,
+    UniswapV3Oracle,
+    YearnOracle,
+)
+
+from safe_transaction_service.utils.redis import get_redis
+
+from ..clients import CannotGetPrice, CoingeckoClient, KrakenClient, KucoinClient
+from ..tasks import EthValueWithTimestamp, calculate_token_eth_price_task
+
+logger = getLogger(__name__)
+
+
+class FiatCode(Enum):
+    USD = 1
+    EUR = 2
+
+
+@dataclass
+class FiatPriceWithTimestamp:
+    fiat_price: float
+    fiat_code: FiatCode
+    timestamp: datetime
+
+
+class PriceServiceProvider:
+    def __new__(cls):
+        if not hasattr(cls, "instance"):
+            cls.instance = PriceService(EthereumClientProvider(), get_redis())
+        return cls.instance
+
+    @classmethod
+    def del_singleton(cls):
+        if hasattr(cls, "instance"):
+            del cls.instance
+
+
+class PriceService:
+    def __init__(self, ethereum_client: EthereumClient, redis: Redis):
+        self.ethereum_client = ethereum_client
+        self.ethereum_network = self.ethereum_client.get_network()
+        self.redis = redis
+        self.coingecko_client = CoingeckoClient(self.ethereum_network)
+        self.kraken_client = KrakenClient()
+        self.kucoin_client = KucoinClient()
+        self.cache_ether_usd_price = TTLCache(
+            maxsize=2048, ttl=60 * 30
+        )  # 30 minutes of caching
+        self.cache_native_coin_usd_price = TTLCache(
+            maxsize=2048, ttl=60 * 30
+        )  # 30 minutes of caching
+        self.cache_token_eth_value = TTLCache(
+            maxsize=2048, ttl=60 * 30
+        )  # 30 minutes of caching
+        self.cache_token_usd_value = TTLCache(
+            maxsize=2048, ttl=60 * 30
+        )  # 30 minutes of caching
+        self.cache_underlying_token = TTLCache(
+            maxsize=2048, ttl=60 * 30
+        )  # 30 minutes of caching
+        self.cache_token_info = {}
+
+    @cached_property
+    def enabled_price_oracles(self) -> Tuple[PriceOracle]:
+        oracles = tuple(
+            Oracle(self.ethereum_client)
+            for Oracle in (
+                UniswapV3Oracle,
+                CowswapOracle,
+                UniswapV2Oracle,
+                SushiswapOracle,
+                KyberOracle,
+            )
+            if Oracle.is_available(self.ethereum_client)
+        )
+        if oracles:
+            if AaveOracle.is_available(self.ethereum_client):
+                oracles += (AaveOracle(self.ethereum_client, oracles[0]),)
+            if SuperfluidOracle.is_available(self.ethereum_client):
+                oracles += (SuperfluidOracle(self.ethereum_client, oracles[0]),)
+
+        return oracles
+
+    @cached_property
+    def enabled_price_pool_oracles(self) -> Tuple[PricePoolOracle]:
+        if not self.enabled_price_oracles:
+            return tuple()
+        oracles = tuple(
+            Oracle(self.ethereum_client, self.enabled_price_oracles[0])
+            for Oracle in (
+                BalancerOracle,
+                MooniswapOracle,
+            )
+        )
+
+        if UniswapV2Oracle.is_available(self.ethereum_client):
+            # Uses a different constructor than the other pool oracles
+            oracles = (UniswapV2Oracle(self.ethereum_client),) + oracles
+        return oracles
+
+    @cached_property
+    def enabled_composed_price_oracles(self) -> Tuple[ComposedPriceOracle]:
+        return tuple(
+            Oracle(self.ethereum_client)
+            for Oracle in (CurveOracle, YearnOracle, PoolTogetherOracle, EnzymeOracle)
+            if Oracle.is_available(self.ethereum_client)
+        )
+
+    def get_avalanche_usd_price(self) -> float:
+        try:
+            return self.kraken_client.get_avax_usd_price()
+        except CannotGetPrice:
+            return self.coingecko_client.get_avax_usd_price()
+
+    def get_aurora_usd_price(self) -> float:
+        try:
+            return self.kucoin_client.get_aurora_usd_price()
+        except CannotGetPrice:
+            return self.coingecko_client.get_aoa_usd_price()
+
+    def get_cardano_usd_price(self) -> float:
+        try:
+            return self.kraken_client.get_ada_usd_price()
+        except CannotGetPrice:
+            return self.coingecko_client.get_ada_usd_price()
+
+    def get_algorand_usd_price(self) -> float:
+        return self.kraken_client.get_algo_usd_price()
+
+    def get_binance_usd_price(self) -> float:
+        try:
+            return self.kucoin_client.get_bnb_usd_price()
+        except CannotGetPrice:
+            return self.coingecko_client.get_bnb_usd_price()
+
+    def get_ewt_usd_price(self) -> float:
+        try:
+            return self.kraken_client.get_ewt_usd_price()
+        except CannotGetPrice:
+            try:
+                return self.kucoin_client.get_ewt_usd_price()
+            except CannotGetPrice:
+                return self.coingecko_client.get_ewt_usd_price()
+
+    def get_matic_usd_price(self) -> float:
+        try:
+            return self.kraken_client.get_matic_usd_price()
+        except CannotGetPrice:
+            try:
+                return self.kucoin_client.get_matic_usd_price()
+            except CannotGetPrice:
+                return self.coingecko_client.get_matic_usd_price()
+
+    def get_cronos_usd_price(self) -> float:
+        return self.kucoin_client.get_cro_usd_price()
+
+    def get_xdc_usd_price(self) -> float:
+        return self.kucoin_client.get_xdc_usd_price()
+
+    def get_ftm_usd_price(self) -> float:
+        return self.kucoin_client.get_ftm_usd_price()
+
+    def get_kcs_usd_price(self) -> float:
+        try:
+            return self.kucoin_client.get_kcs_usd_price()
+        except CannotGetPrice:
+            return self.coingecko_client.get_kcs_usd_price()
+
+    def get_btc_usd_price(self) -> float:
+        return self.coingecko_client.get_btc_usd_price()
+
+    def get_mtr_usd_price(self) -> float:
+        return self.coingecko_client.get_mtr_usd_price()
+
+    @cachedmethod(cache=operator.attrgetter("cache_ether_usd_price"))
+    @cache_memoize(60 * 30, prefix="balances-get_ether_usd_price")  # 30 minutes
+    def get_ether_usd_price(self) -> float:
+        """
+        :return: USD Price for Ether
+        """
+        try:
+            return self.kraken_client.get_ether_usd_price()
+        except CannotGetPrice:
+            return self.kucoin_client.get_ether_usd_price()
+
+    @cachedmethod(cache=operator.attrgetter("cache_native_coin_usd_price"))
+    @cache_memoize(60 * 30, prefix="balances-get_native_coin_usd_price")  # 30 minutes
+    def get_native_coin_usd_price(self) -> float:
+        """
+        Get USD price for native coin. It depends on the ethereum network:
+        - On mainnet, use ETH/USD
+        - On xDAI, use DAI/USD.
+        - On EWT/VOLTA, use EWT/USD
+        - ...
+
+        :return: USD price for the network's native coin
+        """
+        if self.ethereum_network == EthereumNetwork.GNOSIS:
+            try:
+                return self.kraken_client.get_dai_usd_price()
+            except CannotGetPrice:
+                return 1  # DAI/USD should be close to 1
+        elif self.ethereum_network in (
+            EthereumNetwork.ENERGY_WEB_CHAIN,
+            EthereumNetwork.ENERGY_WEB_VOLTA_TESTNET,
+        ):
+            return self.get_ewt_usd_price()
+        elif self.ethereum_network in (EthereumNetwork.POLYGON, EthereumNetwork.MUMBAI):
+            return self.get_matic_usd_price()
+        elif self.ethereum_network == EthereumNetwork.BINANCE_SMART_CHAIN_MAINNET:
+            return self.get_binance_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.GATHER_DEVNET_NETWORK,
+            EthereumNetwork.GATHER_TESTNET_NETWORK,
+            EthereumNetwork.GATHER_MAINNET_NETWORK,
+        ):
+            return self.coingecko_client.get_gather_usd_price()
+        elif self.ethereum_network == EthereumNetwork.AVALANCHE_C_CHAIN:
+            return self.get_avalanche_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.MILKOMEDA_C1_TESTNET,
+            EthereumNetwork.MILKOMEDA_C1_MAINNET,
+        ):
+            return self.get_cardano_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.AURORA_MAINNET,
+            EthereumNetwork.ARBITRUM_RINKEBY,
+        ):
+            return self.get_aurora_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.CRONOS_TESTNET,
+            EthereumNetwork.CRONOS_MAINNET_BETA,
+        ):
+            return self.get_cronos_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.FUSE_MAINNET,
+            EthereumNetwork.FUSE_SPARKNET,
+        ):
+            return self.coingecko_client.get_fuse_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.KCC_MAINNET,
+            EthereumNetwork.KCC_TESTNET,
+        ):
+            return self.get_kcs_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.METIS_ANDROMEDA_MAINNET,
+            EthereumNetwork.METIS_GOERLI_TESTNET,
+            EthereumNetwork.METIS_STARDUST_TESTNET,
+        ):
+            return self.coingecko_client.get_metis_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.MILKOMEDA_A1_TESTNET,
+            EthereumNetwork.MILKOMEDA_A1_MAINNET,
+        ):
+            return self.get_algorand_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.CELO_MAINNET,
+            EthereumNetwork.CELO_ALFAJORES_TESTNET,
+            EthereumNetwork.CELO_BAKLAVA_TESTNET,
+        ):
+            return self.kucoin_client.get_celo_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.XINFIN_XDC_NETWORK,
+            EthereumNetwork.XDC_APOTHEM_NETWORK,
+        ):
+            return self.get_xdc_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.METER_MAINNET,
+            EthereumNetwork.METER_TESTNET,
+        ):
+            return self.coingecko_client.get_mtr_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.FANTOM_OPERA,
+            EthereumNetwork.FANTOM_TESTNET,
+        ):
+            return self.get_ftm_usd_price()
+        elif self.ethereum_network in (
+            EthereumNetwork.RSK_MAINNET,
+            EthereumNetwork.RSK_TESTNET,
+        ):
+            return self.get_btc_usd_price()
+        else:
+            return self.get_ether_usd_price()
+
+    @cachedmethod(cache=operator.attrgetter("cache_token_eth_value"))
+    @cache_memoize(60 * 30, prefix="balances-get_token_eth_value")  # 30 minutes
+    def get_token_eth_value(self, token_address: ChecksumAddress) -> float:
+        """
+        Uses multiple decentralized and centralized oracles to get token prices
+
+        :param token_address:
+        :return: Current ether value for a given `token_address`
+        """
+        if token_address in (
+            "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE",  # Used by some oracles
+            NULL_ADDRESS,
+        ):  # Ether
+            return 1.0
+
+        for oracle in self.enabled_price_oracles:
+            try:
+                eth_value = oracle.get_price(token_address)
+                logger.info(
+                    "Retrieved eth-value=%.4f for token-address=%s from %s",
+                    eth_value,
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+                return eth_value
+            except OracleException:
+                logger.debug(
+                    "Cannot get eth value for token-address=%s from %s",
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+
+        # Try pool tokens
+        for oracle in self.enabled_price_pool_oracles:
+            try:
+                eth_value = oracle.get_pool_token_price(token_address)
+                logger.info(
+                    "Retrieved eth-value=%.4f for token-address=%s from %s",
+                    eth_value,
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+                return eth_value
+            except OracleException:
+                logger.debug(
+                    "Cannot get eth value for token-address=%s from %s",
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+
+        logger.warning("Cannot find eth value for token-address=%s", token_address)
+        return 0.0
+
+    @cachedmethod(cache=operator.attrgetter("cache_token_usd_value"))
+    @cache_memoize(60 * 30, prefix="balances-get_token_usd_price")  # 30 minutes
+    def get_token_usd_price(self, token_address: ChecksumAddress) -> float:
+        """
+        :param token_address:
+        :return: usd value for a given `token_address` using Coingecko
+        """
+        if self.coingecko_client.supports_network(self.ethereum_network):
+            try:
+                return self.coingecko_client.get_token_price(token_address)
+            except CannotGetPrice:
+                pass
+        return 0.0
+
+    @cachedmethod(cache=operator.attrgetter("cache_underlying_token"))
+    @cache_memoize(60 * 30, prefix="balances-get_underlying_tokens")  # 30 minutes
+    def get_underlying_tokens(
+        self, token_address: ChecksumAddress
+    ) -> Optional[List[UnderlyingToken]]:
+        """
+        :param token_address:
+        :return: Underlying tokens for a given `token_address` from the enabled composed price oracles (Curve, Yearn...), or None if none can provide them
+        """
+        for oracle in self.enabled_composed_price_oracles:
+            try:
+                underlying_tokens = oracle.get_underlying_tokens(token_address)
+                logger.info(
+                    "Retrieved underlying tokens %s for token-address=%s from %s",
+                    underlying_tokens,
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+                return underlying_tokens
+            except OracleException:
+                logger.debug(
+                    "Cannot get an underlying token for token-address=%s from %s",
+                    token_address,
+                    oracle.__class__.__name__,
+                )
+
+    def get_token_cached_eth_values(
+        self, token_addresses: Sequence[ChecksumAddress]
+    ) -> Iterator[EthValueWithTimestamp]:
+        """
+        Get token eth prices with timestamp of calculation if ready on cache. If not, schedule tasks to do
+        the calculation so next time is available on cache and return `0.` and current datetime
+
+        :param token_addresses:
+        :return: eth prices with timestamp if ready on cache, `0.` and None otherwise
+        """
+        cache_keys = [
+            f"price-service:{token_address}:eth-price"
+            for token_address in token_addresses
+        ]
+        results = self.redis.mget(cache_keys)  # eth_value:epoch_timestamp
+        for token_address, cache_key, result in zip(
+            token_addresses, cache_keys, results
+        ):
+            if not token_address:  # Ether, this will not be used
+                yield EthValueWithTimestamp(
+                    1.0, timezone.now()
+                )  # Even if not used, Ether value in ether is 1 :)
+            elif result:
+                yield EthValueWithTimestamp.from_string(result.decode())
+            else:
+                task_result = calculate_token_eth_price_task.delay(
+                    token_address, cache_key
+                )
+                if task_result.ready():
+                    yield task_result.get()
+                else:
+                    yield EthValueWithTimestamp(0.0, timezone.now())
+
+    def get_token_cached_usd_values(
+        self, token_addresses: Sequence[ChecksumAddress]
+    ) -> Iterator[FiatPriceWithTimestamp]:
+        """
+        Get token usd prices with timestamp of calculation if ready on cache.
+
+        :param token_addresses:
+        :return: usd prices with timestamp if ready on cache, `0.` and None otherwise
+        """
+        try:
+            native_coin_usd_price = self.get_native_coin_usd_price()
+        except CannotGetPrice:
+            logger.warning("Cannot get Ether USD price", exc_info=True)
+            native_coin_usd_price = 0
+
+        for token_eth_values_with_timestamp in self.get_token_cached_eth_values(
+            token_addresses
+        ):
+            yield FiatPriceWithTimestamp(
+                native_coin_usd_price * token_eth_values_with_timestamp.eth_value,
+                FiatCode.USD,
+                token_eth_values_with_timestamp.timestamp,
+            )
+
+    def get_token_eth_price_from_oracles(self, token_address: ChecksumAddress) -> float:
+        """
+        :param token_address:
+        :return: Token/Ether price from oracles
+        """
+        return (
+            self.get_token_eth_value(token_address)
+            or self.get_token_usd_price(token_address)
+            / self.get_native_coin_usd_price()
+        )
+
+    def get_token_eth_price_from_composed_oracles(
+        self, token_address: ChecksumAddress
+    ) -> float:
+        """
+        :param token_address:
+        :return: Token/Ether price from composed oracles
+        """
+        eth_price = 0
+        if underlying_tokens := self.get_underlying_tokens(token_address):
+            for underlying_token in underlying_tokens:
+                # Find underlying token price and multiply by quantity
+                address = underlying_token.address
+                eth_price += (
+                    self.get_token_eth_price_from_oracles(address)
+                    * underlying_token.quantity
+                )
+
+        return eth_price
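Annotation (not part of the patch): the practical effect of the new RSK_MAINNET / RSK_TESTNET branch in get_native_coin_usd_price() is that the native coin (RBTC, which tracks BTC) is priced through get_btc_usd_price(), i.e. Coingecko's "bitcoin" feed. A minimal usage sketch, assuming a fully configured deployment (Django settings loaded, an RSK node behind EthereumClientProvider and a reachable Redis):

    from safe_transaction_service.tokens.services.price_service import (
        PriceServiceProvider,
    )

    # Singleton wrapper around PriceService, wired to the configured node and Redis
    price_service = PriceServiceProvider()

    # On RSK networks this resolves through get_btc_usd_price(), which in turn calls
    # CoingeckoClient.get_price("bitcoin"); the result is cached for 30 minutes
    rbtc_usd_price = price_service.get_native_coin_usd_price()
    print(rbtc_usd_price)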