diff --git a/.circleci/config.yml b/.circleci/config.yml index f8ddaf08b..1bfa0730e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -24,14 +24,26 @@ jobs: test: docker: - - image: python:2.7 + - image: python:3.7 working_directory: ~/mozilla/telemetry-airflow steps: - checkout - run: pip install tox - run: python -m py_compile dags/*.py - run: find . -name *.pyc -delete - - run: tox -e py27 + - run: tox -e py37 + + verify-requirements: + docker: + - image: python:3.7 + steps: + - checkout + - run: + name: Verify that requirements.txt contains the right dependencies for this python version + command: | + pip install pip-tools + pip-compile --quiet + git diff --exit-code requirements.txt test-environment: machine: diff --git a/Dockerfile b/Dockerfile index 7494116fc..7090fb2c7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,8 @@ -FROM python:2.7-slim +FROM python:3.7-slim MAINTAINER Jannis Leidel +# Due to AIRFLOW-6854, Python 3.7 is chosen as the base python version. + # add a non-privileged user for installing and running the application RUN mkdir /app && \ chown 10001:10001 /app && \ diff --git a/Dockerfile.dev b/Dockerfile.dev index 89f32d1dd..79f9ac438 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM python:2.7-slim +FROM python:3.7-slim MAINTAINER Jannis Leidel # add a non-privileged user for installing and running the application diff --git a/README.md b/README.md index 047974bd7..f6f8c010e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ +# Telemetry-Airflow + [![CircleCi](https://circleci.com/gh/mozilla/telemetry-airflow.svg?style=shield&circle-token=62f4c1be98e5c9f36bd667edb7545fa736eed3ae)](https://circleci.com/gh/mozilla/telemetry-airflow) -# Telemetry-Airflow Airflow is a platform to programmatically author, schedule and monitor workflows. When workflows are defined as code, they become more maintainable, versionable, @@ -13,23 +14,22 @@ surgeries on DAGs a snap. The rich user interface makes it easy to visualize pipelines running in production, monitor progress, and troubleshoot issues when needed. -### Prerequisites +## Prerequisites This app is built and deployed with [docker](https://docs.docker.com/) and [docker-compose](https://docs.docker.com/compose/). -### Dependencies +### Updating Python dependencies -Most Airflow jobs are thin wrappers that spin up an EMR cluster for running -the job. Be aware that the configuration of the created EMR clusters depends -on finding scripts in an S3 location configured by the `SPARK_BUCKET` variable. -Those scripts are maintained in -[emr-bootstrap-spark](https://github.com/mozilla/emr-bootstrap-spark/) -and are deployed independently of this repository. -Changes in behavior of Airflow jobs not explained by changes in the source of the -Spark jobs or by changes in this repository -could be due to changes in the bootstrap scripts. +Add new Python dependencies into `requirements.in`. Run the following commands with the same Python +version specified by the Dockerfile. + +```bash +# As of time of writing, python3.7 +pip install pip-tools +pip-compile +``` ### Build Container @@ -42,6 +42,7 @@ make build ### Export Credentials For now, DAGs that use the Databricks operator won't parse until the following environment variables are set (see issue #501): + ``` AWS_SECRET_ACCESS_KEY AWS_ACCESS_KEY_ID @@ -55,57 +56,15 @@ Airflow database migration is no longer a separate step for dev but is run by th ## Testing A single task, e.g. `spark`, of an Airflow dag, e.g. 
`example`, can be run with an execution date, e.g. `2018-01-01`, in the `dev` environment with: + ```bash -export DEV_USERNAME=... -export AWS_SECRET_ACCESS_KEY=... -export AWS_ACCESS_KEY_ID=... make run COMMAND="test example spark 20180101" ``` -The `DEV_USERNAME` is a short string used to identify your EMR instances. -This should be set to something like your IRC or Slack handle. - -The container will run the desired task to completion (or failure). -Note that if the container is stopped during the execution of a task, -the task will be aborted. In the example's case, the Spark job will be -terminated. - -The logs of the task can be inspected in real-time with: ```bash docker logs -f telemetryairflow_scheduler_1 ``` -You can see task logs and see cluster status on -[the EMR console](https://us-west-2.console.aws.amazon.com/elasticmapreduce/home?region=us-west-2) - -By default, the results will end up in the `telemetry-test-bucket` in S3. -If your desired task depends on other views, it will expect to be able to find those results -in `telemetry-test-bucket` too. It's your responsibility to run the tasks in correct -order of their dependencies. - -**CAVEAT**: When running the `make run` multiple times it can spin -up multiple versions of the `web` container. It can also fail if you've never -run `make up` to initialize the database. An alternative form of the above is to -launch the containers and shell into the `web` container to run the `airflow -test` command. - -In one terminal launch the docker containers: -```bash -make up -``` - -Note: initializing the web container will run the airflow initdb/upgradedb - -In another terminal shell into the `web` container, making sure to also supply -the environment variables, then run the `airflow test` command: -```bash -export DEV_USERNAME=... -export AWS_ACCESS_KEY_ID=... -export AWS_SECRET_ACCESS_KEY=... -docker exec -ti -e DEV_USERNAME -e AWS_SECRET_ACCESS_KEY -e AWS_ACCESS_KEY_ID telemetry-airflow_web_1 /bin/bash -airflow test example spark 20180101 -``` - ### Adding dummy credentials Tasks often require credentials to access external credentials. For example, one may choose to store @@ -125,6 +84,7 @@ click the docker icon in the menu bar, click on preferences and change the available memory to 4GB. To deploy the Airflow container on the docker engine, with its required dependencies, run: + ```bash make up ``` @@ -136,7 +96,6 @@ All DAGs are paused by default for local instances and our staging instance of A In order to submit a DAG via the UI, you'll need to toggle the DAG from "Off" to "On". You'll likely want to toggle the DAG back to "Off" as soon as your desired task starts running. - #### Workaround for permission issues Users on Linux distributions will encounter permission issues with `docker-compose`. @@ -168,24 +127,12 @@ Finally, run the testing command using docker-compose directly: docker-compose exec web airflow test example spark 20180101 ``` -### Testing Dev Changes - -*Note: This only works for `telemetry-batch-view` jobs* - -A dev changes can be run by simply changing the `DEPLOY_TAG` environment variable -to whichever upstream branch you've pushed your local changes to. - -Afterwards, you're going to need to:`make clean` and `make build` and `nohup make up &` - -From there, you can either set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` in the -Dockerfile and run `make up` to get a local UI and run from there, or you can follow the -testing instructions above and use `make run`. 
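As a quick reference, the local testing workflow described above can be run end to end as follows. This is a sketch, not a definitive recipe: the DAG/task/date (`example spark 20180101`) and the container name (`telemetryairflow_scheduler_1`) are the README's own examples and may differ for your checkout or docker-compose project name.

```bash
# Sketch of the local test loop; names and dates are illustrative.
make build                                       # build the Airflow image
make up                                          # start the containers (also runs the DB migration)
make run COMMAND="test example spark 20180101"   # run one task for one execution date
docker logs -f telemetryairflow_scheduler_1      # follow the scheduler logs in another terminal
```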
- ### Testing GKE Jobs (including BigQuery-etl changes) For now, follow the steps outlined here to create a service account: https://bugzilla.mozilla.org/show_bug.cgi?id=1553559#c1. Enable that service account in Airflow with the following: + ``` make build && make up ./bin/add_gcp_creds $GOOGLE_APPLICATION_CREDENTIALS @@ -208,7 +155,8 @@ Set the project in the DAG entry to be configured based on development environme see the `ltv.py` job for an example of that. From there, run the following: -``` + +```bash make build && make up ./bin/add_gcp_creds $GOOGLE_APPLICATION_CREDENTIALS google_cloud_airflow_dataproc ``` @@ -248,7 +196,7 @@ variables: - `AIRFLOW_SMTP_HOST` -- The SMTP server to use to send emails e.g. `email-smtp.us-west-2.amazonaws.com` - `AIRFLOW_SMTP_USER` -- The SMTP user name -- `AIRFLOW_SMTP_PASSWORD` -- The SMTP password +- `AIRFLOW_SMTP_PASSWORD` -- The SMTP password - `AIRFLOW_SMTP_FROM` -- The email address to send emails from e.g. `telemetry-alerts@workflow.telemetry.mozilla.org` - `URL` -- The base URL of the website e.g. @@ -270,7 +218,9 @@ Also, please set Both values should be set by using the cryptography module's fernet tool that we've wrapped in a docker-compose call: - make secret +```bash +make secret +``` Run this for each key config variable, and **don't use the same for both!** diff --git a/airflow.cfg b/airflow.cfg index 173ee9c4c..3bd350cd8 100644 --- a/airflow.cfg +++ b/airflow.cfg @@ -3,9 +3,6 @@ default_timezone = utc log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ execution_date.strftime("%%Y-%%m-%%dT%%H:%%M:%%S") }}/{{ try_number }}.log -# The home folder for airflow, default is ~/airflow -airflow_home = $AIRFLOW_HOME - # The folder where your airflow pipelines live, most likely a # subfolder in a code repository dags_folder = $AIRFLOW_HOME/dags diff --git a/dags/operators/backport/gcp_container_operator_1_10_7.py b/dags/operators/backport/gcp_container_operator_1_10_7.py index 20ca9b0ac..cd06aaa4e 100644 --- a/dags/operators/backport/gcp_container_operator_1_10_7.py +++ b/dags/operators/backport/gcp_container_operator_1_10_7.py @@ -27,7 +27,7 @@ from airflow.contrib.hooks.gcp_container_hook import GKEClusterHook # Modified to import KubernetesPodOperator which imports 1.10.2 kube_client -from kubernetes_pod_operator_1_10_7 import KubernetesPodOperator +from .kubernetes_pod_operator_1_10_7 import KubernetesPodOperator from airflow.models import BaseOperator from airflow.utils.decorators import apply_defaults diff --git a/dags/operators/backport/kubernetes_pod_operator_1_10_7.py b/dags/operators/backport/kubernetes_pod_operator_1_10_7.py index b3eb6a07b..d0d17f8fd 100644 --- a/dags/operators/backport/kubernetes_pod_operator_1_10_7.py +++ b/dags/operators/backport/kubernetes_pod_operator_1_10_7.py @@ -24,7 +24,7 @@ from airflow.contrib.kubernetes import pod_generator, pod_launcher # import our own kube_client from 1.10.2. We also add pod name label to the pod. 
-from kube_client_1_10_2 import get_kube_client +from .kube_client_1_10_2 import get_kube_client from airflow.contrib.kubernetes.pod import Resources from airflow.utils.helpers import validate_key diff --git a/dags/operators/emr_spark_operator.py b/dags/operators/emr_spark_operator.py index fa344dc59..7a1c559ec 100644 --- a/dags/operators/emr_spark_operator.py +++ b/dags/operators/emr_spark_operator.py @@ -5,7 +5,7 @@ import boto3 from io import BytesIO from gzip import GzipFile -from urlparse import urlparse +from urllib.parse import urlparse import requests from airflow.models import BaseOperator from airflow.utils.decorators import apply_defaults diff --git a/dags/operators/gcp_container_operator.py b/dags/operators/gcp_container_operator.py index c35096c83..2300eba38 100644 --- a/dags/operators/gcp_container_operator.py +++ b/dags/operators/gcp_container_operator.py @@ -10,7 +10,7 @@ # We import upstream GKEPodOperator/KubernetesPodOperator from 1.10.7, modified to point to kube_client # from 1.10.2, because of some Xcom push breaking changes when using GKEPodOperator. -from backport.gcp_container_operator_1_10_7 import GKEPodOperator as UpstreamGKEPodOperator +from .backport.gcp_container_operator_1_10_7 import GKEPodOperator as UpstreamGKEPodOperator KUBE_CONFIG_ENV_VAR = "KUBECONFIG" GCLOUD_APP_CRED = "CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE" diff --git a/dags/utils/mozetl.py b/dags/utils/mozetl.py index 17e20cb55..208f0b82d 100644 --- a/dags/utils/mozetl.py +++ b/dags/utils/mozetl.py @@ -23,7 +23,7 @@ def mozetl_envvar(command, options, dev_options={}, other={}): prefixed_options = { "MOZETL_{}_{}".format(command.upper(), key.upper().replace("-", "_")): value - for key, value in options.iteritems() + for key, value in options.items() } prefixed_options["MOZETL_COMMAND"] = command prefixed_options.update(other) diff --git a/requirements.in b/requirements.in new file mode 100644 index 000000000..67bda6e9e --- /dev/null +++ b/requirements.in @@ -0,0 +1,24 @@ +boto3 +kombu==4.6.3 # CeleryExecutor issues with 1.10.2 supposedly fixed in 1.10.5 airflow, but still observed issues on 1.10.7 +# removed hdfs +apache-airflow[celery,postgres,hive,jdbc,async,password,crypto,github_enterprise,datadog,statsd,s3,mysql,google_auth,gcp_api,kubernetes]==1.10.10 +mozlogging +retrying +newrelic +redis +hiredis +requests +jsonschema +Flask-OAuthlib +pytz +werkzeug==0.16.0 +# The next requirements are for kubernetes-client/python +urllib3>=1.24.2 # MIT +ipaddress>=1.0.17;python_version=="2.7" # PSF +websocket-client>=0.32.0,!=0.40.0,!=0.41.*,!=0.42.* # LGPLv2+ +# Pin to older version, newer version has issues +JPype1==0.7.1 +shelljob==0.5.6 +# Fix no inspection available issue +# https://github.com/apache/airflow/issues/8211 +SQLAlchemy==1.3.15 diff --git a/requirements.txt b/requirements.txt index 74802ad08..8d054784b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,23 +1,171 @@ -boto3 -kombu==4.6.3 # CeleryExecutor issues with 1.10.2 supposedly fixed in 1.10.5 airflow, but still observed issues on 1.10.7 -apache-airflow[celery,postgres,hive,hdfs,jdbc,async,password,crypto,github_enterprise,datadog,statsd,s3,mysql,google_auth,gcp_api,kubernetes]==1.10.7 -mozlogging -retrying -newrelic -redis -hiredis -requests -jsonschema -Flask-OAuthlib -pytz -werkzeug==0.16.0 -# The next requirements are for kubernetes-client/python -urllib3>=1.24.2 # MIT -ipaddress>=1.0.17;python_version=="2.7" # PSF -websocket-client>=0.32.0,!=0.40.0,!=0.41.*,!=0.42.* # LGPLv2+ -# Pin to older version, newer version has 
issues -JPype1==0.7.1 -shelljob==0.5.6 -# Fix no inspection available issue -# https://github.com/apache/airflow/issues/8211 -SQLAlchemy==1.3.15 +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile +# +alembic==1.4.2 # via apache-airflow +amqp==2.6.0 # via kombu +apache-airflow[async,celery,crypto,datadog,gcp_api,github_enterprise,google_auth,hive,jdbc,kubernetes,mysql,password,postgres,s3,statsd]==1.10.10 # via -r requirements.in +apispec[yaml]==1.3.3 # via flask-appbuilder +argcomplete==1.11.1 # via apache-airflow +attrs==19.3.0 # via apache-airflow, cattrs, jsonschema +babel==2.8.0 # via flask-babel +bcrypt==3.1.7 # via apache-airflow, flask-bcrypt +billiard==3.6.3.0 # via celery +boto3==1.13.20 # via -r requirements.in, apache-airflow +botocore==1.16.20 # via boto3, s3transfer +cached-property==1.5.1 # via apache-airflow +cachetools==4.1.0 # via google-auth +cattrs==0.9.2 # via apache-airflow +celery==4.3.0 # via apache-airflow, flower +certifi==2020.4.5.1 # via kubernetes, requests +cffi==1.14.0 # via bcrypt, cryptography +chardet==3.0.4 # via requests +click==7.1.2 # via flask, flask-appbuilder, hmsclient +colorama==0.4.3 # via flask-appbuilder +colorlog==4.0.2 # via apache-airflow +configparser==3.5.3 # via apache-airflow +croniter==0.3.32 # via apache-airflow +cryptography==2.9.2 # via apache-airflow, pyopenssl +datadog==0.36.0 # via apache-airflow +decorator==4.4.2 # via datadog +defusedxml==0.6.0 # via python3-openid +dill==0.3.1.1 # via apache-airflow +dnspython==1.16.0 # via email-validator, eventlet +docutils==0.15.2 # via botocore, python-daemon +email-validator==1.1.1 # via flask-appbuilder +eventlet==0.25.2 # via apache-airflow +flask-admin==1.5.4 # via apache-airflow +flask-appbuilder==2.3.4 # via apache-airflow +flask-babel==1.0.0 # via flask-appbuilder +flask-bcrypt==0.7.1 # via apache-airflow +flask-caching==1.3.3 # via apache-airflow +flask-jwt-extended==3.24.1 # via flask-appbuilder +flask-login==0.4.1 # via apache-airflow, flask-appbuilder +flask-oauthlib==0.9.5 # via -r requirements.in, apache-airflow +flask-openid==1.2.5 # via flask-appbuilder +flask-sqlalchemy==2.4.3 # via flask-appbuilder +flask-swagger==0.2.13 # via apache-airflow +flask-wtf==0.14.3 # via apache-airflow, flask-appbuilder +flask==1.1.2 # via apache-airflow, flask-admin, flask-appbuilder, flask-babel, flask-bcrypt, flask-caching, flask-jwt-extended, flask-login, flask-oauthlib, flask-openid, flask-sqlalchemy, flask-swagger, flask-wtf +flower==0.9.4 # via apache-airflow +funcsigs==1.0.2 # via apache-airflow +future==0.18.2 # via apache-airflow, pyhive +gevent==20.5.2 # via apache-airflow +google-api-core[grpc,grpcgcp]==1.17.0 # via google-api-python-client, google-cloud-bigquery, google-cloud-bigtable, google-cloud-container, google-cloud-core, google-cloud-dlp, google-cloud-language, google-cloud-secret-manager, google-cloud-spanner, google-cloud-speech, google-cloud-texttospeech, google-cloud-translate, google-cloud-videointelligence, google-cloud-vision +google-api-python-client==1.8.4 # via apache-airflow +google-auth-httplib2==0.0.3 # via apache-airflow, google-api-python-client +google-auth-oauthlib==0.4.1 # via pandas-gbq, pydata-google-auth +google-auth==1.16.0 # via apache-airflow, google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, google-cloud-bigquery, google-cloud-storage, kubernetes, pandas-gbq, pydata-google-auth +google-cloud-bigquery==1.24.0 # via pandas-gbq +google-cloud-bigtable==1.2.1 # via apache-airflow 
+google-cloud-container==0.5.0 # via apache-airflow +google-cloud-core==1.3.0 # via google-cloud-bigquery, google-cloud-bigtable, google-cloud-spanner, google-cloud-storage, google-cloud-translate +google-cloud-dlp==0.15.0 # via apache-airflow +google-cloud-language==1.3.0 # via apache-airflow +google-cloud-secret-manager==1.0.0 # via apache-airflow +google-cloud-spanner==1.17.0 # via apache-airflow +google-cloud-speech==1.3.2 # via apache-airflow +google-cloud-storage==1.28.1 # via apache-airflow +google-cloud-texttospeech==1.0.1 # via apache-airflow +google-cloud-translate==2.0.1 # via apache-airflow +google-cloud-videointelligence==1.14.0 # via apache-airflow +google-cloud-vision==1.0.0 # via apache-airflow +google-resumable-media==0.5.0 # via google-cloud-bigquery, google-cloud-storage +googleapis-common-protos[grpc]==1.51.0 # via google-api-core, grpc-google-iam-v1 +graphviz==0.14 # via apache-airflow +greenlet==0.4.15 # via apache-airflow, eventlet, gevent +grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-container, google-cloud-secret-manager, google-cloud-spanner +grpcio-gcp==0.2.2 # via apache-airflow, google-api-core +grpcio==1.29.0 # via google-api-core, googleapis-common-protos, grpc-google-iam-v1, grpcio-gcp +gunicorn==19.10.0 # via apache-airflow +hiredis==1.0.1 # via -r requirements.in +hmsclient==0.1.1 # via apache-airflow +httplib2==0.18.1 # via apache-airflow, google-api-python-client, google-auth-httplib2 +humanize==0.5.1 # via flower +idna==2.9 # via email-validator, requests +importlib-metadata==1.6.0 # via argcomplete, jsonschema +iso8601==0.1.12 # via apache-airflow +itsdangerous==1.1.0 # via flask, flask-wtf +jaydebeapi==1.2.1 # via apache-airflow +jinja2==2.10.3 # via apache-airflow, flask, flask-babel +jmespath==0.10.0 # via boto3, botocore +jpype1==0.7.1 # via -r requirements.in, apache-airflow, jaydebeapi +json-merge-patch==0.2 # via apache-airflow +jsonschema==3.2.0 # via -r requirements.in, apache-airflow, flask-appbuilder +kombu==4.6.3 # via -r requirements.in, celery +kubernetes==11.0.0 # via apache-airflow +lazy-object-proxy==1.4.3 # via apache-airflow +lockfile==0.12.2 # via python-daemon +mako==1.1.3 # via alembic +markdown==2.6.11 # via apache-airflow +markupsafe==1.1.1 # via jinja2, mako, wtforms +marshmallow-enum==1.5.1 # via flask-appbuilder +marshmallow-sqlalchemy==0.23.1 # via flask-appbuilder +marshmallow==2.21.0 # via flask-appbuilder, marshmallow-enum, marshmallow-sqlalchemy +monotonic==1.5 # via eventlet +mozlogging==0.1.0 # via -r requirements.in +mysqlclient==1.3.14 # via apache-airflow +natsort==7.0.1 # via croniter +newrelic==5.14.0.142 # via -r requirements.in +numpy==1.18.4 # via pandas +oauthlib==2.1.0 # via apache-airflow, flask-oauthlib, requests-oauthlib +pandas-gbq==0.13.2 # via apache-airflow +pandas==0.25.3 # via apache-airflow, pandas-gbq +pendulum==1.4.4 # via apache-airflow +prison==0.1.3 # via flask-appbuilder +protobuf==3.12.2 # via google-api-core, google-cloud-bigquery, googleapis-common-protos +psutil==5.7.0 # via apache-airflow +psycopg2-binary==2.8.5 # via apache-airflow +pyasn1-modules==0.2.8 # via google-auth +pyasn1==0.4.8 # via pyasn1-modules, rsa +pycparser==2.20 # via cffi +pydata-google-auth==1.1.0 # via pandas-gbq +pygments==2.6.1 # via apache-airflow +pyhive==0.6.2 # via apache-airflow +pyjwt==1.7.1 # via flask-appbuilder, flask-jwt-extended +pyopenssl==19.1.0 # via apache-airflow +pyrsistent==0.16.0 # via jsonschema +python-daemon==2.1.2 # via apache-airflow +python-dateutil==2.8.1 # via 
alembic, apache-airflow, botocore, croniter, flask-appbuilder, kubernetes, pandas, pendulum, pyhive +python-editor==1.0.4 # via alembic +python3-openid==3.1.0 # via flask-openid +pytz==2020.1 # via -r requirements.in, babel, celery, flask-babel, flower, google-api-core, pandas, tzlocal +pytzdata==2019.3 # via pendulum +pyyaml==5.3.1 # via apispec, flask-swagger, kubernetes +redis==3.5.3 # via -r requirements.in +requests-oauthlib==1.1.0 # via apache-airflow, flask-oauthlib, google-auth-oauthlib, kubernetes +requests==2.23.0 # via -r requirements.in, apache-airflow, datadog, google-api-core, kubernetes, requests-oauthlib +retrying==1.3.3 # via -r requirements.in +rsa==4.0 # via google-auth +s3transfer==0.3.3 # via boto3 +setproctitle==1.1.10 # via apache-airflow +shelljob==0.5.6 # via -r requirements.in +six==1.15.0 # via bcrypt, cryptography, eventlet, flask-jwt-extended, google-api-core, google-api-python-client, google-auth, google-cloud-bigquery, google-resumable-media, grpcio, jsonschema, kubernetes, prison, protobuf, pyopenssl, pyrsistent, python-dateutil, retrying, sqlalchemy-utils, tenacity, thrift, websocket-client +sqlalchemy-jsonfield==0.9.0 # via apache-airflow +sqlalchemy-utils==0.36.6 # via flask-appbuilder +sqlalchemy==1.3.15 # via -r requirements.in, alembic, apache-airflow, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-jsonfield, sqlalchemy-utils +statsd==3.3.0 # via apache-airflow +tabulate==0.8.7 # via apache-airflow +tenacity==4.12.0 # via apache-airflow +termcolor==1.1.0 # via apache-airflow +text-unidecode==1.2 # via apache-airflow +thrift==0.13.0 # via apache-airflow, hmsclient +tornado==5.1.1 # via apache-airflow, flower +typing-extensions==3.7.4.2 # via apache-airflow +tzlocal==1.5.1 # via apache-airflow, pendulum +unicodecsv==0.14.1 # via apache-airflow +uritemplate==3.0.1 # via google-api-python-client +urllib3==1.25.9 # via -r requirements.in, botocore, kubernetes, requests +vine==1.3.0 # via amqp, celery +websocket-client==0.57.0 # via -r requirements.in, kubernetes +werkzeug==0.16.0 # via -r requirements.in, apache-airflow, flask, flask-caching, flask-jwt-extended +wtforms==2.3.1 # via flask-admin, flask-wtf +zipp==3.1.0 # via importlib-metadata +zope.deprecation==4.4.0 # via apache-airflow +zope.event==4.4 # via gevent +zope.interface==5.1.0 # via gevent + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/tox.ini b/tox.ini index 79514723f..94d32ec95 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27 +envlist = py37 skipsdist = True [testenv]
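For context on the changes to `dags/operators/emr_spark_operator.py` and `dags/utils/mozetl.py` above (`urlparse` → `urllib.parse`, `dict.iteritems()` → `dict.items()`), here is a minimal, illustrative sketch of the Python 3 forms; the URL and option values are made up for the example.

```python
# Illustrative only: the Python 3 idioms the operator/util modules switch to.
from urllib.parse import urlparse  # Python 2 spelled this `from urlparse import urlparse`

url = urlparse("s3://telemetry-test-bucket/some/prefix/file.json")
print(url.scheme, url.netloc, url.path)  # s3 telemetry-test-bucket /some/prefix/file.json

# dict.iteritems() is gone in Python 3; items() returns a view and works the same here.
options = {"submission-date": "20180101", "bucket": "telemetry-test-bucket"}
prefixed = {
    "MOZETL_EXAMPLE_{}".format(key.upper().replace("-", "_")): value
    for key, value in options.items()
}
print(prefixed)
```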
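The README above notes that `make secret` wraps the cryptography module's Fernet tool in a docker-compose call. The Make recipe itself isn't part of this diff, so the following is only a hedged sketch of the call it presumably makes.

```python
# Presumably the call `make secret` wraps: generate a Fernet key.
# Run it once per secret config variable and don't reuse the same key for both.
from cryptography.fernet import Fernet

print(Fernet.generate_key().decode())
```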