diff --git a/docker-compose.yml b/docker-compose.yml
index a116a411..5b7071bc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -136,12 +136,39 @@ services:
     ports:
       - 127.0.0.10:8786:8786
       - 127.0.0.10:8787:8787
-    command: ["dask-scheduler"]
+    entrypoint: bash -c "dask-scheduler"
   dask_worker_airflow:
+    environment:
+      - AIRFLOW__CORE__ENABLE_XCOM_PICKLING=true
+      - AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=True
+      - AIRFLOW__CORE__EXECUTOR=DaskExecutor
+      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@db/airflow
+      - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@db/airflow
+      - AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=False
+      - AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.basic_auth
+      - AIRFLOW__API__AUTH_BACKEND=airflow.api.auth.backend.basic_auth
+      - AIRFLOW__DASK__CLUSTER_ADDRESS=tcp://dask-scheduler-airflow:8786
+      - 'AIRFLOW_CONN_LINUX_NETWORK_STACK_BREEDER_SSH={ "conn_type": "ssh", "login": "godon_robot", "host": "10.0.5.53", "port": 22, "extra": { "key_file": "/opt/airflow/credentials/id_rsa" } }'
+      - ARCHIVE_DB_USER=yugabyte
+      - ARCHIVE_DB_PASSWORD=yugabyte
+      - ARCHIVE_DB_HOST=archive-db
+      - ARCHIVE_DB_PORT=5433
+      - ARCHIVE_DB_DATABASE=archive_db
+      - META_DB_USER=meta_data
+      - META_DB_PASSWORD=meta_data
+      - META_DB_HOSTNAME=meta-data-db
+      - META_DB_PORT=5432
+      - DLM_DB_USER=
+      - DLM_DB_PASSWORD=
+      - DLM_DB_HOST=locks_db
+      - DLM_DB_DATABASE=distributed_locking
+      - DASK_ENDPOINT=dask_scheduler:8786
+      - NATS_SERVER_URL="nats://godon_nats_1:4222"
+      - PROMETHEUS_URL="http://prometheus:9090"
     build:
       context: ./
       dockerfile: ./Dockerfile-dask
-    command: ["dask-worker", "tcp://dask-scheduler-airflow:8786"]
+    entrypoint: bash -c "dask-worker tcp://dask-scheduler-airflow:8786"
     deploy:
       replicas: 2
     volumes:
@@ -155,12 +182,39 @@ services:
     ports:
       - 127.0.0.11:8786:8786
       - 127.0.0.11:8787:8787
-    command: ["dask-scheduler"]
+    entrypoint: bash -c "dask-scheduler"
   dask_worker:
+    environment:
+      - AIRFLOW__CORE__ENABLE_XCOM_PICKLING=true
+      - AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=True
+      - AIRFLOW__CORE__EXECUTOR=DaskExecutor
+      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@db/airflow
+      - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@db/airflow
+      - AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=False
+      - AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.basic_auth
+      - AIRFLOW__API__AUTH_BACKEND=airflow.api.auth.backend.basic_auth
+      - AIRFLOW__DASK__CLUSTER_ADDRESS=tcp://dask-scheduler-airflow:8786
+      - 'AIRFLOW_CONN_LINUX_NETWORK_STACK_BREEDER_SSH={ "conn_type": "ssh", "login": "godon_robot", "host": "10.0.5.53", "port": 22, "extra": { "key_file": "/opt/airflow/credentials/id_rsa" } }'
+      - ARCHIVE_DB_USER=yugabyte
+      - ARCHIVE_DB_PASSWORD=yugabyte
+      - ARCHIVE_DB_HOST=archive-db
+      - ARCHIVE_DB_PORT=5433
+      - ARCHIVE_DB_DATABASE=archive_db
+      - META_DB_USER=meta_data
+      - META_DB_PASSWORD=meta_data
+      - META_DB_HOSTNAME=meta-data-db
+      - META_DB_PORT=5432
+      - DLM_DB_USER=
+      - DLM_DB_PASSWORD=
+      - DLM_DB_HOST=locks_db
+      - DLM_DB_DATABASE=distributed_locking
+      - DASK_ENDPOINT=dask_scheduler:8786
+      - NATS_SERVER_URL="nats://godon_nats_1:4222"
+      - PROMETHEUS_URL="http://prometheus:9090"
     build:
       context: ./
       dockerfile: ./Dockerfile-dask
-    command: ["dask-worker", "tcp://dask_scheduler:8786"]
+    entrypoint: bash -c "dask-worker tcp://dask_scheduler:8786"
     deploy:
       replicas: 2
   prometheus: