Skip to content

Instantly share code, notes, and snippets.

@denzuko
Forked from sairamkrish/docker-compose.yml
Created October 12, 2023 02:07
Show Gist options
  • Save denzuko/2bce4b64abfbfe529ae12d795e2d9a8b to your computer and use it in GitHub Desktop.

Revisions

  1. Sairam Krish revised this gist Sep 6, 2020. 1 changed file with 6 additions and 10 deletions.
    16 changes: 6 additions & 10 deletions docker-compose.yml
    Original file line number Diff line number Diff line change
    @@ -5,7 +5,7 @@ version: "3.8"
    # I have tried to capture the core things to consider when someone else needs to achieve Docker Swarm based auto scale-out of workers.

    services:
    workflow_webserver:
    webserver:
    image: customized/airflow:prod
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    @@ -21,16 +21,14 @@ services:
    - 8080
    command: ["webserver", "-p", "8080"]
    networks:
    - platform
    - myNetwork
    deploy:
    placement:
    constraints:
    - "node.role==manager"

    workflow_scheduler:
    scheduler:
    image: customized/airflow:prod
    depends_on:
    - platformStore
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__BROKER_URL=pyamqp://username:pass@rabbitmq:5672/
    @@ -40,16 +38,14 @@ services:
    - workflow_logs:/opt/airflow/logs
    command: ["scheduler"]
    networks:
    - platform
    - myNetwork
    deploy:
    placement:
    constraints:
    - "node.role==manager"

    workflow_worker:
    worker:
    image: customized/airflow:prod
    depends_on:
    - workflow_scheduler
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__BROKER_URL=pyamqp://username:pass@rabbitmq:5672/
    @@ -63,7 +59,7 @@ services:
    hostname: '{{.Node.Hostname}}'
    command: worker
    networks:
    - platform
    - myNetwork
    deploy:
    replicas: 5
    placement:
  2. Sairam Krish created this gist Sep 6, 2020.
    72 changes: 72 additions & 0 deletions docker-compose.yml
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,72 @@
    version: "3.8"

    # This should give a high level idea of the approach.
    # The complete solution is too complex and involves multiple internal microservices.
    # I have tried to capture core things to consider while some else needs to achieve Docker swarm based auto scalout of workers.

    services:
    workflow_webserver:
    image: customized/airflow:prod
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__WEBSERVER__BASE_URL=http://localhost:8080/airflow
    - AIRFLOW__CELERY__DEFAULT_QUEUE=${HOSTNAME:-airflow}
    - AIRFLOW__CELERY__BROKER_URL=pyamqp://username:pass@rabbitmq:5672/
    - AIRFLOW__CELERY__RESULT_BACKEND=db+postgresql://username:password@db-host:5432/airflow-db
    volumes:
    - workflow_logs:/opt/airflow/logs
    - /var/run/docker.sock:/var/run/docker.sock
    - ${DOCKER_BIN_PATH}:/usr/bin/docker
    ports:
    - 8080
    command: ["webserver", "-p", "8080"]
    networks:
    - platform
    deploy:
    placement:
    constraints:
    - "node.role==manager"

    workflow_scheduler:
    image: customized/airflow:prod
    depends_on:
    - platformStore
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__BROKER_URL=pyamqp://username:pass@rabbitmq:5672/
    - AIRFLOW__CELERY__RESULT_BACKEND=db+postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__DEFAULT_QUEUE=${HOSTNAME:-airflow}
    volumes:
    - workflow_logs:/opt/airflow/logs
    command: ["scheduler"]
    networks:
    - platform
    deploy:
    placement:
    constraints:
    - "node.role==manager"

    workflow_worker:
    image: customized/airflow:prod
    depends_on:
    - workflow_scheduler
    environment:
    - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__BROKER_URL=pyamqp://username:pass@rabbitmq:5672/
    - AIRFLOW__CELERY__RESULT_BACKEND=db+postgresql://username:password@db-host:5432/airflow-db
    - AIRFLOW__CELERY__DEFAULT_QUEUE=${HOSTNAME:-airflow}
    volumes:
    - /var/run/docker.sock:/var/run/docker.sock
    - ${DOCKER_BIN_PATH}:/usr/bin/docker
    ports:
    - 8793
    hostname: '{{.Node.Hostname}}'
    command: worker
    networks:
    - platform
    deploy:
    replicas: 5
    placement:
    max_replicas_per_node: 1
    constraints:
    - "node.labels.type==celery-worker"