admin管理员组

文章数量:1220844

I have airflow folder that contains dags: /Users/user_1/Documents/Projects/myProject/airflow/dags (here where i create my_dag.py)
and .env file that contains variables used in docker-compose.yaml
when i access to airflow ui, I don't see any dag

${AIRFLOW_DAGS_FOLDER} is /Users/user_1/Documents/Projects/myProject/airflow/dags in .env file.

docker-compose.yaml
version: "3.9"

services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.17.0
    container_name: elasticsearch
    env_file:
      - .env
    environment:
      - discovery.type=single-node
      - ELASTIC_PASSWORD=${ES_PASSWORD}
      - xpack.security.enabled=true
    ports:
      - "9200:9200"
      - "9300:9300"
    volumes:
      - esdata:/usr/share/elasticsearch/data
    networks:
      - storywise_backend
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9200"]
      interval: 30s
      retries: 3
      start_period: 30s

  postgres:
    image: postgres:15
    container_name: postgres_db
    env_file:
      - .env
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=${POSTGRES_DEFAULT_DB}
    ports:
      - "127.0.0.1:5432:5432"
    volumes:
      - pgdata:/var/lib/postgresql/data
    networks:
      - storywise_backend
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER}"]
      interval: 10s
      retries: 3

  pgadmin:
    image: dpage/pgadmin4
    container_name: pgadmin
    env_file:
      - .env
    environment:
      - [email protected]
      - PGADMIN_DEFAULT_PASSWORD=admin
    ports:
      - "5050:80"
    volumes:
      - pgadmin_data:/var/lib/pgadmin
    depends_on:
      postgres:
        condition: service_healthy
    networks:
      - storywise_backend

  redis:
    image: redis:7
    container_name: redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    networks:
      - storywise_backend
    healthcheck:
      test: ["CMD", "redis-cli", "-h", "localhost", "ping"]
      interval: 10s
      retries: 3

  minio:
    image: minio/minio
    container_name: minio
    command: ["minio", "server", "/data", "--console-address", ":9001"]
    env_file:
      - .env
    environment:
      - MINIO_ROOT_USER=sw_admin_minio
      - MINIO_ROOT_PASSWORD=sw_admin_minio
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio_data:/data
    networks:
      - storywise_backend

  airflow-init:
    image: apache/airflow:2.7.0
    container_name: airflow_init
    entrypoint: /bin/bash
    command: -c "airflow db init && airflow users create --username admin --password admin --firstname Admin --lastname User --role Admin --email [email protected]"
    env_file:
      - .env
    environment:
      - AIRFLOW__CORE__EXECUTOR=CeleryExecutor
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DEFAULT_DB}
      - AIRFLOW__CELERY__BROKER_URL=redis://redis:6379/1
      - AIRFLOW__CORE__DAGS_FOLDER=${AIRFLOW_DAGS_FOLDER}
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - storywise_backend

  airflow-webserver:
    image: apache/airflow:2.7.0
    container_name: airflow_webserver
    restart: always
    command: ["airflow", "webserver"]
    env_file:
      - .env
    environment:
      - AIRFLOW__CORE__EXECUTOR=CeleryExecutor
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DEFAULT_DB}
      - AIRFLOW__CELERY__BROKER_URL=redis://redis:6379/1
      - AIRFLOW__CORE__DAGS_FOLDER=${AIRFLOW_DAGS_FOLDER}
    ports:
      - "8080:8080"
    volumes:
      - airflow_logs:/opt/airflow/logs
      - airflow_dags:${AIRFLOW_DAGS_FOLDER}
    depends_on:
      airflow-init:
        condition: service_completed_successfully
    networks:
      - storywise_backend

  airflow-scheduler:
    image: apache/airflow:2.7.0
    container_name: airflow_scheduler
    restart: always
    command: ["airflow", "scheduler"]
    env_file:
      - .env
    environment:
      - AIRFLOW__CORE__EXECUTOR=CeleryExecutor
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DEFAULT_DB}
      - AIRFLOW__CELERY__BROKER_URL=redis://redis:6379/1
      - AIRFLOW__CORE__DAGS_FOLDER=${AIRFLOW_DAGS_FOLDER}
    volumes:
      - airflow_logs:/opt/airflow/logs
      - airflow_dags:${AIRFLOW_DAGS_FOLDER}
    depends_on:
      airflow-init:
        condition: service_completed_successfully
    networks:
      - storywise_backend

networks:
  storywise_backend:
    driver: bridge

volumes:
  esdata:
  pgdata:
  pgadmin_data:
  redis_data:
  minio_data:
  airflow_logs:
  airflow_dags:

I rebuild docker-compose.yaml many times, but I got same issue

本文标签: dockerWhy I can39t see any dag in airflow UIStack Overflow