I am trying to install the new Airflow 3 with Docker on a VM, with this configuration:
x-airflow-common: &airflow-common
  image: ${AIRFLOW_IMAGE}
  restart: unless-stopped
  env_file: .env
  environment:
    # ---- Airflow (main settings) ----
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN}
    AIRFLOW__SDK__API_BASE_URL: ${AIRFLOW__SDK__API_BASE_URL}
    AIRFLOW__CORE__EXECUTOR: ${AIRFLOW__CORE__EXECUTOR}
    AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY}
    AIRFLOW__CORE__LOAD_EXAMPLES: ${AIRFLOW__CORE__LOAD_EXAMPLES}
    AIRFLOW__CORE__DEFAULT_TIMEZONE: ${AIRFLOW__CORE__DEFAULT_TIMEZONE}
    AIRFLOW__WEBSERVER__BASE_URL: ${AIRFLOW__WEBSERVER__BASE_URL}
    AIRFLOW__WEBSERVER__DEFAULT_UI_TIMEZONE: ${AIRFLOW__WEBSERVER__DEFAULT_UI_TIMEZONE}
    AIRFLOW__DATABASE__SQL_ALCHEMY_ENGINE_OPTIONS: ${AIRFLOW__DATABASE__SQL_ALCHEMY_ENGINE_OPTIONS}
    AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_USERS: ${AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_USERS}
    AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_PASSWORDS_FILE: ${AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_PASSWORDS_FILE}
    # ---- Network / Proxy: neutralize any proxy for internal calls ----
    NO_PROXY: "localhost,127.0.0.1,web,af_web,airflow.local,*.local"
    no_proxy: "localhost,127.0.0.1,web,af_web,airflow.local,*.local"
    HTTP_PROXY: ""
    HTTPS_PROXY: ""
    http_proxy: ""
    https_proxy: ""
  user: "${AIRFLOW_UID}:${AIRFLOW_GID}"
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./simple_auth_passwords.json:/opt/airflow/simple_auth_passwords.json
  networks:
    airflow_net: {}

services:
  # === Airflow API + UI ===
  web:
    <<: *airflow-common
    container_name: af_web
    ports:
      - "8080:8080"
    command: >
      bash -lc "
        airflow db migrate &&
        airflow api-server --host 0.0.0.0 --port 8080 --proxy-headers --workers 4
      "
    healthcheck:
      test: ["CMD", "bash", "-lc", "curl -sf http://localhost:8080/api/v2/monitor/health | grep -q 'healthy'"]
      interval: 10s
      timeout: 5s
      retries: 12
    networks:
      airflow_net:
        aliases: [web, af_web]

  # === Scheduler ===
  scheduler:
    <<: *airflow-common
    container_name: af_scheduler
    command: >
      bash -lc "
        export NO_PROXY='localhost,127.0.0.1,web,af_web,airflow.local,*.local' &&
        export no_proxy='$NO_PROXY' &&
        export HTTP_PROXY= HTTPS_PROXY= http_proxy= https_proxy= &&
        export AIRFLOW__SDK__API_BASE_URL='http://web:8080' &&
        airflow db migrate &&
        airflow scheduler
      "
    depends_on:
      web:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "airflow", "db", "check"]
      interval: 20s
      timeout: 5s
      retries: 10

  # === DAG Processor (parses/serializes the DAGs) ===
  dag_processor:
    <<: *airflow-common
    container_name: af_dagproc
    command: >
      bash -lc "
        export NO_PROXY='localhost,127.0.0.1,web,af_web,airflow.local,*.local' &&
        export no_proxy='$NO_PROXY' &&
        export HTTP_PROXY= HTTPS_PROXY= http_proxy= https_proxy= &&
        export AIRFLOW__SDK__API_BASE_URL='http://web:8080' &&
        airflow dag-processor
      "
    depends_on:
      web:
        condition: service_healthy
      scheduler:
        condition: service_started

  # === Triggerer (asynchronous sensors) ===
  triggerer:
    <<: *airflow-common
    container_name: af_triggerer
    command: >
      bash -lc "
        export NO_PROXY='localhost,127.0.0.1,web,af_web,airflow.local,*.local' &&
        export no_proxy='$NO_PROXY' &&
        export HTTP_PROXY= HTTPS_PROXY= http_proxy= https_proxy= &&
        export AIRFLOW__SDK__API_BASE_URL='http://web:8080' &&
        airflow triggerer
      "
    depends_on:
      web:
        condition: service_healthy
      scheduler:
        condition: service_started

networks:
  airflow_net:
    driver: bridge
I have separated each service, as the documentation describes.
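For completeness, this is how I bring the stack up and watch it (standard Docker Compose commands; I am assuming the file above is named docker-compose.yml):

docker compose up -d
docker compose ps                  # all four containers start, and "web" reports healthy
docker compose logs -f scheduler   # this is where I found the error below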
Now every time I try to run a DAG I get this error:
Finished with state failed, but the task instance's state attribute is queued.
Looking at the scheduler's log, I see this error:
httpx.ConnectError: [Errno 111] Connection refused
From my understanding, in v3 the scheduler needs to connect to the web service through the API.
I ran airflow config get-value sdk api_base_url and got http://web:8080, which seems OK to me.
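To rule out DNS or proxy interference, this is the kind of check that can be run from inside the scheduler container (curl and the health endpoint are the same ones the web healthcheck above uses; the rest is standard shell):

docker compose exec scheduler bash -lc '
  env | grep -i proxy                              # proxy variables should be empty or unset
  getent hosts web                                 # the "web" alias should resolve on airflow_net
  curl -sv http://web:8080/api/v2/monitor/health   # same endpoint as the web healthcheck
'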
What's wrong here?
http:///web:8080 with 3 slashes? Is that what you actually got, or just a typo in this question?