|
| 1 | +# Licensed to the Apache Software Foundation (ASF) under one |
| 2 | +# or more contributor license agreements. See the NOTICE file |
| 3 | +# distributed with this work for additional information |
| 4 | +# regarding copyright ownership. The ASF licenses this file |
| 5 | +# to you under the Apache License, Version 2.0 (the |
| 6 | +# "License"); you may not use this file except in compliance |
| 7 | +# with the License. You may obtain a copy of the License at |
| 8 | +# |
| 9 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 10 | +# |
| 11 | +# Unless required by applicable law or agreed to in writing, |
| 12 | +# software distributed under the License is distributed on an |
| 13 | +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
| 14 | +# KIND, either express or implied. See the License for the |
| 15 | +# specific language governing permissions and limitations |
| 16 | +# under the License. |
| 17 | + |
| 18 | +import contextlib |
| 19 | +import os |
| 20 | +import subprocess |
| 21 | +import tempfile |
| 22 | +from pathlib import Path |
| 23 | +from pprint import pprint |
| 24 | +from shutil import copyfile |
| 25 | +from time import monotonic, sleep |
| 26 | +from typing import Dict |
| 27 | +from unittest import mock |
| 28 | + |
| 29 | +import requests |
| 30 | + |
| 31 | +from docker_tests.command_utils import run_command |
| 32 | +from docker_tests.constants import SOURCE_ROOT |
| 33 | +from docker_tests.docker_tests_utils import docker_image |
| 34 | + |
# Credentials for the Airflow webserver API; fall back to the quick-start defaults
# used by the reference docker-compose file when the env vars are unset.
AIRFLOW_WWW_USER_USERNAME = os.environ.get("_AIRFLOW_WWW_USER_USERNAME", "airflow")
AIRFLOW_WWW_USER_PASSWORD = os.environ.get("_AIRFLOW_WWW_USER_PASSWORD", "airflow")
# Example DAG shipped with Airflow that this test unpauses and triggers.
DAG_ID = "example_bash_operator"
# Fixed run id so the triggered run can be looked up again by the assertions.
DAG_RUN_ID = "test_dag_run_id"
| 40 | + |
def api_request(method: str, path: str, base_url: str = "http://localhost:8080/api/v1", **kwargs) -> Dict:
    """Issue an authenticated JSON request against the Airflow REST API.

    :param method: HTTP verb, e.g. ``"GET"`` or ``"POST"``.
    :param path: endpoint path relative to *base_url* (no leading slash).
    :param base_url: root URL of the Airflow API.
    :param kwargs: extra keyword arguments forwarded to ``requests.request``.
    :return: the decoded JSON body of the response.
    :raises requests.HTTPError: when the server responds with an error status.
    """
    url = f"{base_url}/{path}"
    credentials = (AIRFLOW_WWW_USER_USERNAME, AIRFLOW_WWW_USER_PASSWORD)
    response = requests.request(
        method=method,
        url=url,
        auth=credentials,
        headers={"Content-Type": "application/json"},
        **kwargs,
    )
    response.raise_for_status()
    return response.json()
| 51 | + |
| 52 | + |
@contextlib.contextmanager
def tmp_chdir(path):
    """Temporarily change the current working directory to *path*.

    Yields the previous working directory and always restores it on exit,
    even when the managed block raises.
    """
    previous_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield previous_cwd
    finally:
        os.chdir(previous_cwd)
| 61 | + |
| 62 | + |
def wait_for_container(container_id: str, timeout: int = 300):
    """Block until *container_id* is healthy (or running with no health-check).

    Polls ``docker inspect`` once per second. Returns as soon as the container
    is running and healthy, or running with no HEALTHCHECK defined, or has left
    the running/restarting state altogether.

    :param container_id: docker container id to poll.
    :param timeout: maximum seconds to wait; ``0`` disables the timeout.
    :raises Exception: when polling exceeds *timeout* seconds.
    """

    def inspect(fmt: str) -> str:
        # Run ``docker inspect --format`` for this container and return the stripped output.
        return subprocess.check_output(["docker", "inspect", container_id, "--format", fmt]).decode().strip()

    container_name = inspect("{{ .Name }}")
    print(f"Waiting for container: {container_name} [{container_id}]")
    start_time = monotonic()
    while True:
        container_state = inspect("{{ .State.Status }}")
        if container_state in ("running", "restarting"):
            # "no-check" means the image defines no HEALTHCHECK, so "running" is
            # the strongest readiness signal docker can give us.
            health_status = inspect(
                "{{ if .State.Health }}{{ .State.Health.Status }}{{ else }}no-check{{ end }}"
            )
            print(f"{container_name}: container_state={container_state}, health_status={health_status}")
            # Return immediately on success — the original looped once more and could
            # even raise a spurious Timeout for a container that just became healthy.
            if health_status in ("healthy", "no-check"):
                return
        else:
            # The container left the running state (e.g. "exited").
            # NOTE(review): this deliberately does not raise — later steps of the
            # test surface the failure via docker-compose logs.
            print(f"{container_name}: container_state={container_state}")
            return
        if timeout != 0 and monotonic() - start_time > timeout:
            raise Exception(f"Timeout. The operation takes longer than the maximum waiting time ({timeout}s)")
        sleep(1)
| 102 | + |
| 103 | + |
def wait_for_terminal_dag_state(dag_id, dag_run_id):
    """Poll the DAG run's state for up to 30 seconds, returning early on a terminal state.

    Checks the state via the REST API once per second. Does not raise on
    timeout — callers must re-fetch and assert the final state themselves.

    :param dag_id: id of the DAG being watched.
    :param dag_run_id: id of the specific DAG run.
    """
    # Wait at most 30 seconds.
    for _ in range(30):
        dag_state = api_request("GET", f"dags/{dag_id}/dagRuns/{dag_run_id}").get("state")
        print(f"Waiting for DAG Run: dag_state={dag_state}")
        # Check before sleeping — the original slept one extra second after the
        # run had already reached a terminal state.
        if dag_state in ("success", "failed"):
            break
        sleep(1)
| 112 | + |
| 113 | + |
def test_trigger_dag_and_wait_for_result():
    """End-to-end smoke test of the quick-start docker-compose stack.

    Copies the reference docker-compose file into a temp directory, starts the
    stack with the image under test, unpauses and triggers the example DAG, and
    asserts the run finishes with state "success". Dumps diagnostics and always
    tears the stack down afterwards.
    """
    compose_file_path = SOURCE_ROOT / "docs" / "apache-airflow" / "start" / "docker-compose.yaml"

    with tempfile.TemporaryDirectory() as tmp_dir, tmp_chdir(tmp_dir), mock.patch.dict(
        'os.environ', AIRFLOW_IMAGE_NAME=docker_image
    ):
        copyfile(str(compose_file_path), f"{tmp_dir}/docker-compose.yaml")
        os.mkdir(f"{tmp_dir}/dags")
        os.mkdir(f"{tmp_dir}/logs")
        os.mkdir(f"{tmp_dir}/plugins")
        # Run the containers as the current host user so the mounted volumes stay
        # writable. ``id -u`` output ends with a newline, so strip it before writing
        # the .env entry (the original produced "AIRFLOW_UID=...\n\n").
        host_uid = subprocess.check_output(["id", "-u"]).decode().strip()
        (Path(tmp_dir) / ".env").write_text(f"AIRFLOW_UID={host_uid}\n")
        print(".env=", (Path(tmp_dir) / ".env").read_text())
        copyfile(
            str(SOURCE_ROOT / "airflow" / "example_dags" / "example_bash_operator.py"),
            f"{tmp_dir}/dags/example_bash_operator.py",
        )

        run_command(["docker-compose", "config"])
        run_command(["docker-compose", "down", "--volumes", "--remove-orphans"])
        try:
            run_command(["docker-compose", "up", "-d"])
            # The --wait condition was released in docker-compose v2.1.1, but we still
            # want to support docker-compose v1, so poll each container ourselves.
            # See:
            # https://github.com/docker/compose/releases/tag/v2.1.1
            # https://github.com/docker/compose/pull/8777
            for container_id in (
                subprocess.check_output(["docker-compose", 'ps', '-q']).decode().strip().splitlines()
            ):
                wait_for_container(container_id)
            # Unpause the example DAG, then trigger exactly one run of it.
            api_request("PATCH", path=f"dags/{DAG_ID}", json={"is_paused": False})
            api_request("POST", path=f"dags/{DAG_ID}/dagRuns", json={"dag_run_id": DAG_RUN_ID})
            try:
                wait_for_terminal_dag_state(dag_id=DAG_ID, dag_run_id=DAG_RUN_ID)
                dag_state = api_request("GET", f"dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}").get("state")
                assert dag_state == "success"
            except Exception:
                # Dump the DAG run and its task instances to aid debugging, then re-raise.
                print(f"HTTP: GET dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}")
                pprint(api_request("GET", f"dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}"))
                print(f"HTTP: GET dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}/taskInstances")
                pprint(api_request("GET", f"dags/{DAG_ID}/dagRuns/{DAG_RUN_ID}/taskInstances"))
                raise
        except Exception:
            # Show container status and logs for post-mortem before propagating the failure.
            run_command(["docker", "ps"])
            run_command(["docker-compose", "logs"])
            raise
        finally:
            run_command(["docker-compose", "down", "--volumes"])
0 commit comments