# Set up a local Apache Airflow 3.2.0 environment rooted at ./airflow.
export AIRFLOW_HOME="$(pwd)/airflow"
mkdir -p "${AIRFLOW_HOME}/dags"   # -p creates ${AIRFLOW_HOME} itself as well

# Pin Airflow and its transitive dependencies with the official constraints
# file for Python 3.13. Quoted 'EOF' keeps the heredoc literal (no expansion).
cat << 'EOF' > requirements.txt
--constraint https://raw.githubusercontent.com/apache/airflow/constraints-3.2.0/constraints-3.13.txt
apache-airflow==3.2.0
graphviz==0.21
EOF

echo "3.13" > .python-version
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt

# Initialize the metadata database, then disable the bundled example DAGs.
airflow db migrate
# NOTE: 'sed -i' without a suffix argument is GNU sed; on BSD/macOS use: sed -i ''
sed -i 's/load_examples = True/load_examples = False/g' "${AIRFLOW_HOME}/airflow.cfg"
airflow standalone
Create a file named my_example_dag.py in ${AIRFLOW_HOME}/dags with the following content:
"""A minimal example DAG that prints a greeting once per minute."""
from airflow.sdk import DAG
from airflow.providers.standard.operators.python import PythonOperator
from datetime import datetime


def greeting():
    """Task callable: write a greeting to the task log via stdout."""
    print("Hello world!")


# Run every minute; catchup=False skips the backlog of runs between
# start_date (2000-01-01) and now.
with DAG(
    "my_example_dag",
    start_date=datetime(2000, 1, 1),
    schedule="*/1 * * * *",
    catchup=False,
) as dag:
    task = PythonOperator(
        task_id="greet_task",
        python_callable=greeting,
    )
(By default, Airflow scans for new DAGs every 5 minutes.)
# Verify the DAG was discovered, then unpause it.
airflow dags list
# Newly discovered DAGs start paused; the CLI subcommand is 'unpause'
# (the original 'unpaused' is not a valid Airflow CLI verb).
airflow dags unpause my_example_dag
Log in to the web console (http://localhost:8080). The default username and password are stored in ${AIRFLOW_HOME}/simple_auth_manager_passwords.json.generated.
# Tear down: delete the local Airflow environment created above.
# '--' stops option parsing so the path can never be misread as a flag.
rm -rf -- airflow