airflow · airflow-2.x · airflow-webserver

The conn_id environment variable isn't defined


I have a DAG that tries to connect to an HTTP endpoint, but somehow it doesn't work. I defined the connection using an environment variable, yet it is not seen by the HttpSensor and I get the error below, even though the variable was created in the system. What's wrong here? The DAG and the full error output follow.

The conn_id `AIRFLOW_VAR_FOO` isn't defined

DAG:

import os
import json
import pprint
import datetime
import requests

from airflow.models import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.providers.sftp.sensors.sftp import SFTPSensor
from airflow.utils.dates import days_ago
from airflow.models import Variable
from airflow.sensors.http_sensor import HttpSensor
from airflow.hooks.base_hook import BaseHook

def init_vars():
    # NOTE(review): AIRFLOW_VAR_* is the prefix for Airflow *Variables*, not
    # Connections. HttpSensor resolves a Connection, which must come from an
    # env var named AIRFLOW_CONN_<CONN_ID> — hence the "conn_id isn't
    # defined" error below.
    os.environ['AIRFLOW_VAR_FOO'] = "https://mywebxxx.net/"
    print(os.environ['AIRFLOW_VAR_FOO'])

with DAG(
         dag_id='request_test',
         schedule_interval=None,
         start_date=days_ago(2)) as dag:

    # NOTE(review): running init_vars via a PythonOperator sets the env var
    # only inside that task's own worker process; each task runs in a
    # separate process, so the sensor task never sees it.
    init_vars = PythonOperator(task_id="init_vars",
                                  python_callable=init_vars)

    task_is_api_active = HttpSensor(
        task_id='is_api_active',
        http_conn_id='AIRFLOW_VAR_FOO',  # not a valid conn_id — see note above
        endpoint='post'
    )

    # NOTE(review): the callable `get_data` is never defined anywhere in this
    # file — this line raises NameError as soon as the DAG file is parsed.
    get_data = PythonOperator(task_id="get_data",
                                  python_callable=get_data)

    init_vars >> task_is_api_active

Full Error:

 File "/home/airflow/.local/lib/python3.7/site-packages/airflow/models/connection.py", line 379, in get_connection_from_secrets
    raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined")
airflow.exceptions.AirflowNotFoundException: The conn_id `AIRFLOW_VAR_FOO` isn't defined
[2022-11-04 10:32:41,720] {taskinstance.py:1551} INFO - Marking task as FAILED. dag_id=request_test, task_id=is_api_active, execution_date=20221104T103235, start_date=20221104T103240, end_date=20221104T103241
[2022-11-04 10:32:42,628] {local_task_job.py:149} INFO - Task exited with return code 1

EDIT:

import os
import json
import pprint
import datetime
import requests

from airflow.models import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.providers.sftp.sensors.sftp import SFTPSensor
from airflow.utils.dates import days_ago
from airflow.models import Variable
from airflow.sensors.http_sensor import HttpSensor
from airflow.hooks.base_hook import BaseHook

def init_vars():
    # NOTE(review): still the wrong prefix — Connections are read from
    # AIRFLOW_CONN_<CONN_ID> env vars, AIRFLOW_VAR_* only creates Variables.
    os.environ['AIRFLOW_VAR_FOO'] = "https://mywebxxx.net/"
    print(os.environ['AIRFLOW_VAR_FOO'])

with DAG(
         dag_id='request_test',
         schedule_interval=None,
         start_date=days_ago(2)) as dag:

    init_vars = PythonOperator(task_id="init_vars",
                                  python_callable=init_vars)

    # NOTE(review): "call" is not Python — this line is a SyntaxError.
    # A plain `init_vars()` would run the function at DAG-parse time
    # (but the name was just rebound to the PythonOperator above).
    call init_vars()

    task_is_api_active = HttpSensor(
        task_id='is_api_active',
        http_conn_id='AIRFLOW_VAR_FOO',  # still not a defined conn_id
        endpoint='post'
    )

    # NOTE(review): the callable `get_data` is never defined in this file —
    # NameError at DAG-parse time.
    get_data = PythonOperator(task_id="get_data",
                                  python_callable=get_data)

    task_is_api_active

Solution

  • You need to define the env variable as AIRFLOW_CONN_VAR_FOO and then your http_conn_id="var_foo".

    for more details see the Airflow documentation on managing connections (environment-variable connections use the `AIRFLOW_CONN_<CONN_ID>` naming scheme)

    def init_vars():
        # Airflow resolves Connections from env vars named
        # AIRFLOW_CONN_<CONN_ID>; the conn_id is the suffix, lower-cased,
        # so this one is looked up as "var_foo". (AIRFLOW_VAR_* would
        # create a Variable, not a Connection — that was the bug.)
        os.environ['AIRFLOW_CONN_VAR_FOO'] = "https://mywebxxx.net/"
        print(os.environ['AIRFLOW_CONN_VAR_FOO'])

    # Call at DAG-parse time (top level of the DAG file), NOT from a
    # PythonOperator: each task runs in its own process, so an env var
    # set inside one task is never visible to the other tasks.
    init_vars()

    with DAG(
             dag_id='request_test',
             schedule_interval=None,
             start_date=days_ago(2)) as dag:

        task_is_api_active = HttpSensor(
            task_id='is_api_active',
            http_conn_id='var_foo',  # matches AIRFLOW_CONN_VAR_FOO above
            endpoint='post'
        )
        # (the question's broken get_data task is dropped: its callable
        # was never defined and it is unrelated to the fix)