Add running provider tests against Airflow 2.7 (apache#39862)
potiuk authored May 28, 2024
1 parent dad3c59 commit c531e38
Showing 27 changed files with 170 additions and 53 deletions.
4 changes: 2 additions & 2 deletions dev/breeze/src/airflow_breeze/global_constants.py
@@ -488,9 +488,9 @@ def get_airflow_extras():
 BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str | list[str]]] = [
     {
         "python-version": "3.8",
-        "airflow-version": "2.7.1",
+        "airflow-version": "2.7.3",
         "remove-providers": "common.io fab",
-        "run-tests": "false",
+        "run-tests": "true",
     },
     {
         "python-version": "3.8",
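
The two value changes above are the heart of this commit: the Airflow 2.7 entry of the compatibility matrix is bumped to the latest 2.7 patch release (2.7.3) and flipped from build-only verification ("run-tests": "false") to actually running the provider test suite, with the common.io and fab providers removed first since they need a newer Airflow core. As a rough, hypothetical sketch (not the actual Breeze code) of how such an entry can drive a CI run:

from __future__ import annotations

# Hypothetical consumer of the compatibility matrix; the constant mirrors the
# entry above, but the loop itself is illustrative only, not real Breeze code.
BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str]] = [
    {
        "python-version": "3.8",
        "airflow-version": "2.7.3",
        "remove-providers": "common.io fab",
        "run-tests": "true",
    },
]

for check in BASE_PROVIDERS_COMPATIBILITY_CHECKS:
    if check["run-tests"] != "true":
        continue  # build-only entry: verify the image, skip the test suite
    excluded = check["remove-providers"].split()
    print(
        f"Running provider tests against Airflow {check['airflow-version']} "
        f"(Python {check['python-version']}); providers removed first: {excluded}"
    )
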
2 changes: 1 addition & 1 deletion tests/api_connexion/endpoints/test_extra_link_endpoint.py
@@ -22,7 +22,6 @@
 import pytest
 
 from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.models.dag import DAG
 from airflow.models.dagbag import DagBag
 from airflow.models.xcom import XCom
@@ -34,6 +33,7 @@
 from airflow.utils.state import DagRunState
 from airflow.utils.types import DagRunType
 from tests.test_utils.api_connexion_utils import create_user, delete_user
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.db import clear_db_runs, clear_db_xcom
 from tests.test_utils.mock_plugins import mock_plugin_manager
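
This import swap repeats in the plugin endpoint and schema tests below: the airflow.models.baseoperatorlink module is not importable on every Airflow version this suite now runs against, so the tests pull BaseOperatorLink from a compat shim instead. A minimal sketch of what such a re-export might look like, assuming the class moved modules between 2.7 and 2.8 (the real tests/test_utils/compat.py may differ):

try:
    from airflow.models.baseoperatorlink import BaseOperatorLink  # Airflow 2.8+
except ImportError:
    # Fall back to the pre-2.8 home of the class.
    from airflow.models.baseoperator import BaseOperatorLink  # type: ignore[no-redef]

__all__ = ["BaseOperatorLink"]
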
2 changes: 1 addition & 1 deletion tests/api_connexion/endpoints/test_plugin_endpoint.py
@@ -23,13 +23,13 @@
 from flask_appbuilder import BaseView
 
 from airflow.hooks.base import BaseHook
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.plugins_manager import AirflowPlugin
 from airflow.security import permissions
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
 from airflow.timetables.base import Timetable
 from airflow.utils.module_loading import qualname
 from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user
+from tests.test_utils.compat import BaseOperatorLink
 from tests.test_utils.config import conf_vars
 from tests.test_utils.mock_plugins import mock_plugin_manager
2 changes: 1 addition & 1 deletion tests/api_connexion/schemas/test_plugin_schema.py
@@ -25,8 +25,8 @@
     plugin_schema,
 )
 from airflow.hooks.base import BaseHook
-from airflow.models.baseoperatorlink import BaseOperatorLink
 from airflow.plugins_manager import AirflowPlugin
+from tests.test_utils.compat import BaseOperatorLink
 
 
 class PluginHook(BaseHook): ...
5 changes: 3 additions & 2 deletions tests/conftest.py
@@ -334,7 +334,7 @@ def initial_db_init():
     from airflow.utils import db
     from airflow.www.extensions.init_appbuilder import init_appbuilder
     from airflow.www.extensions.init_auth_manager import get_auth_manager
-    from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
+    from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_10_PLUS
 
     if AIRFLOW_V_2_10_PLUS:
         db.resetdb(use_migration_files=True)
@@ -345,7 +345,8 @@ def initial_db_init():
     flask_app = Flask(__name__)
     flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN")
     init_appbuilder(flask_app)
-    get_auth_manager().init()
+    if AIRFLOW_V_2_8_PLUS:
+        get_auth_manager().init()
 
 
 @pytest.fixture(autouse=True, scope="session")
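
initial_db_init now guards get_auth_manager().init() behind AIRFLOW_V_2_8_PLUS, since the pluggable auth-manager machinery only exists from Airflow 2.8 on. These AIRFLOW_V_X_Y_PLUS flags are plain booleans derived from the installed Airflow version; a minimal sketch of how tests/test_utils/compat.py could define them (assumed here, the real module may differ):

from packaging.version import Version

from airflow import __version__ as airflow_version

# Compare the base version so release candidates and dev builds count too.
AIRFLOW_VERSION = Version(Version(airflow_version).base_version)
AIRFLOW_V_2_8_PLUS = AIRFLOW_VERSION >= Version("2.8.0")
AIRFLOW_V_2_9_PLUS = AIRFLOW_VERSION >= Version("2.9.0")
AIRFLOW_V_2_10_PLUS = AIRFLOW_VERSION >= Version("2.10.0")
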
tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py

@@ -23,10 +23,13 @@
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires Airflow 2.8+")
+
 
 @pytest.mark.db_test
 class TestAvpCommands:
tests/providers/amazon/aws/auth_manager/cli/test_idc_commands.py

@@ -23,10 +23,13 @@
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.idc_commands import init_idc
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires Airflow 2.8+")
+
 
 @pytest.mark.db_test
 class TestIdcCommands:
4 changes: 4 additions & 0 deletions tests/providers/amazon/aws/auth_manager/views/test_auth.py
@@ -23,10 +23,14 @@
 from airflow.exceptions import AirflowException
 from airflow.www import app as application
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 from tests.test_utils.config import conf_vars
 
 pytest.importorskip("onelogin")
 
+pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires Airflow 2.8+")
+
+
 SAML_METADATA_URL = "/saml/metadata"
 SAML_METADATA_PARSED = {
     "idp": {
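
The module-level pytestmark assignment seen in the three AWS auth-manager files above (and in the common.sql files below) is pytest's idiom for gating a whole module: the marker is applied to every test defined in the file, so on Airflow 2.7 the module is still collected but each test is reported as skipped. A self-contained, hypothetical illustration:

import pytest

from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS

# One assignment covers every test function and class in this module.
pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires Airflow 2.8+")


def test_something_2_8_specific():
    assert AIRFLOW_V_2_8_PLUS  # only executes on Airflow 2.8+

Running pytest with -rs prints the skipped tests together with these reason strings, which makes the version gating visible in CI logs.
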
41 changes: 23 additions & 18 deletions tests/providers/atlassian/jira/hooks/test_jira.py
@@ -24,6 +24,7 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.hooks.jira import JiraHook
+from tests.test_utils.compat import connection_as_json
 
 
 @pytest.fixture
@@ -46,27 +47,31 @@ def setup_test_cases(self, monkeypatch):
 
         monkeypatch.setenv(
             f"AIRFLOW_CONN_{self.conn_id}".upper(),
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
         monkeypatch.setenv(
             f"AIRFLOW_CONN_{self.conn_id_with_str_verify}".upper(),
-            Connection(
-                conn_id=self.conn_id_with_str_verify,
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": "False", "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id=self.conn_id_with_str_verify,
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": "False", "project": "AIRFLOW"}',
+                )
+            ),
        )
 
     def test_jira_client_connection(self, mocked_jira_client):
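
Connection.as_json() is not available on all Airflow versions this suite targets, so the Jira hook, operator, and sensor tests route connection serialization through a connection_as_json helper. A sketch of what that helper might look like, assuming as_json arrived in Airflow 2.8 (the real tests/test_utils/compat.py may differ):

import json

from airflow.models import Connection


def connection_as_json(connection: Connection) -> str:
    """Serialize a Connection for use in an AIRFLOW_CONN_* environment variable."""
    if hasattr(connection, "as_json"):  # Airflow 2.8+
        return connection.as_json()
    # Pre-2.8 fallback: build an equivalent JSON payload by hand.
    fields = {
        "conn_type": connection.conn_type,
        "host": connection.host,
        "login": connection.login,
        "password": connection.password,
        "schema": connection.schema,
        "port": connection.port,
        "extra": connection.extra,
    }
    return json.dumps({name: value for name, value in fields.items() if value is not None})
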
17 changes: 10 additions & 7 deletions tests/providers/atlassian/jira/operators/test_jira.py
@@ -24,6 +24,7 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.operators.jira import JiraOperator
 from airflow.utils import timezone
+from tests.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
@@ -49,13 +50,15 @@ class TestJiraOperator:
     def setup_test_cases(self, monkeypatch):
         monkeypatch.setenv(
             "AIRFLOW_CONN_JIRA_DEFAULT",
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
         with mock.patch("airflow.models.baseoperator.BaseOperator.xcom_push", return_value=None) as m:
             self.mocked_xcom_push = m
21 changes: 12 additions & 9 deletions tests/providers/atlassian/jira/sensors/test_jira.py
@@ -24,6 +24,7 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor
 from airflow.utils import timezone
+from tests.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
@@ -49,15 +50,17 @@ class TestJiraSensor:
     def setup_test_cases(self, monkeypatch):
         monkeypatch.setenv(
             "AIRFLOW_CONN_JIRA_DEFAULT".upper(),
-            Connection(
-                conn_id="jira_default",
-                conn_type="jira",
-                host="https://localhost/jira/",
-                port=443,
-                login="user",
-                password="password",
-                extra='{"verify": false, "project": "AIRFLOW"}',
-            ).as_json(),
+            connection_as_json(
+                Connection(
+                    conn_id="jira_default",
+                    conn_type="jira",
+                    host="https://localhost/jira/",
+                    port=443,
+                    login="user",
+                    password="password",
+                    extra='{"verify": false, "project": "AIRFLOW"}',
+                )
+            ),
         )
 
     def test_issue_label_set(self, mocked_jira_client):
tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py

@@ -31,9 +31,9 @@
 from airflow import DAG
 from airflow.models import Connection, DagRun, TaskInstance
 from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
-from airflow.template.templater import LiteralValue
 from airflow.utils import db, timezone
 from airflow.utils.types import DagRunType
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
 
 @patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
@@ -624,12 +624,17 @@ def test_resolve_application_file_template_non_dictionary(dag_maker, tmp_path, b
 @pytest.mark.parametrize(
     "use_literal_value", [pytest.param(True, id="literal-value"), pytest.param(False, id="whitespace-compat")]
 )
+@pytest.mark.skipif(
+    not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue for Airflow < 2.8.0"
+)
 def test_resolve_application_file_real_file(create_task_instance_of_operator, tmp_path, use_literal_value):
     application_file = tmp_path / "test-application-file.yml"
     application_file.write_text("foo: bar\nspam: egg")
 
     application_file = application_file.resolve().as_posix()
     if use_literal_value:
+        from airflow.template.templater import LiteralValue
+
         application_file = LiteralValue(application_file)
     else:
         # Prior to Airflow 2.8, the workaround was adding whitespace at the end of the filepath
@@ -649,8 +654,13 @@ def test_resolve_application_file_real_file(create_task_instance_of_operator, tm
 
 
 @pytest.mark.db_test
+@pytest.mark.skipif(
+    not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue for Airflow < 2.8.0"
+)
 def test_resolve_application_file_real_file_not_exists(create_task_instance_of_operator, tmp_path):
     application_file = (tmp_path / "test-application-file.yml").resolve().as_posix()
+    from airflow.template.templater import LiteralValue
+
     ti = create_task_instance_of_operator(
         SparkKubernetesOperator,
         application_file=LiteralValue(application_file),
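
Besides the skipif markers, note where the LiteralValue import went: it moved from module level into the gated test bodies, so that merely importing (collecting) this module no longer fails on versions where airflow.template.templater has no LiteralValue. A stripped-down, hypothetical example of the same deferred-import technique:

import pytest

from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS


@pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="LiteralValue requires Airflow 2.8+")
def test_literal_value_is_rendered_as_is():
    # Deferred import: evaluated only when the test body runs, i.e. on 2.8+,
    # so module collection still succeeds on Airflow 2.7.
    from airflow.template.templater import LiteralValue

    assert LiteralValue("{{ not_templated }}").value == "{{ not_templated }}"
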
5 changes: 5 additions & 0 deletions tests/providers/common/sql/hooks/test_dbapi.py
@@ -27,6 +27,11 @@
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
 
 
 class DbApiHookInProvider(DbApiHook):
5 changes: 5 additions & 0 deletions tests/providers/common/sql/hooks/test_sql.py
@@ -28,6 +28,11 @@
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler
 from airflow.utils.session import provide_session
 from tests.providers.common.sql.test_utils import mock_hook
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
 
 TASK_ID = "sql-operator"
 HOST = "host"
5 changes: 5 additions & 0 deletions tests/providers/common/sql/hooks/test_sqlparse.py
@@ -19,6 +19,11 @@
 import pytest
 
 from airflow.providers.common.sql.hooks.sql import DbApiHook
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
 
 
 @pytest.mark.parametrize(
6 changes: 5 additions & 1 deletion tests/providers/common/sql/operators/test_sql.py
@@ -42,8 +42,12 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
-pytestmark = pytest.mark.db_test
+pytestmark = [
+    pytest.mark.db_test,
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
 
 
 class MockHook:
5 changes: 5 additions & 0 deletions tests/providers/common/sql/operators/test_sql_execute.py
@@ -29,6 +29,11 @@
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.openlineage.extractors.base import OperatorLineage
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
 
 DATE = "2017-04-20"
 TASK_ID = "sql-operator"
5 changes: 5 additions & 0 deletions tests/providers/common/sql/sensors/test_sql.py
@@ -26,6 +26,11 @@
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.common.sql.sensors.sql import SqlSensor
 from airflow.utils.timezone import datetime
+from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"),
+]
 
 DEFAULT_DATE = datetime(2015, 1, 1)
 TEST_DAG_ID = "unit_test_sql_dag"
8 changes: 8 additions & 0 deletions tests/providers/common/sql/test_utils.py
@@ -20,7 +20,15 @@
 from typing import TYPE_CHECKING
 from unittest import mock
 
+import pytest
+
 from airflow.models import Connection
+from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+
+pytestmark = [
+    pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
+]
+
 
 if TYPE_CHECKING:
     from airflow.hooks.base import BaseHook
2 changes: 2 additions & 0 deletions tests/providers/docker/hooks/test_docker.py
@@ -42,6 +42,7 @@
 MOCK_CONNECTION_NOT_EXIST_MSG = "Testing connection not exists"
 MOCK_CONNECTION_NOT_EXISTS_EX = AirflowNotFoundException(MOCK_CONNECTION_NOT_EXIST_MSG)
 HOOK_LOGGER_NAME = "airflow.task.hooks.airflow.providers.docker.hooks.docker.DockerHook"
+AIRFLOW_V_2_7_HOOK_LOGGER_NAME = "airflow.providers.docker.hooks.docker"
 
 
 @pytest.fixture
@@ -107,6 +108,7 @@ def test_create_api_client(conn_id, hook_conn, docker_api_client_patcher, caplog
     - If `docker_conn_id` is not provided, the hook doesn't try to access Airflow Connections.
     """
     caplog.set_level(logging.DEBUG, logger=HOOK_LOGGER_NAME)
+    caplog.set_level(logging.DEBUG, logger=AIRFLOW_V_2_7_HOOK_LOGGER_NAME)
     hook = DockerHook(
         docker_conn_id=conn_id, base_url=TEST_TLS_BASE_URL, version=TEST_VERSION, tls=True, timeout=42
     )
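
The second caplog.set_level call accounts for the hook logger being named differently across versions: the constant's name suggests that on Airflow 2.7 DockerHook logs under its plain module logger, while newer versions nest hook loggers under the airflow.task.hooks. prefix. A compact equivalent (illustrative helper, not part of the commit):

import logging


def enable_docker_hook_debug_capture(caplog) -> None:
    """Capture DEBUG records from DockerHook regardless of Airflow version."""
    for logger_name in (
        "airflow.task.hooks.airflow.providers.docker.hooks.docker.DockerHook",  # 2.8+
        "airflow.providers.docker.hooks.docker",  # 2.7 module logger
    ):
        caplog.set_level(logging.DEBUG, logger=logger_name)
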
@@ -35,8 +35,6 @@
     delete_user,
 )
 
-pytestmark = pytest.mark.db_test
-
 
 @pytest.fixture(scope="module")
 def configured_app(minimal_app_for_auth_api):