diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1716be16b1239..0f7bb2248f6db 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -884,32 +884,22 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" run: | pipx install twine twine check dist/*.whl - - name: "Remove airflow package and replace providers with 2.2-compliant versions" + - name: "Remove airflow package and replace providers with 2.3-compliant versions" run: | rm -vf dist/apache_airflow-*.whl \ - dist/apache_airflow_providers_cncf_kubernetes*.whl \ - dist/apache_airflow_providers_celery*.whl + dist/apache_airflow_providers_docker*.whl pip download --no-deps --dest dist \ - apache-airflow-providers-cncf-kubernetes==3.0.0 \ - apache-airflow-providers-celery==2.1.3 - - name: "Install and test provider packages and airflow on Airflow 2.2 files" + apache-airflow-providers-docker==3.1.0 + - name: "Get all provider extras as AIRFLOW_EXTRAS env variable" run: > - breeze release-management verify-provider-packages --use-airflow-version 2.2.0 - --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.2.0 - env: - # The extras below are all extras that should be installed with Airflow 2.2.0 - AIRFLOW_EXTRAS: "airbyte,alibaba,amazon,apache.atlas,apache.beam,apache.cassandra,apache.drill,\ - apache.druid,apache.hdfs,apache.hive,apache.kylin,apache.livy,apache.pig,apache.pinot,\ - apache.spark,apache.sqoop,apache.webhdfs,asana,async,\ - celery,cgroups,cloudant,cncf.kubernetes,dask,databricks,datadog,\ - deprecated_api,dingding,discord,docker,\ - elasticsearch,exasol,facebook,ftp,github_enterprise,google,google_auth,\ - grpc,hashicorp,http,imap,influxdb,jdbc,jenkins,jira,kerberos,ldap,\ - leveldb,microsoft.azure,microsoft.mssql,microsoft.psrp,microsoft.winrm,mongo,mysql,\ - neo4j,odbc,openfaas,opsgenie,oracle,pagerduty,pandas,papermill,password,plexus,\ - postgres,presto,qubole,rabbitmq,redis,salesforce,samba,segment,sendgrid,sentry,\ - sftp,singularity,slack,snowflake,sqlite,ssh,statsd,tableau,telegram,trino,vertica,\ - virtualenv,yandex,zendesk" + python -c 'from pathlib import Path; import json; + providers = json.loads(Path("generated/provider_dependencies.json").read_text()); + provider_keys = ",".join(providers.keys()); + print("AIRFLOW_EXTRAS={}".format(provider_keys))' >> $GITHUB_ENV + - name: "Install and test provider packages and airflow on Airflow 2.3 files" + run: > + breeze release-management verify-provider-packages --use-airflow-version 2.3.0 + --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.3.0 - name: "Fix ownership" run: breeze ci fix-ownership if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 138c8110de679..2bc2017e22750 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -360,10 +360,9 @@ repos: pass_filenames: false entry: ./scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py additional_dependencies: ['rich>=12.4.4'] - # This check might be removed when min-airflow-version in providers is 2.2 - - id: check-airflow-2-2-compatibility - name: Check that providers are 2.2 compatible.
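The new "Get all provider extras" step in the ci.yml hunk above derives AIRFLOW_EXTRAS from the generated provider metadata instead of a hand-maintained list. A minimal standalone sketch of that derivation, assuming it runs from a source checkout that contains generated/provider_dependencies.json:

    import json
    from pathlib import Path

    # Read the generated mapping of provider id -> dependency metadata and turn
    # its keys into a comma-separated extras list, as the workflow step does
    # (the CI step additionally appends the line to $GITHUB_ENV).
    providers = json.loads(Path("generated/provider_dependencies.json").read_text())
    print("AIRFLOW_EXTRAS={}".format(",".join(providers.keys())))
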
- entry: ./scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py + - id: check-airflow-provider-compatibility + name: Check compatibility of Providers with Airflow + entry: ./scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py language: python pass_filenames: true files: ^airflow/providers/.*\.py$ diff --git a/README.md b/README.md index 895f17d7f7227..a9e697fca2c22 100644 --- a/README.md +++ b/README.md @@ -407,8 +407,8 @@ that we increase the minimum Airflow version, when 12 months passed since the first release for the MINOR version of Airflow. For example this means that by default we upgrade the minimum version of Airflow supported by providers -to 2.3.0 in the first Provider's release after 11th of October 2022 (11th of October 2021 is the date when the -first `PATCHLEVEL` of 2.2 (2.2.0) has been released. +to 2.4.0 in the first Provider's release after 30th of April 2023. The 30th of April 2022 is the date when the +first `PATCHLEVEL` of 2.3 (2.3.0) has been released. Providers are often connected with some stakeholders that are vitally interested in maintaining backwards compatibilities in their integrations (for example cloud providers, or specific service providers). But, diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 7fd5c6a9192ba..747abb58e3a12 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -138,10 +138,10 @@ require Breeze Docker image to be build locally. +--------------------------------------------------------+------------------------------------------------------------------+---------+ | blacken-docs | Run black on python code blocks in documentation files | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ -| check-airflow-2-2-compatibility | Check that providers are 2.2 compatible. 
| | -+--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-airflow-config-yaml-consistent | Checks for consistency between config.yml and default_config.cfg | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ +| check-airflow-provider-compatibility | Check compatibility of Providers with Airflow | | ++--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-apache-license-rat | Check if licenses are OK for Apache | | +--------------------------------------------------------+------------------------------------------------------------------+---------+ | check-base-operator-partial-arguments | Check BaseOperator and partial() arguments | | diff --git a/airflow/operators/email.py b/airflow/operators/email.py index d0016f7f499fd..1d310f63fe6e6 100644 --- a/airflow/operators/email.py +++ b/airflow/operators/email.py @@ -19,7 +19,7 @@ from typing import Any, Sequence -from airflow.models import BaseOperator +from airflow.models.baseoperator import BaseOperator from airflow.utils.context import Context from airflow.utils.email import send_email diff --git a/airflow/providers/airbyte/provider.yaml b/airflow/providers/airbyte/provider.yaml index 94f6a2ccacd4d..05e04b8983acf 100644 --- a/airflow/providers/airbyte/provider.yaml +++ b/airflow/providers/airbyte/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-http integrations: diff --git a/airflow/providers/alibaba/provider.yaml b/airflow/providers/alibaba/provider.yaml index b52d598f4cb2b..f89d4eaa20b00 100644 --- a/airflow/providers/alibaba/provider.yaml +++ b/airflow/providers/alibaba/provider.yaml @@ -31,7 +31,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - oss2>=2.14.0 integrations: diff --git a/airflow/providers/amazon/aws/links/base_aws.py b/airflow/providers/amazon/aws/links/base_aws.py index 973b746eeb76b..fba2f17e96f39 100644 --- a/airflow/providers/amazon/aws/links/base_aws.py +++ b/airflow/providers/amazon/aws/links/base_aws.py @@ -17,7 +17,6 @@ # under the License. from __future__ import annotations -from datetime import datetime from typing import TYPE_CHECKING, ClassVar from airflow.models import BaseOperatorLink, XCom @@ -63,30 +62,18 @@ def format_link(self, **kwargs) -> str: def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: """ Link to Amazon Web Services Console. :param operator: airflow operator :param ti_key: TaskInstance ID to return link for - :param dttm: execution date. 
Uses for compatibility with Airflow 2.2 :return: link to external system """ - if ti_key is not None: - conf = XCom.get_value(key=self.key, ti_key=ti_key) - elif not dttm: - conf = {} - else: - conf = XCom.get_one( - key=self.key, - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - ) - + conf = XCom.get_value(key=self.key, ti_key=ti_key) return self.format_link(**conf) if conf else "" @classmethod diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/airflow/providers/amazon/aws/operators/appflow.py index c7e16387ed5c0..b077b04bb59dd 100644 --- a/airflow/providers/amazon/aws/operators/appflow.py +++ b/airflow/providers/amazon/aws/operators/appflow.py @@ -24,7 +24,7 @@ from airflow.models import BaseOperator from airflow.operators.python import ShortCircuitOperator from airflow.providers.amazon.aws.hooks.appflow import AppflowHook -from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms, get_airflow_version +from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms if TYPE_CHECKING: from mypy_boto3_appflow.type_defs import ( @@ -400,7 +400,7 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator): :param flow_name: The flow name :param appflow_run_task_id: Run task ID from where this operator should extract the execution ID - :param ignore_downstream_trigger_rules: Ignore downstream trigger rules (Ignored for Airflow < 2.3) + :param ignore_downstream_trigger_rules: Ignore downstream trigger rules :param aws_conn_id: aws connection to use :param region: aws region to use """ @@ -417,19 +417,13 @@ def __init__( region: str | None = None, **kwargs, ) -> None: - if get_airflow_version() >= (2, 3): - kwargs["ignore_downstream_trigger_rules"] = ignore_downstream_trigger_rules - else: - self.log.warning( - "Ignoring argument ignore_downstream_trigger_rules (%s) - Only supported for Airflow >= 2.3", - ignore_downstream_trigger_rules, - ) super().__init__( python_callable=self._has_new_records_func, op_kwargs={ "flow_name": flow_name, "appflow_run_task_id": appflow_run_task_id, }, + ignore_downstream_trigger_rules=ignore_downstream_trigger_rules, **kwargs, ) self.aws_conn_id = aws_conn_id diff --git a/airflow/providers/amazon/aws/operators/redshift_sql.py b/airflow/providers/amazon/aws/operators/redshift_sql.py index 93534719af2f4..af93c5f6cfb13 100644 --- a/airflow/providers/amazon/aws/operators/redshift_sql.py +++ b/airflow/providers/amazon/aws/operators/redshift_sql.py @@ -20,7 +20,6 @@ from typing import Sequence from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator -from airflow.www import utils as wwwutils class RedshiftSQLOperator(SQLExecuteQueryOperator): @@ -46,10 +45,7 @@ class RedshiftSQLOperator(SQLExecuteQueryOperator): "redshift_conn_id", ) template_ext: Sequence[str] = (".sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. 
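The AppflowRecordsShortCircuitOperator hunk above drops the runtime version gate and forwards ignore_downstream_trigger_rules straight to ShortCircuitOperator, which accepts that argument on the Airflow versions the provider now requires. A minimal sketch of that usage; the task id and callable are placeholders:

    from airflow.operators.python import ShortCircuitOperator


    def has_new_records(**_) -> bool:
        # Placeholder predicate; the real operator inspects Appflow execution records.
        return True


    check_records = ShortCircuitOperator(
        task_id="check_records",
        python_callable=has_new_records,
        # False means skipping respects the trigger rules of tasks further downstream.
        ignore_downstream_trigger_rules=False,
    )
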
- template_fields_renderers = { - "sql": "postgresql" if "postgresql" in wwwutils.get_attr_renderer() else "sql" - } + template_fields_renderers = {"sql": "postgresql"} def __init__(self, *, redshift_conn_id: str = "redshift_default", **kwargs) -> None: super().__init__(conn_id=redshift_conn_id, **kwargs) diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/airflow/providers/amazon/aws/utils/connection_wrapper.py index 5892bf1176e36..f6ae3fdf01837 100644 --- a/airflow/providers/amazon/aws/utils/connection_wrapper.py +++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py @@ -29,15 +29,7 @@ from airflow.providers.amazon.aws.utils import trim_none_values from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.log.secrets_masker import mask_secret - -try: - from airflow.utils.types import NOTSET, ArgNotSet -except ImportError: # TODO: Remove when the provider has an Airflow 2.3+ requirement. - - class ArgNotSet: # type: ignore[no-redef] - """Sentinel type for annotations, useful when None is not viable.""" - - NOTSET = ArgNotSet() +from airflow.utils.types import NOTSET, ArgNotSet if TYPE_CHECKING: from airflow.models.connection import Connection # Avoid circular imports. diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml index ad645239a3472..aaa65a271892f 100644 --- a/airflow/providers/amazon/provider.yaml +++ b/airflow/providers/amazon/provider.yaml @@ -46,7 +46,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - boto3>=1.15.0 # watchtower 3 has been released end Jan and introduced breaking change across the board that might diff --git a/airflow/providers/apache/beam/provider.yaml b/airflow/providers/apache/beam/provider.yaml index f106e53476dbc..c7655bafb833b 100644 --- a/airflow/providers/apache/beam/provider.yaml +++ b/airflow/providers/apache/beam/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-beam>=2.39.0 integrations: diff --git a/airflow/providers/apache/cassandra/provider.yaml b/airflow/providers/apache/cassandra/provider.yaml index c376066d47c0b..4891abeaf5313 100644 --- a/airflow/providers/apache/cassandra/provider.yaml +++ b/airflow/providers/apache/cassandra/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - cassandra-driver>=3.13.0 integrations: diff --git a/airflow/providers/apache/drill/provider.yaml b/airflow/providers/apache/drill/provider.yaml index c94df8458c24e..bc09b071521ad 100644 --- a/airflow/providers/apache/drill/provider.yaml +++ b/airflow/providers/apache/drill/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - sqlalchemy-drill>=1.1.0 diff --git a/airflow/providers/apache/druid/provider.yaml b/airflow/providers/apache/druid/provider.yaml index d932b2c4411ec..8e7367a1e8718 100644 --- a/airflow/providers/apache/druid/provider.yaml +++ b/airflow/providers/apache/druid/provider.yaml @@ -40,7 +40,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - pydruid>=0.4.1 diff --git a/airflow/providers/apache/hdfs/provider.yaml b/airflow/providers/apache/hdfs/provider.yaml index f4632614ef4bb..fccf2475b2827 100644 --- a/airflow/providers/apache/hdfs/provider.yaml +++ 
b/airflow/providers/apache/hdfs/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - snakebite-py3 - hdfs[avro,dataframe,kerberos]>=2.0.4 diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py index 30627b22ca9f6..23f6c32edd3d7 100644 --- a/airflow/providers/apache/hive/operators/hive.py +++ b/airflow/providers/apache/hive/operators/hive.py @@ -103,13 +103,11 @@ def __init__( self.mapred_queue_priority = mapred_queue_priority self.mapred_job_name = mapred_job_name - job_name_template = conf.get( + job_name_template = conf.get_mandatory_value( "hive", "mapred_job_name_template", fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}", ) - if job_name_template is None: - raise ValueError("Job name template should be set !") self.mapred_job_name_template: str = job_name_template # assigned lazily - just for consistency we can create the attribute with a diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml index 9c32c78e183dc..5c359460e02de 100644 --- a/airflow/providers/apache/hive/provider.yaml +++ b/airflow/providers/apache/hive/provider.yaml @@ -42,7 +42,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - hmsclient>=0.1.0 - pandas>=0.17.1 diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py index c8eacd5111460..b1a3669d7171c 100644 --- a/airflow/providers/apache/hive/transfers/hive_to_mysql.py +++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py @@ -25,14 +25,10 @@ from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.utils.operator_helpers import context_to_airflow_vars -from airflow.www import utils as wwwutils if TYPE_CHECKING: from airflow.utils.context import Context -# TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. -MYSQL_RENDERER = "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql" - class HiveToMySqlOperator(BaseOperator): """ @@ -64,8 +60,8 @@ class HiveToMySqlOperator(BaseOperator): template_ext: Sequence[str] = (".sql",) template_fields_renderers = { "sql": "hql", - "mysql_preoperator": MYSQL_RENDERER, - "mysql_postoperator": MYSQL_RENDERER, + "mysql_preoperator": "mysql", + "mysql_postoperator": "mysql", } ui_color = "#a0e08c" diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 1c2c437d18782..9cdd581911238 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -28,7 +28,6 @@ from airflow.models import BaseOperator from airflow.providers.apache.hive.hooks.hive import HiveCliHook from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook -from airflow.www import utils as wwwutils if TYPE_CHECKING: from airflow.utils.context import Context @@ -66,8 +65,7 @@ class MsSqlToHiveOperator(BaseOperator): template_fields: Sequence[str] = ("sql", "partition", "hive_table") template_ext: Sequence[str] = (".sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. 
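The HiveOperator hunk above swaps conf.get() plus a manual None check for conf.get_mandatory_value(), which raises on its own when the option resolves to None. A minimal sketch of the same call, assuming a standard Airflow configuration object:

    from airflow.configuration import conf

    # Raises if the option cannot be resolved at all; the fallback keeps the
    # previous default job-name template.
    job_name_template = conf.get_mandatory_value(
        "hive",
        "mapred_job_name_template",
        fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}",
    )
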
- template_fields_renderers = {"sql": "tsql" if "tsql" in wwwutils.get_attr_renderer() else "sql"} + template_fields_renderers = {"sql": "tsql"} ui_color = "#a0e08c" def __init__( diff --git a/airflow/providers/apache/kylin/provider.yaml b/airflow/providers/apache/kylin/provider.yaml index fbdbf9ef00958..62a64074fc45a 100644 --- a/airflow/providers/apache/kylin/provider.yaml +++ b/airflow/providers/apache/kylin/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - kylinpy>=2.6 integrations: diff --git a/airflow/providers/apache/livy/provider.yaml b/airflow/providers/apache/livy/provider.yaml index 4c4eb3c1784b1..0290564dfb90c 100644 --- a/airflow/providers/apache/livy/provider.yaml +++ b/airflow/providers/apache/livy/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-http integrations: diff --git a/airflow/providers/apache/pig/provider.yaml b/airflow/providers/apache/pig/provider.yaml index 8b1ad67daa222..998029c78b742 100644 --- a/airflow/providers/apache/pig/provider.yaml +++ b/airflow/providers/apache/pig/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 integrations: - integration-name: Apache Pig diff --git a/airflow/providers/apache/pinot/provider.yaml b/airflow/providers/apache/pinot/provider.yaml index 6493795635fa9..d4f0c266b556c 100644 --- a/airflow/providers/apache/pinot/provider.yaml +++ b/airflow/providers/apache/pinot/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - pinotdb>0.4.7 diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py index 114394dc5764f..472f1f8cda7df 100644 --- a/airflow/providers/apache/spark/hooks/spark_submit.py +++ b/airflow/providers/apache/spark/hooks/spark_submit.py @@ -627,9 +627,7 @@ def on_kill(self) -> None: # we still attempt to kill the yarn application renew_from_kt(self._principal, self._keytab, exit_on_fail=False) env = os.environ.copy() - ccacche = airflow_conf.get("kerberos", "ccache") - if ccacche is None: - raise ValueError("The kerberos/ccache config should be set here!") + ccacche = airflow_conf.get_mandatory_value("kerberos", "ccache") env["KRB5CCNAME"] = ccacche with subprocess.Popen( diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml index 629ed74d3997c..e827ad9d5cbf7 100644 --- a/airflow/providers/apache/spark/provider.yaml +++ b/airflow/providers/apache/spark/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - pyspark integrations: diff --git a/airflow/providers/apache/sqoop/provider.yaml b/airflow/providers/apache/sqoop/provider.yaml index db7e8e7e9af87..ada15c3cd2290 100644 --- a/airflow/providers/apache/sqoop/provider.yaml +++ b/airflow/providers/apache/sqoop/provider.yaml @@ -34,7 +34,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 integrations: - integration-name: Apache Sqoop diff --git a/airflow/providers/arangodb/provider.yaml b/airflow/providers/arangodb/provider.yaml index 09cc3f27f5d78..0932447bfec5e 100644 --- a/airflow/providers/arangodb/provider.yaml +++ b/airflow/providers/arangodb/provider.yaml @@ -22,7 +22,7 @@ description: | `ArangoDB `__ dependencies: - - 
apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - python-arango>=7.3.2 versions: diff --git a/airflow/providers/asana/provider.yaml b/airflow/providers/asana/provider.yaml index 96f94715a68be..a08572e9780e0 100644 --- a/airflow/providers/asana/provider.yaml +++ b/airflow/providers/asana/provider.yaml @@ -31,7 +31,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - asana>=0.10 integrations: diff --git a/airflow/providers/atlassian/jira/provider.yaml b/airflow/providers/atlassian/jira/provider.yaml index 92285d56c5650..395bada494000 100644 --- a/airflow/providers/atlassian/jira/provider.yaml +++ b/airflow/providers/atlassian/jira/provider.yaml @@ -25,7 +25,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - JIRA>1.0.7 integrations: diff --git a/airflow/providers/celery/provider.yaml b/airflow/providers/celery/provider.yaml index 53fe5a96a92c3..18859d70080bd 100644 --- a/airflow/providers/celery/provider.yaml +++ b/airflow/providers/celery/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 # The Celery is known to introduce problems when upgraded to a MAJOR version. Airflow Core # Uses Celery for CeleryExecutor, and we also know that Kubernetes Python client follows SemVer # (https://docs.celeryq.dev/en/stable/contributing.html?highlight=semver#versions). diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml index 91225e4de6ed2..f0630d28fbe94 100644 --- a/airflow/providers/cloudant/provider.yaml +++ b/airflow/providers/cloudant/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - cloudant>=2.0 integrations: diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 5ae71af64358a..447d185ddb40f 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -329,8 +329,7 @@ def _get_ti_pod_labels(context: Context | None = None, include_try_number: bool "kubernetes_pod_operator": "True", } - # If running on Airflow 2.3+: - map_index = getattr(ti, "map_index", -1) + map_index = ti.map_index if map_index >= 0: labels["map_index"] = map_index diff --git a/airflow/providers/common/sql/hooks/sql.py b/airflow/providers/common/sql/hooks/sql.py index 6a42ca11cc46b..25d0ecc533497 100644 --- a/airflow/providers/common/sql/hooks/sql.py +++ b/airflow/providers/common/sql/hooks/sql.py @@ -16,7 +16,6 @@ # under the License. from __future__ import annotations -import warnings from contextlib import closing from datetime import datetime from typing import Any, Callable, Iterable, Mapping, Optional @@ -28,8 +27,6 @@ from airflow import AirflowException from airflow.hooks.base import BaseHook -from airflow.providers_manager import ProvidersManager -from airflow.utils.module_loading import import_string from airflow.version import version @@ -41,27 +38,6 @@ def fetch_all_handler(cursor) -> list[tuple] | None: return None -def _backported_get_hook(connection, *, hook_params=None): - """Return hook based on conn_type - For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed - when "apache-airflow-providers-slack" will depend on Airflow >= 2.3. 
- """ - hook = ProvidersManager().hooks.get(connection.conn_type, None) - - if hook is None: - raise AirflowException(f'Unknown hook type "{connection.conn_type}"') - try: - hook_class = import_string(hook.hook_class_name) - except ImportError: - warnings.warn( - f"Could not import {hook.hook_class_name} when discovering {hook.hook_name} {hook.package_name}", - ) - raise - if hook_params is None: - hook_params = {} - return hook_class(**{hook.connection_id_attribute_name: connection.conn_id}, **hook_params) - - class ConnectorProtocol(Protocol): """A protocol where you can connect to a database.""" diff --git a/airflow/providers/common/sql/operators/sql.py b/airflow/providers/common/sql/operators/sql.py index 6330509a63449..66984a802f862 100644 --- a/airflow/providers/common/sql/operators/sql.py +++ b/airflow/providers/common/sql/operators/sql.py @@ -21,14 +21,11 @@ import re from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Sequence, SupportsAbs -from packaging.version import Version - from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator, SkipMixin -from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook, fetch_all_handler -from airflow.version import version +from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler if TYPE_CHECKING: from airflow.utils.context import Context @@ -118,13 +115,7 @@ def _hook(self): """Get DB Hook based on connection type""" self.log.debug("Get connection for %s", self.conn_id) conn = BaseHook.get_connection(self.conn_id) - if Version(version) >= Version("2.3"): - # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3. - hook = conn.get_hook(hook_params=self.hook_params) # ignore airflow compat check - else: - # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed - # when "apache-airflow-providers-common-sql" will depend on Airflow >= 2.3. - hook = _backported_get_hook(conn, hook_params=self.hook_params) + hook = conn.get_hook(hook_params=self.hook_params) if not isinstance(hook, DbApiHook): from airflow.hooks.dbapi_hook import DbApiHook as _DbApiHook diff --git a/airflow/providers/common/sql/sensors/sql.py b/airflow/providers/common/sql/sensors/sql.py index 7f90b5b006ba8..d58802dc98b03 100644 --- a/airflow/providers/common/sql/sensors/sql.py +++ b/airflow/providers/common/sql/sensors/sql.py @@ -18,13 +18,10 @@ from typing import Any, Sequence -from packaging.version import Version - from airflow import AirflowException from airflow.hooks.base import BaseHook -from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook +from airflow.providers.common.sql.hooks.sql import DbApiHook from airflow.sensors.base import BaseSensorOperator -from airflow.version import version class SqlSensor(BaseSensorOperator): @@ -80,13 +77,7 @@ def __init__( def _get_hook(self): conn = BaseHook.get_connection(self.conn_id) - if Version(version) >= Version("2.3"): - # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3. - hook = conn.get_hook(hook_params=self.hook_params) # ignore airflow compat check - else: - # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed - # when "apache-airflow-providers-common-sql" will depend on Airflow >= 2.3. 
- hook = _backported_get_hook(conn, hook_params=self.hook_params) + hook = conn.get_hook(hook_params=self.hook_params) if not isinstance(hook, DbApiHook): raise AirflowException( f"The connection type is not supported by {self.__class__.__name__}. " diff --git a/airflow/providers/databricks/hooks/databricks_sql.py b/airflow/providers/databricks/hooks/databricks_sql.py index c8d5ccdb201d3..5e456a9ca5f3f 100644 --- a/airflow/providers/databricks/hooks/databricks_sql.py +++ b/airflow/providers/databricks/hooks/databricks_sql.py @@ -51,6 +51,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook): """ hook_name = "Databricks SQL" + _test_connection_sql = "select 42" def __init__( self, @@ -197,14 +198,6 @@ def run( else: return results - def test_connection(self): - """Test the Databricks SQL connection by running a simple query.""" - try: - self.run(sql="select 42") - except Exception as e: - return False, str(e) - return True, "Connection successfully checked" - def bulk_dump(self, table, tmp_file): raise NotImplementedError() diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index 7a2c6b6039155..dbb44c70829a5 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -144,23 +144,11 @@ class DatabricksJobRunLink(BaseOperatorLink): def get_link( self, - operator, - dttm=None, + operator: BaseOperator, *, - ti_key: TaskInstanceKey | None = None, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - run_page_url = XCom.get_value(key=XCOM_RUN_PAGE_URL_KEY, ti_key=ti_key) - else: - assert dttm - run_page_url = XCom.get_one( - key=XCOM_RUN_PAGE_URL_KEY, - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - ) - - return run_page_url + return XCom.get_value(key=XCOM_RUN_PAGE_URL_KEY, ti_key=ti_key) class DatabricksSubmitRunOperator(BaseOperator): diff --git a/airflow/providers/databricks/provider.yaml b/airflow/providers/databricks/provider.yaml index ad25de857e960..429bfc33ee72c 100644 --- a/airflow/providers/databricks/provider.yaml +++ b/airflow/providers/databricks/provider.yaml @@ -40,7 +40,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - requests>=2.27,<3 - databricks-sql-connector>=2.0.0, <3.0.0 diff --git a/airflow/providers/databricks/triggers/databricks.py b/airflow/providers/databricks/triggers/databricks.py index 6f67f0bc6e9aa..cd2421c376989 100644 --- a/airflow/providers/databricks/triggers/databricks.py +++ b/airflow/providers/databricks/triggers/databricks.py @@ -18,20 +18,10 @@ from __future__ import annotations import asyncio -import logging from typing import Any from airflow.providers.databricks.hooks.databricks import DatabricksHook - -try: - from airflow.triggers.base import BaseTrigger, TriggerEvent -except ImportError: - logging.getLogger(__name__).warning( - "Deferrable Operators only work starting Airflow 2.2", - exc_info=True, - ) - BaseTrigger = object # type: ignore - TriggerEvent = None # type: ignore +from airflow.triggers.base import BaseTrigger, TriggerEvent class DatabricksExecutionTrigger(BaseTrigger): diff --git a/airflow/providers/datadog/provider.yaml b/airflow/providers/datadog/provider.yaml index 5daf04bd69bf5..2a82a08456e58 100644 --- a/airflow/providers/datadog/provider.yaml +++ b/airflow/providers/datadog/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - 
apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - datadog>=0.14.0 integrations: diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/airflow/providers/dbt/cloud/operators/dbt.py index 37a723421c000..6f8080097bf04 100644 --- a/airflow/providers/dbt/cloud/operators/dbt.py +++ b/airflow/providers/dbt/cloud/operators/dbt.py @@ -34,16 +34,8 @@ class DbtCloudRunJobOperatorLink(BaseOperatorLink): name = "Monitor Job Run" - def get_link(self, operator, dttm=None, *, ti_key=None): - if ti_key is not None: - job_run_url = XCom.get_value(key="job_run_url", ti_key=ti_key) - else: - assert dttm - job_run_url = XCom.get_one( - dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm, key="job_run_url" - ) - - return job_run_url + def get_link(self, operator: BaseOperator, *, ti_key=None): + return XCom.get_value(key="job_run_url", ti_key=ti_key) class DbtCloudRunJobOperator(BaseOperator): diff --git a/airflow/providers/dbt/cloud/provider.yaml b/airflow/providers/dbt/cloud/provider.yaml index 6fd5ce57fbfbe..2dc162a461263 100644 --- a/airflow/providers/dbt/cloud/provider.yaml +++ b/airflow/providers/dbt/cloud/provider.yaml @@ -30,7 +30,7 @@ versions: - 1.0.1 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-http integrations: diff --git a/airflow/providers/dingding/provider.yaml b/airflow/providers/dingding/provider.yaml index 8032435b3648c..5a230fa33a442 100644 --- a/airflow/providers/dingding/provider.yaml +++ b/airflow/providers/dingding/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-http integrations: diff --git a/airflow/providers/discord/provider.yaml b/airflow/providers/discord/provider.yaml index f5f0d650ffa32..0941025114c33 100644 --- a/airflow/providers/discord/provider.yaml +++ b/airflow/providers/discord/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-http integrations: diff --git a/airflow/providers/elasticsearch/provider.yaml b/airflow/providers/elasticsearch/provider.yaml index d4fbb67f88505..80260766f90a6 100644 --- a/airflow/providers/elasticsearch/provider.yaml +++ b/airflow/providers/elasticsearch/provider.yaml @@ -42,7 +42,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - elasticsearch>7 - elasticsearch-dbapi diff --git a/airflow/providers/exasol/provider.yaml b/airflow/providers/exasol/provider.yaml index 02c37aa3b8e77..2b0076309a28e 100644 --- a/airflow/providers/exasol/provider.yaml +++ b/airflow/providers/exasol/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - pyexasol>=0.5.1 - pandas>=0.17.1 diff --git a/airflow/providers/facebook/provider.yaml b/airflow/providers/facebook/provider.yaml index 8d18a2869f69f..39618f54fcba6 100644 --- a/airflow/providers/facebook/provider.yaml +++ b/airflow/providers/facebook/provider.yaml @@ -36,7 +36,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - facebook-business>=6.0.2 integrations: diff --git a/airflow/providers/github/provider.yaml b/airflow/providers/github/provider.yaml index 10f5f2188c433..119f8b4e95bb2 100644 --- a/airflow/providers/github/provider.yaml +++ b/airflow/providers/github/provider.yaml @@ -23,7 +23,7 @@ description: | `GitHub `__ 
dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - pygithub versions: diff --git a/airflow/providers/google/cloud/links/base.py b/airflow/providers/google/cloud/links/base.py index 9a7fdef882faa..6539043a86bcd 100644 --- a/airflow/providers/google/cloud/links/base.py +++ b/airflow/providers/google/cloud/links/base.py @@ -17,12 +17,12 @@ # under the License. from __future__ import annotations -from datetime import datetime from typing import TYPE_CHECKING, ClassVar from airflow.models import BaseOperatorLink, XCom if TYPE_CHECKING: + from airflow.models import BaseOperator from airflow.models.taskinstance import TaskInstanceKey @@ -38,20 +38,11 @@ class BaseGoogleLink(BaseOperatorLink): def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - conf = XCom.get_value(key=self.key, ti_key=ti_key) - else: - assert dttm - conf = XCom.get_one( - key=self.key, - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - ) + conf = XCom.get_value(key=self.key, ti_key=ti_key) if not conf: return "" if self.format_str.startswith(BASE_LINK): diff --git a/airflow/providers/google/cloud/links/dataproc.py b/airflow/providers/google/cloud/links/dataproc.py index ec543bbde86a0..573621aa1491f 100644 --- a/airflow/providers/google/cloud/links/dataproc.py +++ b/airflow/providers/google/cloud/links/dataproc.py @@ -18,13 +18,13 @@ """This module contains Google Dataproc links.""" from __future__ import annotations -from datetime import datetime from typing import TYPE_CHECKING from airflow.models import BaseOperatorLink, XCom from airflow.providers.google.cloud.links.base import BASE_LINK if TYPE_CHECKING: + from airflow.models import BaseOperator from airflow.models.taskinstance import TaskInstanceKey from airflow.utils.context import Context @@ -67,17 +67,11 @@ def persist( def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - conf = XCom.get_value(key=self.key, ti_key=ti_key) - else: - assert dttm - conf = XCom.get_one( - key=self.key, dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm - ) + conf = XCom.get_value(key=self.key, ti_key=ti_key) return ( conf["url"].format( region=conf["region"], project_id=conf["project_id"], resource=conf["resource"] @@ -110,20 +104,11 @@ def persist( def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - list_conf = XCom.get_value(key=self.key, ti_key=ti_key) - else: - assert dttm - list_conf = XCom.get_one( - key=self.key, - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - ) + list_conf = XCom.get_value(key=self.key, ti_key=ti_key) return ( list_conf["url"].format( project_id=list_conf["project_id"], diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index 9d9614e65117d..8db80d993ac28 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -21,7 +21,6 @@ import enum import json import warnings -from datetime import datetime from typing import TYPE_CHECKING, Any, Iterable, Optional, Sequence, SupportsAbs import attr @@ 
-76,20 +75,11 @@ class BigQueryConsoleLink(BaseOperatorLink): def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ): - if ti_key is not None: - job_id = XCom.get_value(key="job_id", ti_key=ti_key) - else: - assert dttm is not None - job_id = XCom.get_one( - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - key="job_id", - ) + job_id = XCom.get_value(key="job_id", ti_key=ti_key) return BIGQUERY_JOB_DETAILS_LINK_FMT.format(job_id=job_id) if job_id else "" @@ -105,17 +95,11 @@ def name(self) -> str: def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ): - if ti_key is not None: - job_ids = XCom.get_value(key="job_id", ti_key=ti_key) - else: - assert dttm is not None - job_ids = XCom.get_one( - key="job_id", dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm - ) + job_ids = XCom.get_value(key="job_id", ti_key=ti_key) if not job_ids: return None if len(job_ids) < self.index: diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/airflow/providers/google/cloud/operators/dataproc_metastore.py index 452a3c23ff81a..6e6e9fcfe3f91 100644 --- a/airflow/providers/google/cloud/operators/dataproc_metastore.py +++ b/airflow/providers/google/cloud/operators/dataproc_metastore.py @@ -18,7 +18,6 @@ """This module contains Google Dataproc Metastore operators.""" from __future__ import annotations -from datetime import datetime from time import sleep from typing import TYPE_CHECKING, Sequence @@ -82,20 +81,11 @@ def persist( def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - conf = XCom.get_value(key=self.key, ti_key=ti_key) - else: - assert dttm - conf = XCom.get_one( - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - key=self.key, - ) + conf = XCom.get_value(key=self.key, ti_key=ti_key) return ( conf["url"].format( region=conf["region"], @@ -136,20 +126,11 @@ def persist( def get_link( self, - operator, - dttm: datetime | None = None, - ti_key: TaskInstanceKey | None = None, + operator: BaseOperator, + *, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - conf = XCom.get_value(key=self.key, ti_key=ti_key) - else: - assert dttm - conf = XCom.get_one( - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - key=DataprocMetastoreDetailedLink.key, - ) + conf = XCom.get_value(key=self.key, ti_key=ti_key) return ( conf["url"].format( region=conf["region"], diff --git a/airflow/providers/google/cloud/triggers/cloud_composer.py b/airflow/providers/google/cloud/triggers/cloud_composer.py index 4af241fcfbef7..9a745e7819a2c 100644 --- a/airflow/providers/google/cloud/triggers/cloud_composer.py +++ b/airflow/providers/google/cloud/triggers/cloud_composer.py @@ -18,21 +18,11 @@ from __future__ import annotations import asyncio -import logging from typing import Any, Sequence from airflow import AirflowException from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerAsyncHook - -try: - from airflow.triggers.base import BaseTrigger, TriggerEvent -except ImportError: - logging.getLogger(__name__).warning( - "Deferrable Operators only work starting Airflow 2.2", - exc_info=True, - ) - 
BaseTrigger = object # type: ignore - TriggerEvent = None # type: ignore +from airflow.triggers.base import BaseTrigger, TriggerEvent class CloudComposerExecutionTrigger(BaseTrigger): diff --git a/airflow/providers/google/leveldb/hooks/leveldb.py b/airflow/providers/google/leveldb/hooks/leveldb.py index e251ce35817ef..a60ba33ff06b7 100644 --- a/airflow/providers/google/leveldb/hooks/leveldb.py +++ b/airflow/providers/google/leveldb/hooks/leveldb.py @@ -17,23 +17,13 @@ """Hook for Level DB""" from __future__ import annotations +from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException +from airflow.hooks.base import BaseHook + try: import plyvel from plyvel import DB - - from airflow.exceptions import AirflowException - from airflow.hooks.base import BaseHook - except ImportError as e: - # Plyvel is an optional feature and if imports are missing, it should be silently ignored - # As of Airflow 2.3 and above the operator can throw OptionalProviderFeatureException - try: - from airflow.exceptions import AirflowOptionalProviderFeatureException - except ImportError: - # However, in order to keep backwards-compatibility with Airflow 2.1 and 2.2, if the - # 2.3 exception cannot be imported, the original ImportError should be raised. - # This try/except can be removed when the provider depends on Airflow >= 2.3.0 - raise e from None raise AirflowOptionalProviderFeatureException(e) DB_NOT_INITIALIZED_BEFORE = "The `get_conn` method should be called before!" diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml index 970827ea2abf2..fb0a3d434cfeb 100644 --- a/airflow/providers/google/provider.yaml +++ b/airflow/providers/google/provider.yaml @@ -54,7 +54,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 # Google has very clear rules on what dependencies should be used. All the limits below # follow strict guidelines of Google Libraries as quoted here: diff --git a/airflow/providers/grpc/provider.yaml b/airflow/providers/grpc/provider.yaml index 133aa0c334598..80702e33a6364 100644 --- a/airflow/providers/grpc/provider.yaml +++ b/airflow/providers/grpc/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.1 - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 # Google has very clear rules on what dependencies should be used. All the limits below # follow strict guidelines of Google Libraries as quoted here: # While this issue is open, dependents of google-api-core, google-cloud-core. 
and google-auth diff --git a/airflow/providers/hashicorp/provider.yaml b/airflow/providers/hashicorp/provider.yaml index 9a78fa1d74e46..72af7122661a9 100644 --- a/airflow/providers/hashicorp/provider.yaml +++ b/airflow/providers/hashicorp/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - hvac>=0.10 integrations: diff --git a/airflow/providers/influxdb/provider.yaml b/airflow/providers/influxdb/provider.yaml index 628beee808a8c..27699e85dd96c 100644 --- a/airflow/providers/influxdb/provider.yaml +++ b/airflow/providers/influxdb/provider.yaml @@ -24,7 +24,7 @@ description: | `InfluxDB `__ dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - influxdb-client>=1.19.0 - requests>=2.26.0 diff --git a/airflow/providers/jdbc/provider.yaml b/airflow/providers/jdbc/provider.yaml index 30c564e100d89..84850d248b74c 100644 --- a/airflow/providers/jdbc/provider.yaml +++ b/airflow/providers/jdbc/provider.yaml @@ -36,7 +36,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - jaydebeapi>=1.1.1 diff --git a/airflow/providers/jenkins/provider.yaml b/airflow/providers/jenkins/provider.yaml index 8f8068f576bf7..a605c4d4635dd 100644 --- a/airflow/providers/jenkins/provider.yaml +++ b/airflow/providers/jenkins/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - python-jenkins>=1.0.0 integrations: diff --git a/airflow/providers/microsoft/azure/operators/data_factory.py b/airflow/providers/microsoft/azure/operators/data_factory.py index da1c47d2d819e..fa29d38a06711 100644 --- a/airflow/providers/microsoft/azure/operators/data_factory.py +++ b/airflow/providers/microsoft/azure/operators/data_factory.py @@ -26,34 +26,28 @@ AzureDataFactoryPipelineRunStatus, get_field, ) +from airflow.utils.log.logging_mixin import LoggingMixin if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstanceKey from airflow.utils.context import Context -class AzureDataFactoryPipelineRunLink(BaseOperatorLink): +class AzureDataFactoryPipelineRunLink(LoggingMixin, BaseOperatorLink): """Constructs a link to monitor a pipeline run in Azure Data Factory.""" name = "Monitor Pipeline Run" def get_link( self, - operator, - dttm=None, + operator: BaseOperator, *, - ti_key: TaskInstanceKey | None = None, + ti_key: TaskInstanceKey, ) -> str: - if ti_key is not None: - run_id = XCom.get_value(key="run_id", ti_key=ti_key) - else: - assert dttm - run_id = XCom.get_one( - key="run_id", - dag_id=operator.dag.dag_id, - task_id=operator.task_id, - execution_date=dttm, - ) + if not isinstance(operator, AzureDataFactoryRunPipelineOperator): + self.log.info("The %s is not %s class.", operator.__class__, AzureDataFactoryRunPipelineOperator) + return "" + run_id = XCom.get_value(key="run_id", ti_key=ti_key) conn_id = operator.azure_data_factory_conn_id conn = BaseHook.get_connection(conn_id) extras = conn.extra_dejson diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml index e5f2cecd81811..bfa336e006f11 100644 --- a/airflow/providers/microsoft/azure/provider.yaml +++ b/airflow/providers/microsoft/azure/provider.yaml @@ -45,7 +45,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - azure-batch>=8.0.0 - azure-cosmos>=4.0.0 - azure-datalake-store>=0.0.45 diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py 
b/airflow/providers/microsoft/mssql/operators/mssql.py index e8685e0924776..1b7c47886c05a 100644 --- a/airflow/providers/microsoft/mssql/operators/mssql.py +++ b/airflow/providers/microsoft/mssql/operators/mssql.py @@ -21,7 +21,6 @@ from typing import Sequence from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator -from airflow.www import utils as wwwutils class MsSqlOperator(SQLExecuteQueryOperator): @@ -47,8 +46,7 @@ class MsSqlOperator(SQLExecuteQueryOperator): template_fields: Sequence[str] = ("sql",) template_ext: Sequence[str] = (".sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. - template_fields_renderers = {"sql": "tsql" if "tsql" in wwwutils.get_attr_renderer() else "sql"} + template_fields_renderers = {"sql": "tsql"} ui_color = "#ededed" def __init__( diff --git a/airflow/providers/microsoft/mssql/provider.yaml b/airflow/providers/microsoft/mssql/provider.yaml index 9d3c9a4feb379..86562830ea65e 100644 --- a/airflow/providers/microsoft/mssql/provider.yaml +++ b/airflow/providers/microsoft/mssql/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - pymssql>=2.1.5; platform_machine != "aarch64" diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/airflow/providers/microsoft/psrp/operators/psrp.py index 617f43353084d..733b8cb29fd3a 100644 --- a/airflow/providers/microsoft/psrp/operators/psrp.py +++ b/airflow/providers/microsoft/psrp/operators/psrp.py @@ -28,12 +28,7 @@ from airflow.models import BaseOperator from airflow.providers.microsoft.psrp.hooks.psrp import PsrpHook from airflow.settings import json - - -# TODO: Replace with airflow.utils.helpers.exactly_one in Airflow 2.3. -def exactly_one(*args): - return len(set(filter(None, args))) == 1 - +from airflow.utils.helpers import exactly_one if TYPE_CHECKING: from airflow.utils.context import Context diff --git a/airflow/providers/microsoft/winrm/provider.yaml b/airflow/providers/microsoft/winrm/provider.yaml index 96c37d0e411fc..fa2e895fbbc8e 100644 --- a/airflow/providers/microsoft/winrm/provider.yaml +++ b/airflow/providers/microsoft/winrm/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - pywinrm>=0.4 integrations: diff --git a/airflow/providers/mongo/provider.yaml b/airflow/providers/mongo/provider.yaml index a7fdd51e64cec..530bc09763218 100644 --- a/airflow/providers/mongo/provider.yaml +++ b/airflow/providers/mongo/provider.yaml @@ -34,7 +34,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - dnspython>=1.13.0 # pymongo 4.0.0 removes connection option `ssl_cert_reqs` which is used in providers-mongo/2.2.0 # TODO: Upgrade to pymongo 4.0.0+ diff --git a/airflow/providers/mysql/operators/mysql.py b/airflow/providers/mysql/operators/mysql.py index 886a07b6b7e53..1609d09411a0a 100644 --- a/airflow/providers/mysql/operators/mysql.py +++ b/airflow/providers/mysql/operators/mysql.py @@ -21,7 +21,6 @@ from typing import Sequence from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator -from airflow.www import utils as wwwutils class MySqlOperator(SQLExecuteQueryOperator): @@ -46,9 +45,8 @@ class MySqlOperator(SQLExecuteQueryOperator): """ template_fields: Sequence[str] = ("sql", "parameters") - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. 
template_fields_renderers = { - "sql": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql", + "sql": "mysql", "parameters": "json", } template_ext: Sequence[str] = (".sql", ".json") diff --git a/airflow/providers/mysql/provider.yaml b/airflow/providers/mysql/provider.yaml index 965234657b901..7021768e0e482 100644 --- a/airflow/providers/mysql/provider.yaml +++ b/airflow/providers/mysql/provider.yaml @@ -39,7 +39,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - mysql-connector-python>=8.0.11; platform_machine != "aarch64" - mysqlclient>=1.3.6; platform_machine != "aarch64" diff --git a/airflow/providers/mysql/transfers/presto_to_mysql.py b/airflow/providers/mysql/transfers/presto_to_mysql.py index 30e12684fd83e..b38e6b8654d68 100644 --- a/airflow/providers/mysql/transfers/presto_to_mysql.py +++ b/airflow/providers/mysql/transfers/presto_to_mysql.py @@ -22,7 +22,6 @@ from airflow.models import BaseOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.providers.presto.hooks.presto import PrestoHook -from airflow.www import utils as wwwutils if TYPE_CHECKING: from airflow.utils.context import Context @@ -47,10 +46,9 @@ class PrestoToMySqlOperator(BaseOperator): template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator") template_ext: Sequence[str] = (".sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. template_fields_renderers = { "sql": "sql", - "mysql_preoperator": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql", + "mysql_preoperator": "mysql", } ui_color = "#a0e08c" diff --git a/airflow/providers/mysql/transfers/trino_to_mysql.py b/airflow/providers/mysql/transfers/trino_to_mysql.py index 5a1763d8ed31e..8ff5ed0446b5e 100644 --- a/airflow/providers/mysql/transfers/trino_to_mysql.py +++ b/airflow/providers/mysql/transfers/trino_to_mysql.py @@ -22,7 +22,6 @@ from airflow.models import BaseOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.providers.trino.hooks.trino import TrinoHook -from airflow.www import utils as wwwutils if TYPE_CHECKING: from airflow.utils.context import Context @@ -47,10 +46,9 @@ class TrinoToMySqlOperator(BaseOperator): template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator") template_ext: Sequence[str] = (".sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. template_fields_renderers = { "sql": "sql", - "mysql_preoperator": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql", + "mysql_preoperator": "mysql", } ui_color = "#a0e08c" diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py index c9a4d1709073e..a7df1f029dd24 100644 --- a/airflow/providers/mysql/transfers/vertica_to_mysql.py +++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py @@ -27,14 +27,10 @@ from airflow.models import BaseOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.providers.vertica.hooks.vertica import VerticaHook -from airflow.www import utils as wwwutils if TYPE_CHECKING: from airflow.utils.context import Context -# TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. 
-MYSQL_RENDERER = "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql" - class VerticaToMySqlOperator(BaseOperator): """ @@ -62,8 +58,8 @@ class VerticaToMySqlOperator(BaseOperator): template_ext: Sequence[str] = (".sql",) template_fields_renderers = { "sql": "sql", - "mysql_preoperator": MYSQL_RENDERER, - "mysql_postoperator": MYSQL_RENDERER, + "mysql_preoperator": "mysql", + "mysql_postoperator": "mysql", } ui_color = "#a0e08c" diff --git a/airflow/providers/neo4j/provider.yaml b/airflow/providers/neo4j/provider.yaml index 9904d5193c1c0..147429644c691 100644 --- a/airflow/providers/neo4j/provider.yaml +++ b/airflow/providers/neo4j/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - neo4j>=4.2.1 integrations: diff --git a/airflow/providers/odbc/provider.yaml b/airflow/providers/odbc/provider.yaml index 69df89bb7a599..c45b64facce8f 100644 --- a/airflow/providers/odbc/provider.yaml +++ b/airflow/providers/odbc/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - pyodbc diff --git a/airflow/providers/openfaas/provider.yaml b/airflow/providers/openfaas/provider.yaml index 3962a571c92c8..ea056029c154f 100644 --- a/airflow/providers/openfaas/provider.yaml +++ b/airflow/providers/openfaas/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 integrations: - integration-name: OpenFaaS diff --git a/airflow/providers/opsgenie/provider.yaml b/airflow/providers/opsgenie/provider.yaml index 255f8ecf46e46..06e6f354f166a 100644 --- a/airflow/providers/opsgenie/provider.yaml +++ b/airflow/providers/opsgenie/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - opsgenie-sdk>=2.1.5 integrations: diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py index e8afaba443573..0ba7425e166f0 100644 --- a/airflow/providers/oracle/hooks/oracle.py +++ b/airflow/providers/oracle/hooks/oracle.py @@ -99,6 +99,7 @@ class OracleHook(DbApiHook): conn_type = "oracle" hook_name = "Oracle" + _test_connection_sql = "select 1 from dual" supports_autocommit = True def __init__( @@ -424,18 +425,3 @@ def handler(cursor): ) return result - - # TODO: Merge this implementation back to DbApiHook when dropping - # support for Airflow 2.2. 
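Both the Databricks and Oracle hooks in this diff replace a hand-written test_connection() with the _test_connection_sql class attribute, letting the shared DbApiHook implementation run that statement when a connection is tested. A minimal sketch of the pattern with a hypothetical hook, assuming a common.sql release whose DbApiHook supports the attribute:

    from airflow.providers.common.sql.hooks.sql import DbApiHook


    class MyDualHook(DbApiHook):
        """Hypothetical hook; only the connection-test statement is customised."""

        conn_name_attr = "my_conn_id"
        default_conn_name = "my_default"
        _test_connection_sql = "select 1 from dual"  # run by DbApiHook.test_connection()
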
- def test_connection(self): - """Tests the connection by executing a select 1 from dual query""" - status, message = False, "" - try: - if self.get_first("select 1 from dual"): - status = True - message = "Connection successfully tested" - except Exception as e: - status = False - message = str(e) - - return status, message diff --git a/airflow/providers/oracle/provider.yaml b/airflow/providers/oracle/provider.yaml index 07419afcb5557..21e713b895e34 100644 --- a/airflow/providers/oracle/provider.yaml +++ b/airflow/providers/oracle/provider.yaml @@ -39,7 +39,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - oracledb>=1.0.0 diff --git a/airflow/providers/pagerduty/provider.yaml b/airflow/providers/pagerduty/provider.yaml index 537bca625c4d6..b0c24d6ad6df6 100644 --- a/airflow/providers/pagerduty/provider.yaml +++ b/airflow/providers/pagerduty/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - pdpyras>=4.1.2 integrations: diff --git a/airflow/providers/papermill/provider.yaml b/airflow/providers/papermill/provider.yaml index fcc9a0543d04a..cafc3aa58783a 100644 --- a/airflow/providers/papermill/provider.yaml +++ b/airflow/providers/papermill/provider.yaml @@ -35,7 +35,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - papermill[all]>=1.2.1 - scrapbook[all] diff --git a/airflow/providers/plexus/provider.yaml b/airflow/providers/plexus/provider.yaml index 1b9f5054eaffe..e48ed155bac6b 100644 --- a/airflow/providers/plexus/provider.yaml +++ b/airflow/providers/plexus/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - arrow>=0.16.0 integrations: diff --git a/airflow/providers/postgres/operators/postgres.py b/airflow/providers/postgres/operators/postgres.py index 10845f0e573e8..561d06c167a46 100644 --- a/airflow/providers/postgres/operators/postgres.py +++ b/airflow/providers/postgres/operators/postgres.py @@ -23,7 +23,6 @@ from psycopg2.sql import SQL, Identifier from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator -from airflow.www import utils as wwwutils class PostgresOperator(SQLExecuteQueryOperator): @@ -42,10 +41,7 @@ class PostgresOperator(SQLExecuteQueryOperator): """ template_fields: Sequence[str] = ("sql",) - # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement. 
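The Oracle hook now relies on the class-level `_test_connection_sql` probe instead of its own `test_connection`. The shared behaviour it defers to can be pictured roughly as in the sketch below; this is an illustrative approximation (stand-in class, stubbed `get_first`), not the actual common-sql implementation.

```python
# Illustrative approximation of a shared test_connection keyed off a per-hook probe
# query; the real common-sql DbApiHook may differ in details.
from __future__ import annotations


class DemoDbApiHook:
    _test_connection_sql = "select 1"  # OracleHook overrides this with "select 1 from dual"

    def get_first(self, sql: str):
        # Stand-in for the real database round trip.
        return (1,)

    def test_connection(self) -> tuple[bool, str]:
        """Run the hook-specific probe query and report (status, message)."""
        try:
            if self.get_first(self._test_connection_sql):
                return True, "Connection successfully tested"
            return False, "Connection test returned no rows"
        except Exception as e:  # mirrors the broad catch of the removed per-hook methods
            return False, str(e)


if __name__ == "__main__":
    print(DemoDbApiHook().test_connection())
```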
- template_fields_renderers = { - "sql": "postgresql" if "postgresql" in wwwutils.get_attr_renderer() else "sql" - } + template_fields_renderers = {"sql": "postgresql"} template_ext: Sequence[str] = (".sql",) ui_color = "#ededed" diff --git a/airflow/providers/postgres/provider.yaml b/airflow/providers/postgres/provider.yaml index ebf65ab7d23fe..19b21af4fefca 100644 --- a/airflow/providers/postgres/provider.yaml +++ b/airflow/providers/postgres/provider.yaml @@ -41,7 +41,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - psycopg2>=2.8.0 diff --git a/airflow/providers/presto/hooks/presto.py b/airflow/providers/presto/hooks/presto.py index 6a1d25407f7a6..902ae67eac2a1 100644 --- a/airflow/providers/presto/hooks/presto.py +++ b/airflow/providers/presto/hooks/presto.py @@ -48,14 +48,11 @@ def generate_presto_client_info() -> str: ) for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values() } - # try_number isn't available in context for airflow < 2.2.5 - # https://github.com/apache/airflow/issues/23059 - try_number = context_var.get("try_number", "") task_info = { "dag_id": context_var["dag_id"], "task_id": context_var["task_id"], "execution_date": context_var["execution_date"], - "try_number": try_number, + "try_number": context_var["try_number"], "dag_run_id": context_var["dag_run_id"], "dag_owner": context_var["dag_owner"], } diff --git a/airflow/providers/presto/provider.yaml b/airflow/providers/presto/provider.yaml index 9bb9060248caf..22c7b8b7d01eb 100644 --- a/airflow/providers/presto/provider.yaml +++ b/airflow/providers/presto/provider.yaml @@ -38,7 +38,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - presto-python-client>=0.8.2 - pandas>=0.17.1 diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py index 8101586d5f971..7b648203972cc 100644 --- a/airflow/providers/qubole/hooks/qubole.py +++ b/airflow/providers/qubole/hooks/qubole.py @@ -229,9 +229,7 @@ def get_results( """ if fp is None: iso = datetime.datetime.utcnow().isoformat() - base_log_folder = conf.get("logging", "BASE_LOG_FOLDER") - if base_log_folder is None: - raise ValueError("logging/BASE_LOG_FOLDER config value should be set") + base_log_folder = conf.get_mandatory_value("logging", "BASE_LOG_FOLDER") logpath = os.path.expanduser(base_log_folder) resultpath = logpath + "/" + self.dag_id + "/" + self.task_id + "/results" pathlib.Path(resultpath).mkdir(parents=True, exist_ok=True) diff --git a/airflow/providers/qubole/operators/qubole.py b/airflow/providers/qubole/operators/qubole.py index c968657cf3562..710387663ffa2 100644 --- a/airflow/providers/qubole/operators/qubole.py +++ b/airflow/providers/qubole/operators/qubole.py @@ -19,7 +19,6 @@ from __future__ import annotations import re -from datetime import datetime from typing import TYPE_CHECKING, Sequence from airflow.hooks.base import BaseHook @@ -33,6 +32,7 @@ ) if TYPE_CHECKING: + from airflow.models.taskinstance import TaskInstanceKey from airflow.utils.context import Context @@ -45,15 +45,13 @@ class QDSLink(BaseOperatorLink): def get_link( self, operator: BaseOperator, - dttm: datetime | None = None, *, - ti_key: TaskInstanceKey | None = None, + ti_key: TaskInstanceKey, ) -> str: """ Get link to qubole command result page. 
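The Qubole hook hunk above switches to `conf.get_mandatory_value`, which is available from Airflow 2.3 and raises on a missing option, so the manual `None` check is no longer needed. A small usage sketch (the printed value is only for illustration):

```python
# Sketch: get_mandatory_value (Airflow >= 2.3) raises if the option is unset,
# replacing the explicit "if value is None: raise ValueError(...)" pattern.
from airflow.configuration import conf

base_log_folder = conf.get_mandatory_value("logging", "BASE_LOG_FOLDER")
print(base_log_folder)
```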
:param operator: operator - :param dttm: datetime :return: url link """ conn = BaseHook.get_connection( @@ -64,13 +62,7 @@ def get_link( host = re.sub(r"api$", "v2/analyze?command_id=", conn.host) else: host = "https://api.qubole.com/v2/analyze?command_id=" - if ti_key is not None: - qds_command_id = XCom.get_value(key="qbol_cmd_id", ti_key=ti_key) - else: - assert dttm - qds_command_id = XCom.get_one( - key="qbol_cmd_id", dag_id=operator.dag_id, task_id=operator.task_id, execution_date=dttm - ) + qds_command_id = XCom.get_value(key="qbol_cmd_id", ti_key=ti_key) url = host + str(qds_command_id) if qds_command_id else "" return url diff --git a/airflow/providers/qubole/provider.yaml b/airflow/providers/qubole/provider.yaml index f4f31c0c4eda7..6cfaec23ba1a5 100644 --- a/airflow/providers/qubole/provider.yaml +++ b/airflow/providers/qubole/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - qds-sdk>=1.10.4 diff --git a/airflow/providers/redis/provider.yaml b/airflow/providers/redis/provider.yaml index 15c353d8d8e08..5b5ae05867dc5 100644 --- a/airflow/providers/redis/provider.yaml +++ b/airflow/providers/redis/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 # Redis 4 introduced a number of changes that likely need testing including mixins in redis commands # as well as unquoting URLS with `urllib.parse.unquote`: # https://github.com/redis/redis-py/blob/master/CHANGES diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/salesforce/provider.yaml index 2c31a38210f07..406fbc02fb73e 100644 --- a/airflow/providers/salesforce/provider.yaml +++ b/airflow/providers/salesforce/provider.yaml @@ -39,7 +39,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - simple-salesforce>=1.0.0 - pandas>=0.17.1 diff --git a/airflow/providers/samba/provider.yaml b/airflow/providers/samba/provider.yaml index dd9c8674caaeb..41cab949bbbac 100644 --- a/airflow/providers/samba/provider.yaml +++ b/airflow/providers/samba/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - smbprotocol>=1.5.0 integrations: diff --git a/airflow/providers/segment/provider.yaml b/airflow/providers/segment/provider.yaml index 0cede0faa0cdc..2233f743cfd10 100644 --- a/airflow/providers/segment/provider.yaml +++ b/airflow/providers/segment/provider.yaml @@ -32,7 +32,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - analytics-python>=1.2.9 integrations: diff --git a/airflow/providers/sendgrid/provider.yaml b/airflow/providers/sendgrid/provider.yaml index 8c8fe945b0684..62148897c4a5e 100644 --- a/airflow/providers/sendgrid/provider.yaml +++ b/airflow/providers/sendgrid/provider.yaml @@ -22,7 +22,7 @@ description: | `Sendgrid `__ dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - sendgrid>=6.0.0 versions: diff --git a/airflow/providers/sftp/provider.yaml b/airflow/providers/sftp/provider.yaml index de703fad14619..d04a842b692a1 100644 --- a/airflow/providers/sftp/provider.yaml +++ b/airflow/providers/sftp/provider.yaml @@ -42,7 +42,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-ssh>=2.1.0 integrations: diff --git a/airflow/providers/singularity/provider.yaml b/airflow/providers/singularity/provider.yaml index 
800bc02653949..0aaae5bff2029 100644 --- a/airflow/providers/singularity/provider.yaml +++ b/airflow/providers/singularity/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - spython>=0.0.56 integrations: diff --git a/airflow/providers/slack/provider.yaml b/airflow/providers/slack/provider.yaml index 80b858ebc9d9a..34be15a9961e8 100644 --- a/airflow/providers/slack/provider.yaml +++ b/airflow/providers/slack/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.2.0 - slack_sdk>=3.0.0 diff --git a/airflow/providers/slack/transfers/sql_to_slack.py b/airflow/providers/slack/transfers/sql_to_slack.py index 00741d7ca0a8c..bdd1cddd2b62c 100644 --- a/airflow/providers/slack/transfers/sql_to_slack.py +++ b/airflow/providers/slack/transfers/sql_to_slack.py @@ -18,16 +18,14 @@ from typing import TYPE_CHECKING, Iterable, Mapping, Sequence -from packaging.version import Version from pandas import DataFrame from tabulate import tabulate from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator -from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook +from airflow.providers.common.sql.hooks.sql import DbApiHook from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook -from airflow.version import version if TYPE_CHECKING: from airflow.utils.context import Context @@ -102,13 +100,7 @@ def __init__( def _get_hook(self) -> DbApiHook: self.log.debug("Get connection for %s", self.sql_conn_id) conn = BaseHook.get_connection(self.sql_conn_id) - if Version(version) >= Version("2.3"): - # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3. - hook = conn.get_hook(hook_params=self.sql_hook_params) # ignore airflow compat check - else: - # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed - # when "apache-airflow-providers-slack" will depend on Airflow >= 2.3. - hook = _backported_get_hook(conn, hook_params=self.sql_hook_params) + hook = conn.get_hook(hook_params=self.sql_hook_params) if not callable(getattr(hook, "get_pandas_df", None)): raise AirflowException( "This hook is not supported. The hook class must have get_pandas_df method." diff --git a/airflow/providers/slack/utils/__init__.py b/airflow/providers/slack/utils/__init__.py index f71901ff841cf..dda12656d48ea 100644 --- a/airflow/providers/slack/utils/__init__.py +++ b/airflow/providers/slack/utils/__init__.py @@ -19,14 +19,7 @@ import warnings from typing import Any -try: - from airflow.utils.types import NOTSET -except ImportError: # TODO: Remove when the provider has an Airflow 2.3+ requirement. 
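With the 2.3 floor, the Slack utils module above imports `NOTSET` directly instead of defining a fallback sentinel. A hypothetical usage sketch of the sentinel pattern (the function is illustrative, not from this change):

```python
# Hypothetical sketch: NOTSET lets a caller distinguish "argument not supplied"
# from an explicit None.
from __future__ import annotations

from airflow.utils.types import NOTSET, ArgNotSet


def resolve_timeout(timeout: float | None | ArgNotSet = NOTSET) -> float | None:
    if isinstance(timeout, ArgNotSet):
        return 30.0  # apply a default only when the argument was omitted
    return timeout  # honour an explicit None, meaning "no timeout"
```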
- - class ArgNotSet: - """Sentinel type for annotations, useful when None is not viable.""" - - NOTSET = ArgNotSet() # type: ignore[assignment] +from airflow.utils.types import NOTSET class ConnectionExtraConfig: diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py index c4677d6f47106..138025a455652 100644 --- a/airflow/providers/snowflake/hooks/snowflake.py +++ b/airflow/providers/snowflake/hooks/snowflake.py @@ -112,6 +112,7 @@ class SnowflakeHook(DbApiHook): conn_type = "snowflake" hook_name = "Snowflake" supports_autocommit = True + _test_connection_sql = "select 1" @staticmethod def get_connection_form_widgets() -> dict[str, Any]: @@ -390,11 +391,3 @@ def run( return results[-1] else: return results - - def test_connection(self): - """Test the Snowflake connection by running a simple query.""" - try: - self.run(sql="select 1") - except Exception as e: - return False, str(e) - return True, "Connection successfully tested" diff --git a/airflow/providers/snowflake/provider.yaml b/airflow/providers/snowflake/provider.yaml index 98f640a20ed97..c77ee86c71f90 100644 --- a/airflow/providers/snowflake/provider.yaml +++ b/airflow/providers/snowflake/provider.yaml @@ -45,7 +45,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - snowflake-connector-python>=2.4.1 - snowflake-sqlalchemy>=1.1.0 diff --git a/airflow/providers/ssh/provider.yaml b/airflow/providers/ssh/provider.yaml index 22e86dcc2e510..76722dd1a75ec 100644 --- a/airflow/providers/ssh/provider.yaml +++ b/airflow/providers/ssh/provider.yaml @@ -41,7 +41,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - paramiko>=2.6.0 - sshtunnel>=0.3.2 diff --git a/airflow/providers/tableau/provider.yaml b/airflow/providers/tableau/provider.yaml index 75e2fbe9248e9..565d821035e5a 100644 --- a/airflow/providers/tableau/provider.yaml +++ b/airflow/providers/tableau/provider.yaml @@ -37,7 +37,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - tableauserverclient integrations: diff --git a/airflow/providers/tabular/provider.yaml b/airflow/providers/tabular/provider.yaml index 65f567bc404f1..360ca5bfd1c48 100644 --- a/airflow/providers/tabular/provider.yaml +++ b/airflow/providers/tabular/provider.yaml @@ -26,7 +26,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 integrations: - integration-name: Tabular diff --git a/airflow/providers/telegram/provider.yaml b/airflow/providers/telegram/provider.yaml index e8097501ff897..37652639ef4e0 100644 --- a/airflow/providers/telegram/provider.yaml +++ b/airflow/providers/telegram/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - python-telegram-bot>=13.0 integrations: diff --git a/airflow/providers/trino/hooks/trino.py b/airflow/providers/trino/hooks/trino.py index 7b96373484671..629ea2bf57d2f 100644 --- a/airflow/providers/trino/hooks/trino.py +++ b/airflow/providers/trino/hooks/trino.py @@ -19,7 +19,6 @@ import json import os -from contextlib import closing from typing import Any, Callable, Iterable, Mapping import trino @@ -49,14 +48,11 @@ def generate_trino_client_info() -> str: ) for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values() } - # try_number isn't available in context for airflow < 2.2.5 - # https://github.com/apache/airflow/issues/23059 - try_number = 
context_var.get("try_number", "") task_info = { "dag_id": context_var["dag_id"], "task_id": context_var["task_id"], "execution_date": context_var["execution_date"], - "try_number": try_number, + "try_number": context_var["try_number"], "dag_run_id": context_var["dag_run_id"], "dag_owner": context_var["dag_owner"], } @@ -94,6 +90,7 @@ class TrinoHook(DbApiHook): hook_name = "Trino" query_id = "" placeholder = "?" + _test_connection_sql = "select 1" def get_conn(self) -> Connection: """Returns a connection object""" @@ -243,19 +240,3 @@ def insert_rows( commit_every = 0 super().insert_rows(table, rows, target_fields, commit_every, replace) - - def test_connection(self): - """Tests the connection from UI using Trino specific query""" - status, message = False, "" - try: - with closing(self.get_conn()) as conn: - with closing(conn.cursor()) as cur: - cur.execute("select 1") - if cur.fetchone(): - status = True - message = "Connection successfully tested" - except Exception as e: - status = False - message = str(e) - - return status, message diff --git a/airflow/providers/trino/provider.yaml b/airflow/providers/trino/provider.yaml index 57da2ba254ee9..422ddac7a614a 100644 --- a/airflow/providers/trino/provider.yaml +++ b/airflow/providers/trino/provider.yaml @@ -38,7 +38,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - pandas>=0.17.1 - trino>=0.301.0 diff --git a/airflow/providers/vertica/provider.yaml b/airflow/providers/vertica/provider.yaml index ccadf7b385848..8127122b11dd2 100644 --- a/airflow/providers/vertica/provider.yaml +++ b/airflow/providers/vertica/provider.yaml @@ -36,7 +36,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - apache-airflow-providers-common-sql>=1.3.0 - vertica-python>=0.5.1 diff --git a/airflow/providers/yandex/provider.yaml b/airflow/providers/yandex/provider.yaml index d7f5fe8697094..af0d1856a6a48 100644 --- a/airflow/providers/yandex/provider.yaml +++ b/airflow/providers/yandex/provider.yaml @@ -34,7 +34,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - yandexcloud>=0.173.0 integrations: diff --git a/airflow/providers/zendesk/provider.yaml b/airflow/providers/zendesk/provider.yaml index 6dc1cf264d044..0276af44cf795 100644 --- a/airflow/providers/zendesk/provider.yaml +++ b/airflow/providers/zendesk/provider.yaml @@ -33,7 +33,7 @@ versions: - 1.0.0 dependencies: - - apache-airflow>=2.2.0 + - apache-airflow>=2.3.0 - zenpy>=2.0.24 integrations: diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 9e4b5b5bf197c..b64d92b5c4cf7 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -25,8 +25,8 @@ "all", "black", "blacken-docs", - "check-airflow-2-2-compatibility", "check-airflow-config-yaml-consistent", + "check-airflow-provider-compatibility", "check-apache-license-rat", "check-base-operator-partial-arguments", "check-base-operator-usage", diff --git a/docs/apache-airflow-providers/howto/create-update-providers.rst b/docs/apache-airflow-providers/howto/create-update-providers.rst index 4b80923986391..2a06c9a9fa395 100644 --- a/docs/apache-airflow-providers/howto/create-update-providers.rst +++ b/docs/apache-airflow-providers/howto/create-update-providers.rst @@ -327,7 +327,7 @@ this (note the ``if ti_key is not None:`` condition). 
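For contrast with the backward-compatible snippet kept in this doc, a link class written against an Airflow 2.3+ floor can drop the `dttm` branch entirely, making `ti_key` keyword-only and reading the XCom via `XCom.get_value`, as the QDSLink hunk earlier in this diff does. A hedged sketch with illustrative names:

```python
# Illustrative sketch (class name, XCom key and URL are made up): the 2.3+-only form
# of an operator extra link, mirroring the QDSLink change above.
from __future__ import annotations

from airflow.models import BaseOperator, XCom
from airflow.models.baseoperator import BaseOperatorLink
from airflow.models.taskinstance import TaskInstanceKey


class ExampleConsoleLink(BaseOperatorLink):
    name = "Example Console"

    def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
        job_id = XCom.get_value(key="example_job_id", ti_key=ti_key)
        return f"https://console.example.com/jobs/{job_id}" if job_id else ""
```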
def get_link( self, - operator, + operator: BaseOperator, dttm: Optional[datetime] = None, ti_key: Optional["TaskInstanceKey"] = None, ): diff --git a/docs/apache-airflow/howto/define_extra_link.rst b/docs/apache-airflow/howto/define_extra_link.rst index 9bae547492cea..1436c2ece7c18 100644 --- a/docs/apache-airflow/howto/define_extra_link.rst +++ b/docs/apache-airflow/howto/define_extra_link.rst @@ -34,7 +34,7 @@ The following code shows how to add extra links to an operator via Plugins: class GoogleLink(BaseOperatorLink): name = "Google" - def get_link(self, operator, *, ti_key): + def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey): return "https://www.google.com" @@ -92,7 +92,7 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope # Example: operators = [GCSToS3Operator, GCSToBigQueryOperator] operators = [GCSToS3Operator] - def get_link(self, operator, *, ti_key): + def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey): return "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format( dag_id=operator.dag_id, task_id=operator.task_id, @@ -134,7 +134,7 @@ Console, but if we wanted to change that link we could: name = "BigQuery Console" operators = [BigQueryOperator] - def get_link(self, operator, *, ti_key): + def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey): job_id = XCom.get_one(ti_key=ti_key, key="job_id") return BIGQUERY_JOB_DETAILS_LINK_FMT.format(job_id=job_id) if job_id else "" diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 74fa32863e53c..0be78fbc756d0 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -2,7 +2,7 @@ "airbyte": { "deps": [ "apache-airflow-providers-http", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "http" @@ -10,7 +10,7 @@ }, "alibaba": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "oss2>=2.14.0" ], "cross-providers-deps": [] @@ -18,7 +18,7 @@ "amazon": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "boto3>=1.15.0", "jsonpath_ng>=1.5.3", "mypy-boto3-appflow>=1.21.0", @@ -44,7 +44,7 @@ }, "apache.beam": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "apache-beam>=2.39.0" ], "cross-providers-deps": [ @@ -53,7 +53,7 @@ }, "apache.cassandra": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "cassandra-driver>=3.13.0" ], "cross-providers-deps": [] @@ -61,7 +61,7 @@ "apache.drill": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "sqlalchemy-drill>=1.1.0" ], "cross-providers-deps": [ @@ -71,7 +71,7 @@ "apache.druid": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pydruid>=0.4.1" ], "cross-providers-deps": [ @@ -81,7 +81,7 @@ }, "apache.hdfs": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "hdfs[avro,dataframe,kerberos]>=2.0.4", "snakebite-py3" ], @@ -90,7 +90,7 @@ "apache.hive": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "hmsclient>=0.1.0", "pandas>=0.17.1", "pyhive[hive]>=0.6.0", @@ -109,7 +109,7 @@ }, "apache.kylin": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "kylinpy>=2.6" ], "cross-providers-deps": [] @@ -117,7 +117,7 @@ "apache.livy": { "deps": [ "apache-airflow-providers-http", - 
"apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "http" @@ -125,14 +125,14 @@ }, "apache.pig": { "deps": [ - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "apache.pinot": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pinotdb>0.4.7" ], "cross-providers-deps": [ @@ -141,27 +141,27 @@ }, "apache.spark": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pyspark" ], "cross-providers-deps": [] }, "apache.sqoop": { "deps": [ - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "arangodb": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "python-arango>=7.3.2" ], "cross-providers-deps": [] }, "asana": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "asana>=0.10" ], "cross-providers-deps": [] @@ -169,13 +169,13 @@ "atlassian.jira": { "deps": [ "JIRA>1.0.7", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "celery": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "celery>=5.2.3,<6", "flower>=1.0.0" ], @@ -183,7 +183,7 @@ }, "cloudant": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "cloudant>=2.0" ], "cross-providers-deps": [] @@ -206,7 +206,7 @@ "deps": [ "aiohttp>=3.6.3, <4", "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "databricks-sql-connector>=2.0.0, <3.0.0", "requests>=2.27,<3" ], @@ -216,7 +216,7 @@ }, "datadog": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "datadog>=0.14.0" ], "cross-providers-deps": [] @@ -224,7 +224,7 @@ "dbt.cloud": { "deps": [ "apache-airflow-providers-http", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "http" @@ -233,7 +233,7 @@ "dingding": { "deps": [ "apache-airflow-providers-http", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "http" @@ -242,7 +242,7 @@ "discord": { "deps": [ "apache-airflow-providers-http", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "http" @@ -258,7 +258,7 @@ "elasticsearch": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "elasticsearch-dbapi", "elasticsearch-dsl>=5.0.0", "elasticsearch>7" @@ -270,7 +270,7 @@ "exasol": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pandas>=0.17.1", "pyexasol>=0.5.1" ], @@ -280,7 +280,7 @@ }, "facebook": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "facebook-business>=6.0.2" ], "cross-providers-deps": [] @@ -291,7 +291,7 @@ }, "github": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pygithub" ], "cross-providers-deps": [] @@ -300,7 +300,7 @@ "deps": [ "PyOpenSSL", "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "asgiref>=3.5.2", "gcloud-aio-bigquery>=6.1.2", "gcloud-aio-storage", @@ -371,7 +371,7 @@ }, "grpc": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "google-auth-httplib2>=0.0.1", "google-auth>=1.0.0, <3.0.0", "grpcio>=1.15.0" @@ -380,7 +380,7 @@ }, "hashicorp": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "hvac>=0.10" ], "cross-providers-deps": [ @@ -400,7 +400,7 @@ }, "influxdb": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "influxdb-client>=1.19.0", 
"requests>=2.26.0" ], @@ -409,7 +409,7 @@ "jdbc": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "jaydebeapi>=1.1.1" ], "cross-providers-deps": [ @@ -418,14 +418,14 @@ }, "jenkins": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "python-jenkins>=1.0.0" ], "cross-providers-deps": [] }, "microsoft.azure": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "azure-batch>=8.0.0", "azure-cosmos>=4.0.0", "azure-datalake-store>=0.0.45", @@ -451,7 +451,7 @@ "microsoft.mssql": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pymssql>=2.1.5; platform_machine != \"aarch64\"" ], "cross-providers-deps": [ @@ -466,14 +466,14 @@ }, "microsoft.winrm": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pywinrm>=0.4" ], "cross-providers-deps": [] }, "mongo": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "dnspython>=1.13.0", "pymongo>=3.6.0,<4.0.0" ], @@ -482,7 +482,7 @@ "mysql": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "mysql-connector-python>=8.0.11; platform_machine != \"aarch64\"", "mysqlclient>=1.3.6; platform_machine != \"aarch64\"" ], @@ -496,7 +496,7 @@ }, "neo4j": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "neo4j>=4.2.1" ], "cross-providers-deps": [] @@ -504,7 +504,7 @@ "odbc": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pyodbc" ], "cross-providers-deps": [ @@ -513,13 +513,13 @@ }, "openfaas": { "deps": [ - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "opsgenie": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "opsgenie-sdk>=2.1.5" ], "cross-providers-deps": [] @@ -527,7 +527,7 @@ "oracle": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "oracledb>=1.0.0" ], "cross-providers-deps": [ @@ -536,14 +536,14 @@ }, "pagerduty": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pdpyras>=4.1.2" ], "cross-providers-deps": [] }, "papermill": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "papermill[all]>=1.2.1", "scrapbook[all]" ], @@ -551,7 +551,7 @@ }, "plexus": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "arrow>=0.16.0" ], "cross-providers-deps": [] @@ -559,7 +559,7 @@ "postgres": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "psycopg2>=2.8.0" ], "cross-providers-deps": [ @@ -570,7 +570,7 @@ "presto": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pandas>=0.17.1", "presto-python-client>=0.8.2" ], @@ -582,7 +582,7 @@ "qubole": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "qds-sdk>=1.10.4" ], "cross-providers-deps": [ @@ -591,14 +591,14 @@ }, "redis": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "redis~=3.2" ], "cross-providers-deps": [] }, "salesforce": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pandas>=0.17.1", "simple-salesforce>=1.0.0" ], @@ -606,7 +606,7 @@ }, "samba": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "smbprotocol>=1.5.0" ], "cross-providers-deps": [] @@ -614,13 +614,13 @@ "segment": { "deps": [ 
"analytics-python>=1.2.9", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "sendgrid": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "sendgrid>=6.0.0" ], "cross-providers-deps": [] @@ -628,7 +628,7 @@ "sftp": { "deps": [ "apache-airflow-providers-ssh>=2.1.0", - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [ "ssh" @@ -636,7 +636,7 @@ }, "singularity": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "spython>=0.0.56" ], "cross-providers-deps": [] @@ -644,7 +644,7 @@ "slack": { "deps": [ "apache-airflow-providers-common-sql>=1.2.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "slack_sdk>=3.0.0" ], "cross-providers-deps": [ @@ -654,7 +654,7 @@ "snowflake": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "snowflake-connector-python>=2.4.1", "snowflake-sqlalchemy>=1.1.0" ], @@ -673,7 +673,7 @@ }, "ssh": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "paramiko>=2.6.0", "sshtunnel>=0.3.2" ], @@ -681,20 +681,20 @@ }, "tableau": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "tableauserverclient" ], "cross-providers-deps": [] }, "tabular": { "deps": [ - "apache-airflow>=2.2.0" + "apache-airflow>=2.3.0" ], "cross-providers-deps": [] }, "telegram": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "python-telegram-bot>=13.0" ], "cross-providers-deps": [] @@ -702,7 +702,7 @@ "trino": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "pandas>=0.17.1", "trino>=0.301.0" ], @@ -714,7 +714,7 @@ "vertica": { "deps": [ "apache-airflow-providers-common-sql>=1.3.0", - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "vertica-python>=0.5.1" ], "cross-providers-deps": [ @@ -723,14 +723,14 @@ }, "yandex": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "yandexcloud>=0.173.0" ], "cross-providers-deps": [] }, "zendesk": { "deps": [ - "apache-airflow>=2.2.0", + "apache-airflow>=2.3.0", "zenpy>=2.0.24" ], "cross-providers-deps": [] diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index c53adc58abf59..83e27cb53af82 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -50,7 +50,7 @@ setup:version:d11da4c17a23179830079b646160149c setup:a6bccf7a73bfac49ce5b027c8900ea61 shell:557ee58e7c70af052f5ea7a173b39137 start-airflow:b089191910e9c3f2ffd9c0079cdf38c6 -static-checks:425cd78507278494e345fb7648260c24 +static-checks:8acd42315c78e91537634a54095ee3e6 stop:8ebd8a42f1003495d37b884de5ac7ce6 testing:docker-compose-tests:70c744105ff61025f25d93a2f1f427c2 testing:helm-tests:35f7ecef86fd9c9dbad73f20ebd64496 diff --git a/images/breeze/output_static-checks.svg b/images/breeze/output_static-checks.svg index cd1b21ae680a6..9ac481f8d631e 100644 --- a/images/breeze/output_static-checks.svg +++ b/images/breeze/output_static-checks.svg @@ -19,249 +19,249 @@ font-weight: 700; } - .terminal-1502996596-matrix { + .terminal-2065099438-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1502996596-title { + .terminal-2065099438-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1502996596-r1 { fill: #c5c8c6;font-weight: bold } -.terminal-1502996596-r2 { fill: #c5c8c6 } -.terminal-1502996596-r3 { fill: #d0b344;font-weight: bold } 
-.terminal-1502996596-r4 { fill: #868887 } -.terminal-1502996596-r5 { fill: #68a0b3;font-weight: bold } -.terminal-1502996596-r6 { fill: #98a84b;font-weight: bold } -.terminal-1502996596-r7 { fill: #8d7b39 } + .terminal-2065099438-r1 { fill: #c5c8c6;font-weight: bold } +.terminal-2065099438-r2 { fill: #c5c8c6 } +.terminal-2065099438-r3 { fill: #d0b344;font-weight: bold } +.terminal-2065099438-r4 { fill: #868887 } +.terminal-2065099438-r5 { fill: #68a0b3;font-weight: bold } +.terminal-2065099438-r6 { fill: #98a84b;font-weight: bold } +.terminal-2065099438-r7 { fill: #8d7b39 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - Command: static-checks + Command: static-checks - + - - -Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]... - -Run static checks. - -╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---type-tType(s) of the static checks to run (multiple can be added).                             -(all | black | blacken-docs | check-airflow-2-2-compatibility |                          -check-airflow-config-yaml-consistent | check-apache-license-rat |                        -check-base-operator-partial-arguments | check-base-operator-usage |                      -check-boring-cyborg-configuration | check-breeze-top-dependencies-limited |              -check-builtin-literals | check-changelog-has-no-duplicates |                             -check-core-deprecation-classes | check-daysago-import-from-utils |                       -check-decorated-operator-implements-custom-name | check-docstring-param-types |          -check-example-dags-urls | check-executables-have-shebangs |                              -check-extra-packages-references | check-extras-order | check-for-inclusive-language |    -check-hooks-apply | check-incorrect-use-of-LoggingMixin | check-init-decorator-arguments -| check-lazy-logging | check-merge-conflict | check-newsfragments-are-valid |            -check-no-providers-in-core-examples | check-no-relative-imports |                        -check-persist-credentials-disabled-in-github-workflows |                                 -check-pre-commit-information-consistent | check-provide-create-sessions-imports |        -check-provider-yaml-valid | check-providers-init-file-missing |                          -check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |                -check-revision-heads-map | check-safe-filter-usage-in-html | check-setup-order |         -check-start-date-not-used-in-defaults | check-system-tests-present |                     -check-system-tests-tocs | check-xml | codespell | compile-www-assets |                   -compile-www-assets-dev | create-missing-init-py-files-tests | debug-statements |         -detect-private-key | doctoc | end-of-file-fixer | fix-encoding-pragma | flynt | identity -| insert-license | isort | lint-chart-schema | lint-css | lint-dockerfile |              -lint-helm-chart | lint-json-schema | lint-markdown | lint-openapi | mixed-line-ending |  -pretty-format-json | pydocstyle | python-no-log-warn | pyupgrade |                       -replace-bad-characters | rst-backticks | run-flake8 | run-mypy | run-shellcheck |        -static-check-autoflake | trailing-whitespace | ts-compile-and-lint-javascript |          -update-breeze-cmd-output | update-breeze-readme-config-hash | 
update-er-diagram |        -update-extras | update-in-the-wild-to-be-sorted | update-inlined-dockerfile-scripts |    -update-local-yml-file | update-migration-references | update-providers-dependencies |    -update-spelling-wordlist-to-be-sorted | update-supported-versions |                      -update-vendored-in-k8s-json-schema | update-version | yamllint | yesqa)                  ---file-fList of files to run the checks on.(PATH) ---all-files-aRun checks on all files. ---show-diff-on-failure-sShow diff for files modified by the checks. ---last-commit-cRun checks for all files in last commit. Mutually exclusive with --commit-ref. ---commit-ref-rRun checks for this commit reference only (can be any git commit-ish reference).         -Mutually exclusive with --last-commit.                                                   -(TEXT)                                                                                   -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + +Usage: breeze static-checks [OPTIONS] [PRECOMMIT_ARGS]... + +Run static checks. + +╭─ Pre-commit flags ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--type-tType(s) of the static checks to run (multiple can be added).                             
+(all | black | blacken-docs | check-airflow-config-yaml-consistent |                     +check-airflow-provider-compatibility | check-apache-license-rat |                        +check-base-operator-partial-arguments | check-base-operator-usage |                      +check-boring-cyborg-configuration | check-breeze-top-dependencies-limited |              +check-builtin-literals | check-changelog-has-no-duplicates |                             +check-core-deprecation-classes | check-daysago-import-from-utils |                       +check-decorated-operator-implements-custom-name | check-docstring-param-types |          +check-example-dags-urls | check-executables-have-shebangs |                              +check-extra-packages-references | check-extras-order | check-for-inclusive-language |    +check-hooks-apply | check-incorrect-use-of-LoggingMixin | check-init-decorator-arguments +| check-lazy-logging | check-merge-conflict | check-newsfragments-are-valid |            +check-no-providers-in-core-examples | check-no-relative-imports |                        +check-persist-credentials-disabled-in-github-workflows |                                 +check-pre-commit-information-consistent | check-provide-create-sessions-imports |        +check-provider-yaml-valid | check-providers-init-file-missing |                          +check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |                +check-revision-heads-map | check-safe-filter-usage-in-html | check-setup-order |         +check-start-date-not-used-in-defaults | check-system-tests-present |                     +check-system-tests-tocs | check-xml | codespell | compile-www-assets |                   +compile-www-assets-dev | create-missing-init-py-files-tests | debug-statements |         +detect-private-key | doctoc | end-of-file-fixer | fix-encoding-pragma | flynt | identity +| insert-license | isort | lint-chart-schema | lint-css | lint-dockerfile |              +lint-helm-chart | lint-json-schema | lint-markdown | lint-openapi | mixed-line-ending |  +pretty-format-json | pydocstyle | python-no-log-warn | pyupgrade |                       +replace-bad-characters | rst-backticks | run-flake8 | run-mypy | run-shellcheck |        +static-check-autoflake | trailing-whitespace | ts-compile-and-lint-javascript |          +update-breeze-cmd-output | update-breeze-readme-config-hash | update-er-diagram |        +update-extras | update-in-the-wild-to-be-sorted | update-inlined-dockerfile-scripts |    +update-local-yml-file | update-migration-references | update-providers-dependencies |    +update-spelling-wordlist-to-be-sorted | update-supported-versions |                      +update-vendored-in-k8s-json-schema | update-version | yamllint | yesqa)                  +--file-fList of files to run the checks on.(PATH) +--all-files-aRun checks on all files. +--show-diff-on-failure-sShow diff for files modified by the checks. +--last-commit-cRun checks for all files in last commit. Mutually exclusive with --commit-ref. +--commit-ref-rRun checks for this commit reference only (can be any git commit-ish reference).         +Mutually exclusive with --last-commit.                                                   
+(TEXT)                                                                                   +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py b/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py deleted file mode 100755 index 86e746ff8caad..0000000000000 --- a/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import re -import sys -from pathlib import Path - -from rich.console import Console - -if __name__ not in ("__main__", "__mp_main__"): - raise SystemExit( - "This file is intended to be executed as an executable program. You cannot use it as a module." - f"To run this script, run the ./{__file__} command [FILE] ..." 
- ) - -console = Console(color_system="standard", width=200) - -errors: list[str] = [] - -SKIP_COMP_CHECK = "# ignore airflow compat check" -TRY_NUM_MATCHER = re.compile(r".*context.*\[[\"']try_number[\"']].*") -GET_MANDATORY_MATCHER = re.compile(r".*conf\.get_mandatory_value") -GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)") -HOOK_PARAMS_MATCHER = re.compile(r".*get_hook\(hook_params") - - -def _check_file(_file: Path): - lines = _file.read_text().splitlines() - - for index, line in enumerate(lines): - if SKIP_COMP_CHECK in line: - continue - - if "XCom.get_value(" in line: - if "if ti_key is not None:" not in lines[index - 1]: - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.3.0 only):[/]\n\n" - f"{lines[index-1]}\n{lines[index]}\n\n" - "[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n" - "if ti_key is not None:\n" - " value = XCom.get_value(...., ti_key=ti_key)\n\n" - "See: https://airflow.apache.org/docs/apache-airflow-providers/" - "howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n" - ) - if "ti.map_index" in line: - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.3+ only):[/]\n\n" - f"{lines[index]}\n\n" - "[yellow]You should not use map_index field in providers " - "as it is only available in Airflow 2.3+[/]" - ) - - if TRY_NUM_MATCHER.match(line): - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.3+ only):[/]\n\n" - f"{lines[index]}\n\n" - "[yellow]You should not expect try_number field for context in providers " - "as it is only available in Airflow 2.3+[/]" - ) - - if GET_MANDATORY_MATCHER.match(line): - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.3+ only):[/]\n\n" - f"{lines[index]}\n\n" - "[yellow]You should not use conf.get_mandatory_value in providers " - "as it is only available in Airflow 2.3+[/]" - ) - - if HOOK_PARAMS_MATCHER.match(line): - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.3+ only):[/]\n\n" - f"{lines[index]}\n\n" - "[yellow]You should not use 'hook_params' in get_hook as it has been added in providers " - "as it is not available in Airflow 2.3+. Use get_hook() instead.[/]" - ) - - if GET_AIRFLOW_APP_MATCHER.match(line): - errors.append( - f"[red]In {_file}:{index} there is a forbidden construct " - "(Airflow 2.4+ only):[/]\n\n" - f"{lines[index]}\n\n" - "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers " - "as it is not available in Airflow 2.4+. Use current_app instead.[/]" - ) - - -if __name__ == "__main__": - for file in sys.argv[1:]: - _check_file(Path(file)) - if errors: - console.print("[red]Found Airflow 2.2 compatibility problems in providers:[/]\n") - for error in errors: - console.print(f"{error}") - sys.exit(1) diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py b/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py new file mode 100755 index 0000000000000..57aa9154f79f1 --- /dev/null +++ b/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import re +import sys +from pathlib import Path + +from rich.console import Console + +if __name__ not in ("__main__", "__mp_main__"): + raise SystemExit( + "This file is intended to be executed as an executable program. You cannot use it as a module." + f"To run this script, run the ./{__file__} command [FILE] ..." + ) + +console = Console(color_system="standard", width=200) + +errors: list[str] = [] + +SKIP_COMP_CHECK = "# ignore airflow compat check" +GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)") + + +def _check_file(_file: Path): + lines = _file.read_text().splitlines() + + for index, line in enumerate(lines): + if SKIP_COMP_CHECK in line: + continue + + if GET_AIRFLOW_APP_MATCHER.match(line): + errors.append( + f"[red]In {_file}:{index} there is a forbidden construct " + "(Airflow 2.4+ only):[/]\n\n" + f"{lines[index]}\n\n" + "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers " + "as it is not available in Airflow 2.4+. Use current_app instead.[/]" + ) + + +if __name__ == "__main__": + for file in sys.argv[1:]: + _check_file(Path(file)) + if errors: + console.print("[red]Found Airflow 2.3+ compatibility problems in providers:[/]\n") + for error in errors: + console.print(f"{error}") + sys.exit(1) diff --git a/scripts/in_container/verify_providers.py b/scripts/in_container/verify_providers.py index 4415b41a52d78..ad5a6b0f735d5 100755 --- a/scripts/in_container/verify_providers.py +++ b/scripts/in_container/verify_providers.py @@ -161,6 +161,11 @@ class ProviderPackageDetails(NamedTuple): " adheres to: 'pyarrow<3.1.0,>=3.0.0; extra == \"pandas\"'", "snowflake", ), + ( + "You have an incompatible version of 'pyarrow' installed (5.0.0), please install a version that" + " adheres to: 'pyarrow<6.1.0,>=6.0.0; extra == \"pandas\"'", + "snowflake", + ), ( "You have an incompatible version of 'pyarrow' installed (6.0.1), please install a version that" " adheres to: 'pyarrow<5.1.0,>=5.0.0; extra == \"pandas\"'", diff --git a/tests/providers/snowflake/hooks/test_snowflake.py b/tests/providers/snowflake/hooks/test_snowflake.py index 7abc50d121775..87c661043a643 100644 --- a/tests/providers/snowflake/hooks/test_snowflake.py +++ b/tests/providers/snowflake/hooks/test_snowflake.py @@ -553,23 +553,23 @@ def test_run_storing_query_ids_extra(self, mock_conn, sql, expected_sql, expecte assert hook.query_ids == expected_query_ids cur.close.assert_called() - @mock.patch("airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run") - def test_connection_success(self, mock_run): + @mock.patch("airflow.providers.common.sql.hooks.sql.DbApiHook.get_first") + def test_connection_success(self, mock_get_first): with unittest.mock.patch.dict( "os.environ", AIRFLOW_CONN_SNOWFLAKE_DEFAULT=Connection(**BASE_CONNECTION_KWARGS).get_uri() ): hook = SnowflakeHook() - mock_run.return_value = [{"1": 1}] + mock_get_first.return_value = [{"1": 1}] status, msg = 
hook.test_connection() assert status is True assert msg == "Connection successfully tested" - mock_run.assert_called_once_with(sql="select 1") + mock_get_first.assert_called_once_with("select 1") @mock.patch( - "airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run", + "airflow.providers.common.sql.hooks.sql.DbApiHook.get_first", side_effect=Exception("Connection Errors"), ) - def test_connection_failure(self, mock_run): + def test_connection_failure(self, mock_get_first): with unittest.mock.patch.dict( "os.environ", AIRFLOW_CONN_SNOWFLAKE_DEFAULT=Connection(**BASE_CONNECTION_KWARGS).get_uri() ): @@ -577,7 +577,7 @@ def test_connection_failure(self, mock_run): status, msg = hook.test_connection() assert status is False assert msg == "Connection Errors" - mock_run.assert_called_once_with(sql="select 1") + mock_get_first.assert_called_once_with("select 1") def test_empty_sql_parameter(self): hook = SnowflakeHook()
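Since the hooks now defer to the shared `test_connection`, the updated tests patch `DbApiHook.get_first` rather than each hook's `run`. A generic pytest-style sketch of the same pattern, assuming the oracle and common-sql providers are installed and that the common-sql `DbApiHook` supplies the shared `test_connection` this change relies on (test name and return value are illustrative):

```python
# Generic sketch mirroring the updated Snowflake test: patch the shared get_first and
# assert that the hook-specific probe query is used.
from unittest import mock

from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.oracle.hooks.oracle import OracleHook


@mock.patch.object(DbApiHook, "get_first", return_value=[(1,)])
def test_oracle_connection_success(mock_get_first):
    status, message = OracleHook().test_connection()
    assert status is True
    assert message == "Connection successfully tested"
    mock_get_first.assert_called_once_with("select 1 from dual")
```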