Fix system tests for SnowflakeOperator (apache#27204)

mik-laj authored Oct 23, 2022
1 parent 57bb54d commit dd40e60
Showing 8 changed files with 151 additions and 60 deletions.
6 changes: 5 additions & 1 deletion .pre-commit-config.yaml
@@ -44,7 +44,11 @@ repos:
      - id: insert-license
        name: Add license for all SQL files
        files: \.sql$
        exclude: ^\.github/.*$|^airflow/_vendor/
        exclude: |
          (?x)
          ^\.github/|
          ^airflow/_vendor/|
          ^tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file\.sql$
        args:
          - --comment-style
          - "/*||*/"
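
The new exclude value switches to a verbose-mode regex so the templated SQL file added in this commit is skipped by the license-insertion hook. As a rough check only (assuming, per pre-commit's documented behaviour, that exclude is treated as a Python regular expression matched against the repo-relative path; the sample paths below are illustrative, not from the commit), the pattern behaves like this:

import re

# Hedged sketch, not part of the commit: exercise the verbose-mode exclude
# pattern from the hook configuration above.
EXCLUDE = re.compile(
    r"""(?x)
    ^\.github/|
    ^airflow/_vendor/|
    ^tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file\.sql$
    """
)

# The templated SQL file is now excluded, so no license header is inserted into it.
assert EXCLUDE.search(
    "tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql"
)
# Previously excluded locations still match (illustrative path).
assert EXCLUDE.search(".github/workflows/ci.yml")
# An ordinary SQL file elsewhere is still picked up by the hook (illustrative path).
assert EXCLUDE.search("airflow/providers/common/sql/query.sql") is None
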
@@ -38,7 +38,8 @@ a file format (see `docs <https://docs.snowflake.com/en/sql-reference/sql/create

An example usage of the S3ToSnowflakeOperator is as follows:

.. exampleinclude:: /../../tests/system/providers/snowflake/example_snowflake.py
.. exampleinclude:: /../../tests/system/providers/snowflake/example_s3_to_snowflake.py
    :language: python
    :start-after: [START howto_operator_s3_to_snowflake]
    :end-before: [END howto_operator_s3_to_snowflake]
    :dedent: 4
@@ -51,6 +51,8 @@ An example usage of the SnowflakeOperator is as follows:
    :language: python
    :start-after: [START howto_operator_snowflake]
    :end-before: [END howto_operator_snowflake]
    :dedent: 4


.. note::

@@ -46,7 +46,8 @@ and contain the resulting dataset (e.g. ASCII formatted dataframe).

An example usage of the SnowflakeToSlackOperator is as follows:

.. exampleinclude:: /../../tests/system/providers/snowflake/example_snowflake.py
.. exampleinclude:: /../../tests/system/providers/snowflake/example_snowflake_to_slack.py
    :language: python
    :start-after: [START howto_operator_snowflake_to_slack]
    :end-before: [END howto_operator_snowflake_to_slack]
    :dedent: 4
66 changes: 66 additions & 0 deletions tests/system/providers/snowflake/example_s3_to_snowflake.py
@@ -0,0 +1,66 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example use of Snowflake related operators.
"""
from __future__ import annotations

import os
from datetime import datetime

from airflow import DAG
from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator

SNOWFLAKE_CONN_ID = 'my_snowflake_conn'
# TODO: should be able to rely on connection's schema, but currently param required by S3ToSnowflakeTransfer
SNOWFLAKE_STAGE = 'stage_name'
SNOWFLAKE_SAMPLE_TABLE = 'sample_table'
S3_FILE_PATH = '</path/to/file/sample_file.csv'

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_s3_to_snowflake"

# [START howto_operator_snowflake]

with DAG(
    DAG_ID,
    start_date=datetime(2021, 1, 1),
    default_args={'snowflake_conn_id': SNOWFLAKE_CONN_ID},
    tags=['example'],
    schedule="@once",
    catchup=False,
) as dag:
    # [START howto_operator_s3_to_snowflake]

    copy_into_table = S3ToSnowflakeOperator(
        task_id='copy_into_table',
        snowflake_conn_id=SNOWFLAKE_CONN_ID,
        s3_keys=[S3_FILE_PATH],
        table=SNOWFLAKE_SAMPLE_TABLE,
        stage=SNOWFLAKE_STAGE,
        file_format="(type = 'CSV',field_delimiter = ';')",
        pattern=".*[.]csv",
    )

    # [END howto_operator_s3_to_snowflake]


from tests.system.utils import get_test_run # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
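
For context only (an approximation, not the provider's exact implementation): with the arguments used in this new system test, S3ToSnowflakeOperator ends up issuing a Snowflake COPY INTO statement roughly along these lines; the real operator assembles the SQL internally and may differ in key handling, schema qualification, and quoting.

# Hedged sketch: approximate shape of the COPY INTO implied by the example above.
stage = 'stage_name'
table = 'sample_table'
file_format = "(type = 'CSV',field_delimiter = ';')"
pattern = ".*[.]csv"

approximate_sql = (
    f"COPY INTO {table} "
    f"FROM @{stage} "
    f"FILE_FORMAT = {file_format} "
    f"PATTERN = '{pattern}'"
)
print(approximate_sql)
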
61 changes: 4 additions & 57 deletions tests/system/providers/snowflake/example_snowflake.py
@@ -25,19 +25,9 @@

from airflow import DAG
from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator
from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator
from airflow.providers.snowflake.transfers.snowflake_to_slack import SnowflakeToSlackOperator

SNOWFLAKE_CONN_ID = 'my_snowflake_conn'
SLACK_CONN_ID = 'my_slack_conn'
# TODO: should be able to rely on connection's schema, but currently param required by S3ToSnowflakeTransfer
SNOWFLAKE_SCHEMA = 'schema_name'
SNOWFLAKE_STAGE = 'stage_name'
SNOWFLAKE_WAREHOUSE = 'warehouse_name'
SNOWFLAKE_DATABASE = 'database_name'
SNOWFLAKE_ROLE = 'role_name'
SNOWFLAKE_SAMPLE_TABLE = 'sample_table'
S3_FILE_PATH = '</path/to/file/sample_file.csv'

# SQL commands
CREATE_TABLE_SQL_STRING = (
Expand All @@ -46,40 +36,25 @@
SQL_INSERT_STATEMENT = f"INSERT INTO {SNOWFLAKE_SAMPLE_TABLE} VALUES ('name', %(id)s)"
SQL_LIST = [SQL_INSERT_STATEMENT % {"id": n} for n in range(0, 10)]
SQL_MULTIPLE_STMTS = "; ".join(SQL_LIST)
SNOWFLAKE_SLACK_SQL = f"SELECT name, id FROM {SNOWFLAKE_SAMPLE_TABLE} LIMIT 10;"
SNOWFLAKE_SLACK_MESSAGE = (
"Results in an ASCII table:\n```{{ results_df | tabulate(tablefmt='pretty', headers='keys') }}```"
)
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_snowflake"

# [START howto_operator_snowflake]

with DAG(
    DAG_ID,
    start_date=datetime(2021, 1, 1),
    default_args={'snowflake_conn_id': SNOWFLAKE_CONN_ID},
    tags=['example'],
    schedule="@once",
    catchup=False,
) as dag:
    # [START snowflake_example_dag]
    snowflake_op_sql_str = SnowflakeOperator(
        task_id='snowflake_op_sql_str',
        sql=CREATE_TABLE_SQL_STRING,
        warehouse=SNOWFLAKE_WAREHOUSE,
        database=SNOWFLAKE_DATABASE,
        schema=SNOWFLAKE_SCHEMA,
        role=SNOWFLAKE_ROLE,
    )
    # [START howto_operator_snowflake]
    snowflake_op_sql_str = SnowflakeOperator(task_id='snowflake_op_sql_str', sql=CREATE_TABLE_SQL_STRING)

    snowflake_op_with_params = SnowflakeOperator(
        task_id='snowflake_op_with_params',
        sql=SQL_INSERT_STATEMENT,
        parameters={"id": 56},
        warehouse=SNOWFLAKE_WAREHOUSE,
        database=SNOWFLAKE_DATABASE,
        schema=SNOWFLAKE_SCHEMA,
        role=SNOWFLAKE_ROLE,
    )

    snowflake_op_sql_list = SnowflakeOperator(task_id='snowflake_op_sql_list', sql=SQL_LIST)
Expand All @@ -91,48 +66,20 @@

    snowflake_op_template_file = SnowflakeOperator(
        task_id='snowflake_op_template_file',
        sql='/path/to/sql/<filename>.sql',
        sql='example_snowflake_snowflake_op_template_file.sql',
    )

    # [END howto_operator_snowflake]

    # [START howto_operator_s3_to_snowflake]

    copy_into_table = S3ToSnowflakeOperator(
        task_id='copy_into_table',
        s3_keys=[S3_FILE_PATH],
        table=SNOWFLAKE_SAMPLE_TABLE,
        schema=SNOWFLAKE_SCHEMA,
        stage=SNOWFLAKE_STAGE,
        file_format="(type = 'CSV',field_delimiter = ';')",
        pattern=".*[.]csv",
    )

    # [END howto_operator_s3_to_snowflake]

    # [START howto_operator_snowflake_to_slack]

    slack_report = SnowflakeToSlackOperator(
        task_id="slack_report",
        sql=SNOWFLAKE_SLACK_SQL,
        slack_message=SNOWFLAKE_SLACK_MESSAGE,
        slack_conn_id=SLACK_CONN_ID,
    )

    # [END howto_operator_snowflake_to_slack]

    (
        snowflake_op_sql_str
        >> [
            snowflake_op_with_params,
            snowflake_op_sql_list,
            snowflake_op_template_file,
            copy_into_table,
            snowflake_op_sql_multiple_stmts,
        ]
        >> slack_report
    )
    # [END snowflake_example_dag]


from tests.system.utils import get_test_run # noqa: E402
4 changes: 4 additions & 0 deletions tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql
@@ -0,0 +1,4 @@
CREATE TABLE IF NOT EXISTS RANDOM_DATA AS SELECT
UNIFORM(1, 10, 1234) AS COL1
FROM
TABLE(GENERATOR(ROWCOUNT => 5));
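
The refactored example_snowflake.py above points its snowflake_op_template_file task at this file by bare name. A minimal standalone sketch of why that works, assuming the usual Airflow templating behaviour (``sql`` is a templated field and ``.sql`` is a recognised template extension, so the value is read as a Jinja template file resolved next to the DAG file or on any extra template_searchpath); the DAG id, task id, and search path below are hypothetical, and the connection id is taken from the examples:

from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator

# Hypothetical standalone DAG (not part of the commit). Because ``sql`` is a
# templated field and ``.sql`` is a template extension, a value ending in
# ``.sql`` is rendered from a file found relative to the DAG file or in any
# directory listed in ``template_searchpath``.
with DAG(
    "snowflake_template_file_demo",  # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule="@once",
    catchup=False,
    template_searchpath=["/opt/airflow/include/sql"],  # hypothetical extra search path
) as dag:
    run_template = SnowflakeOperator(
        task_id="run_template",  # hypothetical task id
        snowflake_conn_id="my_snowflake_conn",  # connection id used by the examples
        sql="example_snowflake_snowflake_op_template_file.sql",
    )
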
66 changes: 66 additions & 0 deletions tests/system/providers/snowflake/example_snowflake_to_slack.py
@@ -0,0 +1,66 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example use of Snowflake related operators.
"""
from __future__ import annotations

import os
from datetime import datetime

from airflow import DAG
from airflow.providers.snowflake.transfers.snowflake_to_slack import SnowflakeToSlackOperator

SNOWFLAKE_CONN_ID = 'my_snowflake_conn'
SLACK_CONN_ID = 'my_slack_conn'
SNOWFLAKE_SAMPLE_TABLE = 'sample_table'

# SQL commands
SNOWFLAKE_SLACK_SQL = f"SELECT name, id FROM {SNOWFLAKE_SAMPLE_TABLE} LIMIT 10;"
SNOWFLAKE_SLACK_MESSAGE = (
"Results in an ASCII table:\n```{{ results_df | tabulate(tablefmt='pretty', headers='keys') }}```"
)
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_snowflake_to_slack"

# [START howto_operator_snowflake]

with DAG(
    DAG_ID,
    start_date=datetime(2021, 1, 1),
    default_args={'snowflake_conn_id': SNOWFLAKE_CONN_ID},
    tags=['example'],
    schedule="@once",
    catchup=False,
) as dag:
    # [START howto_operator_snowflake_to_slack]

    slack_report = SnowflakeToSlackOperator(
        task_id="slack_report",
        sql=SNOWFLAKE_SLACK_SQL,
        slack_message=SNOWFLAKE_SLACK_MESSAGE,
        slack_conn_id=SLACK_CONN_ID,
    )

    # [END howto_operator_snowflake_to_slack]


from tests.system.utils import get_test_run # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
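
For illustration only: the slack_message template above renders the query result, exposed as ``results_df``, through a ``tabulate`` Jinja filter. The sketch below reproduces roughly what the rendered message body looks like for a toy result set; pandas and tabulate are assumed to be installed, and the DataFrame contents are a stand-in, not real query output.

# Hedged sketch, not part of the commit: approximate the rendered Slack message.
import pandas as pd
from tabulate import tabulate

results_df = pd.DataFrame({"NAME": ["name"] * 3, "ID": [0, 1, 2]})  # toy stand-in for the query result

message = (
    "Results in an ASCII table:\n```"
    + tabulate(results_df, tablefmt="pretty", headers="keys")
    + "```"
)
print(message)
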
