Skip to content

Commit

Permalink
Update sample dag and doc for RDS (apache#23651)
Browse files Browse the repository at this point in the history
  • Loading branch information
vincbeck authored May 22, 2022
1 parent 4829b23 commit 509b277
Show file tree
Hide file tree
Showing 6 changed files with 296 additions and 262 deletions.
149 changes: 0 additions & 149 deletions airflow/providers/amazon/aws/example_dags/example_rds.py

This file was deleted.

58 changes: 58 additions & 0 deletions airflow/providers/amazon/aws/example_dags/example_rds_event.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Example Airflow DAG that creates and then deletes an Amazon RDS event
subscription using the Amazon provider's RDS operators.

The ``# [START ...]`` / ``# [END ...]`` comment markers delimit snippets that
are included verbatim in the provider documentation — keep them in place.
"""
from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import (
    RdsCreateEventSubscriptionOperator,
    RdsDeleteEventSubscriptionOperator,
)

# Runtime configuration is taken from the environment so the example can run
# against a real account; the defaults are placeholders only.
SUBSCRIPTION_NAME = getenv("SUBSCRIPTION_NAME", "subscription-name")
SNS_TOPIC_ARN = getenv("SNS_TOPIC_ARN", "arn:aws:sns:<region>:<account number>:MyTopic")
RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")

with DAG(
    dag_id='example_rds_event',
    schedule_interval=None,  # example is triggered manually, never scheduled
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_create_event_subscription]
    # Subscribe the SNS topic to 'availability' events emitted by the given
    # DB instance.
    create_subscription = RdsCreateEventSubscriptionOperator(
        task_id='create_subscription',
        subscription_name=SUBSCRIPTION_NAME,
        sns_topic_arn=SNS_TOPIC_ARN,
        source_type='db-instance',
        source_ids=[RDS_DB_IDENTIFIER],
        event_categories=['availability'],
    )
    # [END howto_operator_rds_create_event_subscription]

    # [START howto_operator_rds_delete_event_subscription]
    delete_subscription = RdsDeleteEventSubscriptionOperator(
        task_id='delete_subscription',
        subscription_name=SUBSCRIPTION_NAME,
    )
    # [END howto_operator_rds_delete_event_subscription]

    # Tear down immediately after creation so the example leaves no resources.
    chain(create_subscription, delete_subscription)
71 changes: 71 additions & 0 deletions airflow/providers/amazon/aws/example_dags/example_rds_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Example Airflow DAG that starts an Amazon RDS snapshot-export-to-S3 task,
cancels it, and then waits for the task to reach the 'canceled' status.

The ``# [START ...]`` / ``# [END ...]`` comment markers delimit snippets that
are included verbatim in the provider documentation — keep them in place.
"""
from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import RdsCancelExportTaskOperator, RdsStartExportTaskOperator
from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor

# Runtime configuration is taken from the environment so the example can run
# against a real account; the defaults are placeholders only.
RDS_EXPORT_TASK_IDENTIFIER = getenv("RDS_EXPORT_TASK_IDENTIFIER", "export-task-identifier")
RDS_EXPORT_SOURCE_ARN = getenv(
    "RDS_EXPORT_SOURCE_ARN", "arn:aws:rds:<region>:<account number>:snapshot:snap-id"
)
BUCKET_NAME = getenv("BUCKET_NAME", "bucket-name")
BUCKET_PREFIX = getenv("BUCKET_PREFIX", "bucket-prefix")
ROLE_ARN = getenv("ROLE_ARN", "arn:aws:iam::<account number>:role/Role")
KMS_KEY_ID = getenv("KMS_KEY_ID", "arn:aws:kms:<region>:<account number>:key/key-id")


with DAG(
    dag_id='example_rds_export',
    schedule_interval=None,  # example is triggered manually, never scheduled
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_start_export_task]
    # Export the snapshot identified by RDS_EXPORT_SOURCE_ARN to S3, encrypted
    # with the given KMS key and written under BUCKET_NAME/BUCKET_PREFIX.
    start_export = RdsStartExportTaskOperator(
        task_id='start_export',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
        source_arn=RDS_EXPORT_SOURCE_ARN,
        s3_bucket_name=BUCKET_NAME,
        s3_prefix=BUCKET_PREFIX,
        iam_role_arn=ROLE_ARN,
        kms_key_id=KMS_KEY_ID,
    )
    # [END howto_operator_rds_start_export_task]

    # [START howto_operator_rds_cancel_export]
    cancel_export = RdsCancelExportTaskOperator(
        task_id='cancel_export',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
    )
    # [END howto_operator_rds_cancel_export]

    # [START howto_sensor_rds_export_task_existence]
    # Wait until the export task shows up with the 'canceled' status,
    # confirming the cancellation above took effect.
    export_sensor = RdsExportTaskExistenceSensor(
        task_id='export_sensor',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
        target_statuses=['canceled'],
    )
    # [END howto_sensor_rds_export_task_existence]

    chain(start_export, cancel_export, export_sensor)
76 changes: 76 additions & 0 deletions airflow/providers/amazon/aws/example_dags/example_rds_snapshot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""
Example Airflow DAG that creates an Amazon RDS DB snapshot, waits for it to
become available, copies it, and finally deletes the original snapshot.

The ``# [START ...]`` / ``# [END ...]`` comment markers delimit snippets that
are included verbatim in the provider documentation — keep them in place.
"""
from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import (
    RdsCopyDbSnapshotOperator,
    RdsCreateDbSnapshotOperator,
    RdsDeleteDbSnapshotOperator,
)
from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor

# Runtime configuration is taken from the environment so the example can run
# against a real account; the defaults are placeholders only.
RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")
RDS_DB_SNAPSHOT_IDENTIFIER = getenv("RDS_DB_SNAPSHOT_IDENTIFIER", "database-1-snap")

with DAG(
    dag_id='example_rds_snapshot',
    schedule_interval=None,  # example is triggered manually, never scheduled
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_create_db_snapshot]
    create_snapshot = RdsCreateDbSnapshotOperator(
        task_id='create_snapshot',
        db_type='instance',
        db_identifier=RDS_DB_IDENTIFIER,
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
    )
    # [END howto_operator_rds_create_db_snapshot]

    # [START howto_sensor_rds_snapshot_existence]
    # Wait for the snapshot created above to become available.
    # Fix: the sensor must watch the SNAPSHOT identifier, not the database
    # identifier — otherwise it waits on a snapshot that is never created.
    snapshot_sensor = RdsSnapshotExistenceSensor(
        task_id='snapshot_sensor',
        db_type='instance',
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
        target_statuses=['available'],
    )
    # [END howto_sensor_rds_snapshot_existence]

    # [START howto_operator_rds_copy_snapshot]
    # Copy the snapshot created above (source/target are snapshot identifiers,
    # not database identifiers).
    copy_snapshot = RdsCopyDbSnapshotOperator(
        task_id='copy_snapshot',
        db_type='instance',
        source_db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
        target_db_snapshot_identifier=f'{RDS_DB_SNAPSHOT_IDENTIFIER}-copy',
    )
    # [END howto_operator_rds_copy_snapshot]

    # [START howto_operator_rds_delete_snapshot]
    # Delete the original snapshot; the '-copy' snapshot is left in place.
    delete_snapshot = RdsDeleteDbSnapshotOperator(
        task_id='delete_snapshot',
        db_type='instance',
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
    )
    # [END howto_operator_rds_delete_snapshot]

    chain(create_snapshot, snapshot_sensor, copy_snapshot, delete_snapshot)
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/sensors/rds.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:RdsSnapshotExistenceSensor`
:ref:`howto/sensor:RdsSnapshotExistenceSensor`
:param db_type: Type of the DB - either "instance" or "cluster"
:param db_snapshot_identifier: The identifier for the DB snapshot
Expand Down Expand Up @@ -113,7 +113,7 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:RdsExportTaskExistenceSensor`
:ref:`howto/sensor:RdsExportTaskExistenceSensor`
:param export_task_identifier: A unique identifier for the snapshot export task.
:param target_statuses: Target status of export task
Expand Down
Loading

0 comments on commit 509b277

Please sign in to comment.