Source code for tests.system.providers.amazon.aws.example_rds_event
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime

import boto3

from airflow import DAG
from airflow.decorators import task
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.hooks.rds import RdsHook
from airflow.providers.amazon.aws.operators.rds import (
    RdsCreateEventSubscriptionOperator,
    RdsDeleteEventSubscriptionOperator,
)
from airflow.utils.trigger_rule import TriggerRule

from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
# NOTE(review): in the runnable system test this is presumably decorated with
# Airflow's @task (it is passed as a task into chain() later) — confirm against
# the original file; the decorator is not visible in this extraction.
def create_rds_instance(db_name, instance_name) -> None:
    """Create a small PostgreSQL RDS instance and block until it is available.

    :param db_name: name of the initial database to create on the instance.
    :param instance_name: the DB instance identifier to create and wait on.
    """
    rds_client = RdsHook().get_conn()
    rds_client.create_db_instance(
        DBName=db_name,
        DBInstanceIdentifier=instance_name,
        AllocatedStorage=20,
        DBInstanceClass='db.t3.micro',
        Engine='postgres',
        MasterUsername='username',
        # NEVER store your production password in plaintext in a DAG like this.
        # Use Airflow Secrets or a secret manager for this in production.
        MasterUserPassword='rds_password',
    )
    # Wait for the instance to reach the 'available' state so downstream
    # tasks can rely on it existing.
    rds_client.get_waiter('db_instance_available').wait(DBInstanceIdentifier=instance_name)
# Names derived from the per-run environment id so parallel test runs
# do not collide on AWS resource names.
rds_db_name = f'{test_context[ENV_ID_KEY]}_db'
rds_instance_name = f'{test_context[ENV_ID_KEY]}-instance'
rds_subscription_name = f'{test_context[ENV_ID_KEY]}-subscription'

sns_topic = create_sns_topic(test_context[ENV_ID_KEY])

# [START howto_operator_rds_create_event_subscription]
create_subscription = RdsCreateEventSubscriptionOperator(
    task_id='create_subscription',
    subscription_name=rds_subscription_name,
    sns_topic_arn=sns_topic,
    source_type='db-instance',
    source_ids=[rds_instance_name],
    event_categories=['availability'],
)
# [END howto_operator_rds_create_event_subscription]

# [START howto_operator_rds_delete_event_subscription]
delete_subscription = RdsDeleteEventSubscriptionOperator(
    task_id='delete_subscription',
    subscription_name=rds_subscription_name,
)
# [END howto_operator_rds_delete_event_subscription]

chain(
    # TEST SETUP
    test_context,
    sns_topic,
    create_rds_instance(rds_db_name, rds_instance_name),
    # TEST BODY
    create_subscription,
    delete_subscription,
    # TEST TEARDOWN
    delete_db_instance(rds_instance_name),
    delete_sns_topic(sns_topic),
)

from tests.system.utils.watcher import watcher

# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)