# Source code for tests.system.google.cloud.cloud_sql.example_cloud_sql
## Licensed to the Apache Software Foundation (ASF) under one# or more contributor license agreements. See the NOTICE file# distributed with this work for additional information# regarding copyright ownership. The ASF licenses this file# to you under the Apache License, Version 2.0 (the# "License"); you may not use this file except in compliance# with the License. You may obtain a copy of the License at## http://www.apache.org/licenses/LICENSE-2.0## Unless required by applicable law or agreed to in writing,# software distributed under the License is distributed on an# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY# KIND, either express or implied. See the License for the# specific language governing permissions and limitations# under the License."""Example Airflow DAG that creates, patches and deletes a Cloud SQL instance, and alsocreates, patches and deletes a database inside the instance, in Google Cloud."""from__future__importannotationsimportosfromdatetimeimportdatetimefromurllib.parseimporturlsplitfromairflow.models.dagimportDAGfromairflow.models.xcom_argimportXComArgfromairflow.providers.google.cloud.operators.cloud_sqlimport(CloudSQLCloneInstanceOperator,CloudSQLCreateInstanceDatabaseOperator,CloudSQLCreateInstanceOperator,CloudSQLDeleteInstanceDatabaseOperator,CloudSQLDeleteInstanceOperator,CloudSQLExportInstanceOperator,CloudSQLImportInstanceOperator,CloudSQLInstancePatchOperator,CloudSQLPatchInstanceDatabaseOperator,)fromairflow.providers.google.cloud.operators.gcsimport(GCSBucketCreateAclEntryOperator,GCSCreateBucketOperator,GCSDeleteBucketOperator,GCSObjectCreateAclEntryOperator,)fromairflow.utils.trigger_ruleimportTriggerRulefromproviders.tests.system.googleimportDEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
# NOTE(review): `body`, `patch_body`, `db_create_body`, `db_patch_body`, `export_body`,
# `export_body_deferrable`, `import_body`, `INSTANCE_NAME`, `CLONED_INSTANCE_NAME`,
# `DB_NAME` and `FILE_URI` are defined in a part of this file that is not visible in
# this chunk; in the upstream example these task definitions sit inside a
# `with DAG(...)` context — TODO confirm against the full file.

# ############################################## #
# ### INSTANCES SET UP ######################### #
# ############################################## #

# [START howto_operator_cloudsql_create]
sql_instance_create_task = CloudSQLCreateInstanceOperator(
    body=body, instance=INSTANCE_NAME, task_id="sql_instance_create_task"
)
# [END howto_operator_cloudsql_create]

# ############################################## #
# ### MODIFYING INSTANCE AND ITS DATABASE ###### #
# ############################################## #

# [START howto_operator_cloudsql_patch]
sql_instance_patch_task = CloudSQLInstancePatchOperator(
    body=patch_body, instance=INSTANCE_NAME, task_id="sql_instance_patch_task"
)
# [END howto_operator_cloudsql_patch]

# [START howto_operator_cloudsql_db_create]
sql_db_create_task = CloudSQLCreateInstanceDatabaseOperator(
    body=db_create_body, instance=INSTANCE_NAME, task_id="sql_db_create_task"
)
# [END howto_operator_cloudsql_db_create]

# [START howto_operator_cloudsql_db_patch]
sql_db_patch_task = CloudSQLPatchInstanceDatabaseOperator(
    body=db_patch_body,
    instance=INSTANCE_NAME,
    database=DB_NAME,
    task_id="sql_db_patch_task",
)
# [END howto_operator_cloudsql_db_patch]

# ############################################## #
# ### EXPORTING & IMPORTING SQL ################ #
# ############################################## #
# SplitResult indices: [1] is the netloc (GCS bucket), [2] is the path.
file_url_split = urlsplit(FILE_URI)

# For export & import to work we need to add the Cloud SQL instance's Service Account
# write access to the destination GCS bucket.
# The service-account email is pulled at runtime from the create task's XCom.
service_account_email = XComArg(sql_instance_create_task, key="service_account_email")

# [START howto_operator_cloudsql_export_gcs_permissions]
sql_gcp_add_bucket_permission_task = GCSBucketCreateAclEntryOperator(
    entity=f"user-{service_account_email}",
    role="WRITER",
    bucket=file_url_split[1],  # netloc (bucket)
    task_id="sql_gcp_add_bucket_permission_task",
)
# [END howto_operator_cloudsql_export_gcs_permissions]

# [START howto_operator_cloudsql_export]
sql_export_task = CloudSQLExportInstanceOperator(
    body=export_body, instance=INSTANCE_NAME, task_id="sql_export_task"
)
# [END howto_operator_cloudsql_export]

# [START howto_operator_cloudsql_export_async]
sql_export_def_task = CloudSQLExportInstanceOperator(
    body=export_body_deferrable,
    instance=INSTANCE_NAME,
    task_id="sql_export_def_task",
    deferrable=True,
)
# [END howto_operator_cloudsql_export_async]

# For import to work we need to add the Cloud SQL instance's Service Account
# read access to the target GCS object.
# [START howto_operator_cloudsql_import_gcs_permissions]
sql_gcp_add_object_permission_task = GCSObjectCreateAclEntryOperator(
    entity=f"user-{service_account_email}",
    role="READER",
    bucket=file_url_split[1],  # netloc (bucket)
    object_name=file_url_split[2][1:],  # path (strip first '/')
    task_id="sql_gcp_add_object_permission_task",
)
# [END howto_operator_cloudsql_import_gcs_permissions]

# [START howto_operator_cloudsql_import]
sql_import_task = CloudSQLImportInstanceOperator(
    body=import_body, instance=INSTANCE_NAME, task_id="sql_import_task"
)
# [END howto_operator_cloudsql_import]

# ############################################## #
# ### CLONE AN INSTANCE ######################## #
# ############################################## #
# [START howto_operator_cloudsql_clone]
sql_instance_clone = CloudSQLCloneInstanceOperator(
    instance=INSTANCE_NAME,
    destination_instance_name=CLONED_INSTANCE_NAME,
    task_id="sql_instance_clone",
)
# [END howto_operator_cloudsql_clone]

# ############################################## #
# ### DELETING A DATABASE FROM AN INSTANCE ##### #
# ############################################## #
# [START howto_operator_cloudsql_db_delete]
sql_db_delete_task = CloudSQLDeleteInstanceDatabaseOperator(
    instance=INSTANCE_NAME,
    database=DB_NAME,
    task_id="sql_db_delete_task",
    trigger_rule=TriggerRule.ALL_DONE,  # clean up even if an upstream task failed
)
# [END howto_operator_cloudsql_db_delete]

# ############################################## #
# ### INSTANCES TEAR DOWN ###################### #
############################################## #sql_instance_clone_delete_task=CloudSQLDeleteInstanceOperator(instance=CLONED_INSTANCE_NAME,task_id="sql_instance_clone_delete_task",trigger_rule=TriggerRule.ALL_DONE,)# [START howto_operator_cloudsql_delete]sql_instance_delete_task=CloudSQLDeleteInstanceOperator(instance=INSTANCE_NAME,task_id="sql_instance_delete_task",trigger_rule=TriggerRule.ALL_DONE)# [END howto_operator_cloudsql_delete]delete_bucket=GCSDeleteBucketOperator(task_id="delete_bucket",bucket_name=BUCKET_NAME,trigger_rule=TriggerRule.ALL_DONE)(# TEST SETUPcreate_bucket# TEST BODY>>sql_instance_create_task>>sql_instance_patch_task>>sql_db_create_task>>sql_db_patch_task>>sql_gcp_add_bucket_permission_task>>sql_export_task>>sql_export_def_task>>sql_gcp_add_object_permission_task>>sql_import_task>>sql_instance_clone>>sql_db_delete_task>>sql_instance_clone_delete_task>>sql_instance_delete_task# TEST TEARDOWN>>delete_bucket)# ### Everything below this line is not part of example #### ### Just for system tests purpose ###fromdev.tests_common.test_utils.watcherimportwatcher# This test needs watcher in order to properly mark success/failure# when "tearDown" task with trigger rule is part of the DAGlist(dag.tasks)>>watcher()fromdev.tests_common.test_utils.system_testsimportget_test_run# noqa: E402# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)