Source code for tests.system.google.cloud.compute.example_compute_ssh_parallel
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that starts, stops and sets the machine type of a Google Compute
Engine instance.
"""

from __future__ import annotations

import os
from datetime import datetime

from airflow.models.dag import DAG
from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
from airflow.providers.google.cloud.operators.compute import (
    ComputeEngineDeleteInstanceOperator,
    ComputeEngineInsertInstanceOperator,
)
from airflow.providers.ssh.operators.ssh import SSHOperator
from airflow.utils.trigger_rule import TriggerRule

from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

# [START howto_operator_gce_args_common]
# [END howto_operator_gce_insert]# [START howto_execute_command_on_remote_1]metadata_without_iap_tunnel=SSHOperator(task_id="metadata_without_iap_tunnel",ssh_hook=ComputeEngineSSHHook(user="username1",instance_name=GCE_INSTANCE_NAME,zone=LOCATION,project_id=PROJECT_ID,use_oslogin=False,use_iap_tunnel=False,max_retries=5,cmd_timeout=1,),command="echo metadata_without_iap_tunnel",)# [END howto_execute_command_on_remote_1]# [START howto_execute_command_on_remote_2]metadata_with_iap_tunnel=SSHOperator(task_id="metadata_with_iap_tunnel",ssh_hook=ComputeEngineSSHHook(user="username2",instance_name=GCE_INSTANCE_NAME,zone=LOCATION,use_oslogin=False,use_iap_tunnel=True,max_retries=5,cmd_timeout=1,),command="echo metadata_with_iap_tunnel",)# [END howto_execute_command_on_remote_2]# [START howto_operator_gce_delete_no_project_id]gce_instance_delete=ComputeEngineDeleteInstanceOperator(task_id="gcp_compute_delete_instance_task",zone=LOCATION,resource_id=GCE_INSTANCE_NAME,trigger_rule=TriggerRule.ALL_DONE,)# [END howto_operator_gce_delete_no_project_id](# TEST SETUPgce_instance_insert# TEST BODY>>[metadata_without_iap_tunnel,metadata_with_iap_tunnel]# TEST TEARDOWN>>gce_instance_delete)# ### Everything below this line is not part of example #### ### Just for system tests purpose ###fromtests_common.test_utils.watcherimportwatcher# This test needs watcher in order to properly mark success/failure# when "tearDown" task with trigger rule is part of the DAGlist(dag.tasks)>>watcher()fromtests_common.test_utils.system_testsimportget_test_run# noqa: E402# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)