Source code for tests.system.amazon.aws.example_comprehend_document_classifier
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.decorators import task, task_group
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
from airflow.providers.amazon.aws.operators.comprehend import (
    ComprehendCreateDocumentClassifierOperator,
)
from airflow.providers.amazon.aws.operators.s3 import (
    S3CopyObjectOperator,
    S3CreateBucketOperator,
    S3CreateObjectOperator,
    S3DeleteBucketOperator,
)
from airflow.providers.amazon.aws.sensors.comprehend import (
    ComprehendCreateDocumentClassifierCompletedSensor,
)
from airflow.utils.trigger_rule import TriggerRule

from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
# NOTE: test_context, ANNOTATION_BUCKET_KEY, TRAINING_DATA_PREFIX, BUCKET_NAME_KEY,
# ANNOTATIONS, the create_kwargs_* helpers, the document_classifier_workflow task
# group, and the enclosing `with DAG(...) as dag:` block are defined elsewhere in
# the full example file and are not reproduced in this excerpt.

env_id = test_context["ENV_ID"]
classifier_name = f"{env_id}-custom-document-classifier"
bucket_name = f"{env_id}-comprehend-document-classifier"

input_data_configurations = {
    "S3Uri": f"s3://{bucket_name}/{ANNOTATION_BUCKET_KEY}",
    "DataFormat": "COMPREHEND_CSV",
    "DocumentType": "SEMI_STRUCTURED_DOCUMENT",
    "Documents": {"S3Uri": f"s3://{bucket_name}/{TRAINING_DATA_PREFIX}/"},
    "DocumentReaderConfig": {
        "DocumentReadAction": "TEXTRACT_DETECT_DOCUMENT_TEXT",
        "DocumentReadMode": "SERVICE_DEFAULT",
    },
}
output_data_configurations = {"S3Uri": f"s3://{bucket_name}/output/"}
document_classifier_kwargs = {"VersionName": "v1"}

create_bucket = S3CreateBucketOperator(
    task_id="create_bucket",
    bucket_name=bucket_name,
)

discharge_kwargs = create_kwargs_discharge()
s3_copy_discharge_task = S3CopyObjectOperator.partial(
    task_id="s3_copy_discharge_task",
    source_bucket_name=test_context[BUCKET_NAME_KEY],
    dest_bucket_name=bucket_name,
    meta_data_directive="REPLACE",
).expand_kwargs(discharge_kwargs)

doctors_notes_kwargs = create_kwargs_doctors_notes()
s3_copy_doctors_notes_task = S3CopyObjectOperator.partial(
    task_id="s3_copy_doctors_notes_task",
    source_bucket_name=test_context[BUCKET_NAME_KEY],
    dest_bucket_name=bucket_name,
    meta_data_directive="REPLACE",
).expand_kwargs(doctors_notes_kwargs)

upload_annotation_file = S3CreateObjectOperator(
    task_id="upload_annotation_file",
    s3_bucket=bucket_name,
    s3_key=ANNOTATION_BUCKET_KEY,
    data=ANNOTATIONS.encode("utf-8"),
)

delete_bucket = S3DeleteBucketOperator(
    task_id="delete_bucket",
    trigger_rule=TriggerRule.ALL_DONE,
    bucket_name=bucket_name,
    force_delete=True,
)

chain(
    # TEST SETUP
    test_context,
    create_bucket,
    s3_copy_discharge_task,
    s3_copy_doctors_notes_task,
    upload_annotation_file,
    # TEST BODY
    document_classifier_workflow(),
    # TEST TEARDOWN
    delete_bucket,
)

from tests_common.test_utils.watcher import watcher

# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
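
The chain() above calls document_classifier_workflow(), whose body is not part of this excerpt. Below is a minimal sketch of how such a task group could wire together the imported ComprehendCreateDocumentClassifierOperator, ComprehendCreateDocumentClassifierCompletedSensor, and ComprehendHook using the config dicts defined earlier; the ROLE_ARN_KEY context key and the task ids are assumptions for illustration, not the example's exact code.

# Hypothetical sketch of the elided task group; names flagged in comments are assumptions.
@task_group
def document_classifier_workflow():
    # Start training a custom document classifier from the data staged in S3.
    create_document_classifier = ComprehendCreateDocumentClassifierOperator(
        task_id="create_document_classifier",
        document_classifier_name=classifier_name,
        input_data_config=input_data_configurations,
        output_data_config=output_data_configurations,
        mode="MULTI_CLASS",
        data_access_role_arn=test_context[ROLE_ARN_KEY],  # ROLE_ARN_KEY: assumed context key
        language_code="en",
        document_classifier_kwargs=document_classifier_kwargs,
    )

    # Poll until the classifier version has finished training.
    await_create_document_classifier = ComprehendCreateDocumentClassifierCompletedSensor(
        task_id="await_create_document_classifier",
        document_classifier_arn=create_document_classifier.output,
    )

    @task(trigger_rule=TriggerRule.ALL_DONE)
    def delete_classifier(document_classifier_arn: str):
        # Tear down the trained classifier via the underlying boto3 client.
        ComprehendHook().conn.delete_document_classifier(
            DocumentClassifierArn=document_classifier_arn
        )

    chain(
        create_document_classifier,
        await_create_document_classifier,
        delete_classifier(create_document_classifier.output),
    )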