Source code for airflow.providers.amazon.aws.operators.s3_bucket
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains AWS S3 operators."""
from typing import Optional

from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook

class S3CreateBucketOperator(BaseOperator):
    """
    This operator creates an S3 bucket.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:S3CreateBucketOperator`

    :param bucket_name: The name of the bucket to create
    :type bucket_name: str
    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is None or empty, the default boto3 behaviour is used. If
        running Airflow in a distributed manner with aws_conn_id None or
        empty, the default boto3 configuration is used (and must be
        maintained on each worker node).
    :type aws_conn_id: Optional[str]
    :param region_name: AWS region name. If not specified, it is taken from the connection.
    :type region_name: Optional[str]
    """
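
    # NOTE: the constructor is not shown in this rendering of the source. The
    # sketch below is a minimal reconstruction consistent with the docstring
    # above and with the attributes used in execute(); the keyword-only
    # signature and the default values are assumptions, not confirmed from
    # the original module.
    def __init__(
        self,
        *,
        bucket_name: str,
        aws_conn_id: Optional[str] = "aws_default",
        region_name: Optional[str] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.bucket_name = bucket_name
        self.region_name = region_name
        self.aws_conn_id = aws_conn_id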

    def execute(self, context):
        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
        if not s3_hook.check_for_bucket(self.bucket_name):
            s3_hook.create_bucket(bucket_name=self.bucket_name, region_name=self.region_name)
            self.log.info("Created bucket with name: %s", self.bucket_name)
        else:
            self.log.info("Bucket with name: %s already exists", self.bucket_name)


class S3DeleteBucketOperator(BaseOperator):
    """
    This operator deletes an S3 bucket.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:S3DeleteBucketOperator`

    :param bucket_name: The name of the bucket to delete
    :type bucket_name: str
    :param force_delete: Forcibly delete all objects in the bucket before deleting the bucket itself
    :type force_delete: bool
    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is None or empty, the default boto3 behaviour is used. If
        running Airflow in a distributed manner with aws_conn_id None or
        empty, the default boto3 configuration is used (and must be
        maintained on each worker node).
    :type aws_conn_id: Optional[str]
    """
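
    # NOTE: as above, the constructor does not appear in this rendering; this
    # is a minimal reconstruction matching the docstring and the attributes
    # used in execute(). Defaults are assumptions.
    def __init__(
        self,
        bucket_name: str,
        force_delete: bool = False,
        aws_conn_id: Optional[str] = "aws_default",
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.bucket_name = bucket_name
        self.force_delete = force_delete
        self.aws_conn_id = aws_conn_id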

    def execute(self, context):
        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
        if s3_hook.check_for_bucket(self.bucket_name):
            s3_hook.delete_bucket(bucket_name=self.bucket_name, force_delete=self.force_delete)
            self.log.info("Deleted bucket with name: %s", self.bucket_name)
        else:
            self.log.info("Bucket with name: %s doesn't exist", self.bucket_name)
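

# Usage sketch (not part of the module): a standalone DAG file wiring both
# operators together, creating a bucket and then deleting it with its
# contents. The dag_id, task_ids, bucket name, region, and schedule are
# illustrative assumptions, not taken from the original source.
#
# from datetime import datetime
#
# from airflow import DAG
# from airflow.providers.amazon.aws.operators.s3_bucket import (
#     S3CreateBucketOperator,
#     S3DeleteBucketOperator,
# )
#
# with DAG(
#     dag_id="s3_bucket_example",         # assumed name for this sketch
#     start_date=datetime(2021, 1, 1),
#     schedule_interval=None,
# ) as dag:
#     create_bucket = S3CreateBucketOperator(
#         task_id="create_bucket",
#         bucket_name="my-example-bucket",  # assumed bucket name
#         region_name="us-east-1",
#     )
#     delete_bucket = S3DeleteBucketOperator(
#         task_id="delete_bucket",
#         bucket_name="my-example-bucket",
#         force_delete=True,               # empty the bucket before deleting it
#     )
#     create_bucket >> delete_bucket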