Source code for airflow.providers.amazon.aws.example_dags.example_local_to_s3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from datetime import datetime

from airflow import models
from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator

S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket")
S3_KEY = os.environ.get("S3_KEY", "key")
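# The values above are placeholders, used only when the S3_BUCKET and
# S3_KEY environment variables are not set; export them to target a real bucket.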

with models.DAG(
    "example_local_to_s3",
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),  # Override to match your needs
    catchup=False,
) as dag:
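    # With schedule_interval=None and catchup=False, this DAG never runs on a
    # schedule and never backfills; it executes only when triggered manually.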
    # [START howto_transfer_local_to_s3]
    create_local_to_s3_job = LocalFilesystemToS3Operator(
        task_id="create_local_to_s3_job",
        filename="relative/path/to/file.csv",
        dest_key=S3_KEY,
        dest_bucket=S3_BUCKET,
        replace=True,
    )
    # [END howto_transfer_local_to_s3]
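    # `filename` is the (templated) path on the worker's local filesystem;
    # `dest_key` and `dest_bucket` name the S3 target, and replace=True
    # overwrites any existing object at that key. Credentials are resolved
    # through the operator's aws_conn_id, which defaults to "aws_default".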