Source code for airflow.operators.s3_to_redshift_operator

# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from airflow.hooks.postgres_hook import PostgresHook
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults

class S3ToRedshiftTransfer(BaseOperator):
    """
    Executes a COPY command to load files from S3 to Redshift

    :param schema: reference to a specific schema in redshift database
    :type schema: string
    :param table: reference to a specific table in redshift database
    :type table: string
    :param s3_bucket: reference to a specific S3 bucket
    :type s3_bucket: string
    :param s3_key: reference to a specific S3 key
    :type s3_key: string
    :param redshift_conn_id: reference to a specific redshift database
    :type redshift_conn_id: string
    :param aws_conn_id: reference to a specific S3 connection
    :type aws_conn_id: string
    :param copy_options: reference to a list of COPY options
    :type copy_options: list
    """

    template_fields = ()
    template_ext = ()
    ui_color = '#ededed'

    @apply_defaults
    def __init__(
            self,
            schema,
            table,
            s3_bucket,
            s3_key,
            redshift_conn_id='redshift_default',
            aws_conn_id='aws_default',
            copy_options=tuple(),
            autocommit=False,
            parameters=None,
            *args, **kwargs):
        super(S3ToRedshiftTransfer, self).__init__(*args, **kwargs)
        self.schema = schema
        self.table = table
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.redshift_conn_id = redshift_conn_id
        self.aws_conn_id = aws_conn_id
        self.copy_options = copy_options
        self.autocommit = autocommit
        self.parameters = parameters
    def execute(self, context):
        self.hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        self.s3 = S3Hook(aws_conn_id=self.aws_conn_id)
        # Pull the AWS credentials off the S3 connection so they can be
        # inlined into the COPY statement below.
        credentials = self.s3.get_credentials()
        copy_options = '\n\t\t\t'.join(self.copy_options)

        copy_query = """
            COPY {schema}.{table}
            FROM 's3://{s3_bucket}/{s3_key}/{table}'
            with credentials
            'aws_access_key_id={access_key};aws_secret_access_key={secret_key}'
            {copy_options};
        """.format(schema=self.schema,
                   table=self.table,
                   s3_bucket=self.s3_bucket,
                   s3_key=self.s3_key,
                   access_key=credentials.access_key,
                   secret_key=credentials.secret_key,
                   copy_options=copy_options)

        self.log.info('Executing COPY command...')
        self.hook.run(copy_query, self.autocommit)
        self.log.info("COPY command complete...")
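
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; not part of the original module). Every
# name in it -- DAG id, schema, table, bucket, key prefix, copy options --
# is a hypothetical placeholder, and the connection IDs simply spell out the
# operator's defaults.
#
#     from datetime import datetime
#
#     from airflow import DAG
#     from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer
#
#     dag = DAG(
#         dag_id='example_s3_to_redshift',          # hypothetical DAG id
#         start_date=datetime(2018, 1, 1),
#         schedule_interval='@daily')
#
#     load_events = S3ToRedshiftTransfer(
#         task_id='load_events',
#         schema='analytics',                       # hypothetical target schema
#         table='events',                           # hypothetical target table
#         s3_bucket='my-data-bucket',               # hypothetical bucket
#         s3_key='exports',                         # hypothetical key prefix
#         copy_options=('CSV', 'IGNOREHEADER 1'),   # joined verbatim into COPY
#         redshift_conn_id='redshift_default',
#         aws_conn_id='aws_default',
#         dag=dag)
#
# With these values, execute() would render a statement roughly like:
#
#     COPY analytics.events
#     FROM 's3://my-data-bucket/exports/events'
#     with credentials
#     'aws_access_key_id=...;aws_secret_access_key=...'
#     CSV
#     IGNOREHEADER 1;
#
# Note that the operator appends the table name to s3_key, so the input files
# are expected under s3://<s3_bucket>/<s3_key>/<table>.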