Source code for airflow.providers.amazon.aws.transfers.salesforce_to_s3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import tempfile
from typing import Dict, Optional

from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.salesforce.hooks.salesforce import SalesforceHook


class SalesforceToS3Operator(BaseOperator):
"""
    Submits a Salesforce query and uploads the results to AWS S3.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:SalesforceToS3Operator`

:param salesforce_query: The query to send to Salesforce.
:type salesforce_query: str
:param s3_bucket_name: The bucket name to upload to.
:type s3_bucket_name: str
:param s3_key: The object name to set when uploading the file.
:type s3_key: str
:param salesforce_conn_id: The name of the connection that has the parameters needed
to connect to Salesforce.
:type salesforce_conn_id: str
:param export_format: Desired format of files to be exported.
:type export_format: str
:param query_params: Additional optional arguments to be passed to the HTTP request querying Salesforce.
:type query_params: dict
:param include_deleted: True if the query should include deleted records.
:type include_deleted: bool
:param coerce_to_timestamp: True if you want all datetime fields to be converted into Unix timestamps.
False if you want them to be left in the same format as they were in Salesforce.
Leaving the value as False will result in datetimes being strings. Default: False
:type coerce_to_timestamp: bool
:param record_time_added: True if you want to add a Unix timestamp field
to the resulting data that marks when the data was fetched from Salesforce. Default: False
:type record_time_added: bool
    :param aws_conn_id: The name of the connection that has the parameters we need to connect to S3.
    :type aws_conn_id: str
    :param replace: A flag to decide whether or not to overwrite the key
        if it already exists. If replace is False and the key exists, an
        error will be raised.
    :type replace: bool
:param encrypt: If True, the file will be encrypted on the server-side by S3 and will
be stored in an encrypted form while at rest in S3.
:type encrypt: bool
:param gzip: If True, the file will be compressed locally.
:type gzip: bool
:param acl_policy: String specifying the canned ACL policy for the file being uploaded
to the S3 bucket.
:type acl_policy: str
"""
    template_fields = ("salesforce_query", "s3_bucket_name", "s3_key")
    template_ext = (".sql",)
    template_fields_renderers = {"salesforce_query": "sql"}

def __init__(
self,
*,
salesforce_query: str,
s3_bucket_name: str,
s3_key: str,
salesforce_conn_id: str,
export_format: str = "csv",
query_params: Optional[Dict] = None,
include_deleted: bool = False,
coerce_to_timestamp: bool = False,
record_time_added: bool = False,
aws_conn_id: str = "aws_default",
replace: bool = False,
encrypt: bool = False,
gzip: bool = False,
acl_policy: Optional[str] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.salesforce_query = salesforce_query
self.s3_bucket_name = s3_bucket_name
self.s3_key = s3_key
self.salesforce_conn_id = salesforce_conn_id
self.export_format = export_format
self.query_params = query_params
self.include_deleted = include_deleted
self.coerce_to_timestamp = coerce_to_timestamp
self.record_time_added = record_time_added
self.aws_conn_id = aws_conn_id
self.replace = replace
self.encrypt = encrypt
self.gzip = gzip
self.acl_policy = acl_policy

    def execute(self, context: Dict) -> str:
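        # Run the SOQL query against Salesforce; the matching records are
        # returned under the "records" key of the response.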
salesforce_hook = SalesforceHook(salesforce_conn_id=self.salesforce_conn_id)
response = salesforce_hook.make_query(
query=self.salesforce_query,
include_deleted=self.include_deleted,
query_params=self.query_params,
)
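
        # Export the records to a temporary local file in the requested
        # format; the directory (and the file in it) is removed on exit.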
with tempfile.TemporaryDirectory() as tmp:
path = os.path.join(tmp, "salesforce_temp_file")
salesforce_hook.write_object_to_file(
query_results=response["records"],
filename=path,
fmt=self.export_format,
coerce_to_timestamp=self.coerce_to_timestamp,
record_time_added=self.record_time_added,
)
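
            # Upload the exported file to S3 while the temporary directory
            # still exists.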
s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
s3_hook.load_file(
filename=path,
key=self.s3_key,
bucket_name=self.s3_bucket_name,
replace=self.replace,
encrypt=self.encrypt,
gzip=self.gzip,
acl_policy=self.acl_policy,
)
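
        # The returned URI is pushed to XCom by default, so downstream tasks
        # can reference the uploaded object.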
s3_uri = f"s3://{self.s3_bucket_name}/{self.s3_key}"
        self.log.info("Salesforce data uploaded to S3 at %s.", s3_uri)

return s3_uri
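

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of this module): wiring the operator
# into a DAG. The dag_id, connection ids, bucket, key, and query below are
# assumptions for demonstration only.
# ---------------------------------------------------------------------------
from datetime import datetime

from airflow import DAG

with DAG(
    dag_id="example_salesforce_to_s3",  # assumed DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,  # trigger manually for this example
    catchup=False,
) as dag:
    upload_accounts = SalesforceToS3Operator(
        task_id="upload_salesforce_accounts",
        salesforce_query="SELECT Id, Name FROM Account",  # assumed SOQL query
        s3_bucket_name="my-data-lake",  # assumed bucket name
        s3_key="salesforce/accounts.csv",  # assumed object key
        salesforce_conn_id="salesforce",  # assumed Salesforce connection id
        export_format="csv",
        replace=True,
    )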