# Source code for airflow.providers.amazon.aws.hooks.glue_crawler

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# specific language governing permissions and limitations
# under the License.
import sys
import warnings
from time import sleep

# functools.cached_property exists from Python 3.8; fall back to the
# third-party backport on older interpreters only.
if sys.version_info >= (3, 8):
    from functools import cached_property
else:
    from cached_property import cached_property

from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook

class GlueCrawlerHook(AwsBaseHook):
    """
    Interacts with AWS Glue Crawler.

    Additional arguments (such as ``aws_conn_id``) may be specified and
    are passed down to the underlying AwsBaseHook.

    .. seealso::
        :class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
    """

    def __init__(self, *args, **kwargs):
        # Force the boto3 client type; callers only choose connection/region.
        kwargs['client_type'] = 'glue'
        super().__init__(*args, **kwargs)

    @cached_property
    def glue_client(self):
        """:return: AWS Glue client"""
        return self.get_conn()

    def has_crawler(self, crawler_name) -> bool:
        """
        Checks if the crawler already exists

        :param crawler_name: unique crawler name per AWS account
        :return: Returns True if the crawler already exists and False if not.
        """
        self.log.info("Checking if crawler already exists: %s", crawler_name)

        # EAFP: a missing crawler surfaces as EntityNotFoundException.
        try:
            self.get_crawler(crawler_name)
            return True
        except self.glue_client.exceptions.EntityNotFoundException:
            return False

    def get_crawler(self, crawler_name: str) -> dict:
        """
        Gets crawler configurations

        :param crawler_name: unique crawler name per AWS account
        :return: Nested dictionary of crawler configurations
        """
        return self.glue_client.get_crawler(Name=crawler_name)['Crawler']

    def update_crawler(self, **crawler_kwargs) -> bool:
        """
        Updates crawler configurations

        :param crawler_kwargs: Keyword args that define the configurations used for the crawler
        :return: True if crawler was updated and false otherwise
        """
        crawler_name = crawler_kwargs['Name']
        current_crawler = self.get_crawler(crawler_name)

        # Only the keys whose values differ from the live configuration.
        update_config = {
            key: value
            for key, value in crawler_kwargs.items()
            if current_crawler[key] != crawler_kwargs[key]
        }
        if update_config != {}:
            self.log.info("Updating crawler: %s", crawler_name)
            self.glue_client.update_crawler(**crawler_kwargs)
            self.log.info("Updated configurations: %s", update_config)
            return True
        else:
            return False

    def create_crawler(self, **crawler_kwargs) -> str:
        """
        Creates an AWS Glue Crawler

        :param crawler_kwargs: Keyword args that define the configurations used to create the crawler
        :return: Name of the crawler
        """
        crawler_name = crawler_kwargs['Name']
        self.log.info("Creating crawler: %s", crawler_name)
        return self.glue_client.create_crawler(**crawler_kwargs)

    def start_crawler(self, crawler_name: str) -> dict:
        """
        Triggers the AWS Glue crawler

        :param crawler_name: unique crawler name per AWS account
        :return: Empty dictionary
        """
        self.log.info("Starting crawler %s", crawler_name)
        return self.glue_client.start_crawler(Name=crawler_name)

    def wait_for_crawler_completion(self, crawler_name: str, poll_interval: int = 5) -> str:
        """
        Waits until Glue crawler completes and
        returns the status of the latest crawl run.
        Raises AirflowException if the crawler fails or is cancelled.

        :param crawler_name: unique crawler name per AWS account
        :param poll_interval: Time (in seconds) to wait between two consecutive calls to check crawler status
        :return: Crawler's status
        """
        failed_status = ['FAILED', 'CANCELLED']

        while True:
            crawler = self.get_crawler(crawler_name)
            crawler_state = crawler['State']
            if crawler_state == 'READY':
                self.log.info("State: %s", crawler_state)
                self.log.info("crawler_config: %s", crawler)
                crawler_status = crawler['LastCrawl']['Status']
                if crawler_status in failed_status:
                    raise AirflowException(f"Status: {crawler_status}")
                metrics = self.glue_client.get_crawler_metrics(CrawlerNameList=[crawler_name])[
                    'CrawlerMetricsList'
                ][0]
                self.log.info("Status: %s", crawler_status)
                self.log.info("Last Runtime Duration (seconds): %s", metrics['LastRuntimeSeconds'])
                self.log.info("Median Runtime Duration (seconds): %s", metrics['MedianRuntimeSeconds'])
                self.log.info("Tables Created: %s", metrics['TablesCreated'])
                self.log.info("Tables Updated: %s", metrics['TablesUpdated'])
                self.log.info("Tables Deleted: %s", metrics['TablesDeleted'])

                return crawler_status
            else:
                self.log.info("Polling for AWS Glue crawler: %s ", crawler_name)
                self.log.info("State: %s", crawler_state)

                metrics = self.glue_client.get_crawler_metrics(CrawlerNameList=[crawler_name])[
                    'CrawlerMetricsList'
                ][0]
                time_left = int(metrics['TimeLeftSeconds'])

                if time_left > 0:
                    self.log.info("Estimated Time Left (seconds): %s", time_left)
                else:
                    self.log.info("Crawler should finish soon")

                sleep(poll_interval)
class AwsGlueCrawlerHook(GlueCrawlerHook):
    """
    This hook is deprecated.
    Please use :class:`airflow.providers.amazon.aws.hooks.glue_crawler.GlueCrawlerHook`.
    """

    def __init__(self, *args, **kwargs):
        # Emit the deprecation at the caller's frame (stacklevel=2) so the
        # warning points at the code that instantiated the old name.
        warnings.warn(
            "This hook is deprecated. "
            "Please use "
            ":class:`airflow.providers.amazon.aws.hooks.glue_crawler.GlueCrawlerHook`.",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)

# (end of module)