# Source code for airflow.exceptions
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Note: Any AirflowException raised is expected to cause the TaskInstance
#       to be marked in an ERROR state
"""Exceptions used by Airflow"""
import datetime
import warnings
from http import HTTPStatus
from typing import Any, Dict, List, NamedTuple, Optional, Sized
class AirflowException(Exception):
    """
    Base class for all Airflow's errors.

    Each custom exception should be derived from this class.
    """

    # Default HTTP status associated with this error (500 Internal Server Error).
    # Subclasses override this to map themselves to more specific statuses.
    status_code = HTTPStatus.INTERNAL_SERVER_ERROR
class AirflowBadRequest(AirflowException):
    """Raise when the application or server cannot handle the request."""

    # HTTP status associated with this error (400 Bad Request).
    status_code = HTTPStatus.BAD_REQUEST
class AirflowNotFoundException(AirflowException):
    """Raise when the requested object/resource is not available in the system."""

    # HTTP status associated with this error (404 Not Found).
    status_code = HTTPStatus.NOT_FOUND
class AirflowConfigException(AirflowException):
    """Raise when there is a configuration problem."""
class AirflowSensorTimeout(AirflowException):
    """Raise when there is a timeout on sensor polling."""
class AirflowRescheduleException(AirflowException):
    """
    Raise when the task should be re-scheduled at a later time.

    :param reschedule_date: The date when the task should be rescheduled
    """

    def __init__(self, reschedule_date):
        super().__init__()
        # Kept on the exception object so the caller that catches it can read
        # when to reschedule; deliberately not passed to the base Exception.
        self.reschedule_date = reschedule_date
class AirflowSmartSensorException(AirflowException):
    """
    Raise after the task registers itself in the smart sensor service.

    It should exit without failing a task.
    """
class InvalidStatsNameException(AirflowException):
    """Raise when the name of the stats is invalid."""
class AirflowTaskTimeout(AirflowException):
    """Raise when the task execution times out."""
class AirflowWebServerTimeout(AirflowException):
    """Raise when the web server times out."""
class AirflowSkipException(AirflowException):
    """Raise when the task should be skipped."""
class AirflowFailException(AirflowException):
    """Raise when the task should be failed without retrying."""
class AirflowOptionalProviderFeatureException(AirflowException):
    """Raised by providers when imports are missing for optional provider features."""
class UnmappableOperator(AirflowException):
    """Raise when an operator is not implemented to be mappable."""
class UnmappableXComTypePushed(AirflowException):
    """Raise when an unmappable type is pushed as a mapped downstream's dependency."""

    def __init__(self, value: Any) -> None:
        super().__init__(value)
        # Offending XCom value, kept so __str__ can report its type.
        self.value = value

    def __str__(self) -> str:
        type_name = type(self.value).__qualname__
        return f"unmappable return type {type_name!r}"
class UnmappableXComLengthPushed(AirflowException):
    """Raise when the pushed value is too large to map as a downstream's dependency."""

    def __init__(self, value: Sized, max_length: int) -> None:
        super().__init__(value)
        # Oversized collection and the configured cap, reported by __str__.
        self.value = value
        self.max_length = max_length

    def __str__(self) -> str:
        actual = len(self.value)
        return f"unmappable return value length: {actual} > {self.max_length}"
class AirflowDagCycleException(AirflowException):
    """Raise when there is a cycle in the DAG definition."""
class AirflowDagDuplicatedIdException(AirflowException):
    """Raise when a DAG's ID is already used by another DAG."""

    def __init__(self, dag_id: str, incoming: str, existing: str) -> None:
        super().__init__(dag_id, incoming, existing)
        self.dag_id = dag_id
        self.incoming = incoming
        self.existing = existing

    def __str__(self) -> str:
        return (
            f"Ignoring DAG {self.dag_id} from {self.incoming}"
            f" - also found in {self.existing}"
        )
class AirflowDagInconsistent(AirflowException):
    """Raise when a DAG has inconsistent attributes."""
class AirflowClusterPolicyViolation(AirflowException):
    """Raise when there is a violation of a Cluster Policy in the DAG definition."""
class DagNotFound(AirflowNotFoundException):
    """Raise when a DAG is not available in the system."""
class DagCodeNotFound(AirflowNotFoundException):
    """Raise when a DAG's code is not available in the system."""
class DagRunNotFound(AirflowNotFoundException):
    """Raise when a DAG Run is not available in the system."""
class DagRunAlreadyExists(AirflowBadRequest):
    """Raise when creating a DAG run for a DAG which already has a DAG run entry."""
class DagFileExists(AirflowBadRequest):
    """Raise when a DAG ID is still in DagBag i.e., DAG file is in DAG folder."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Deprecated shim: warn on every instantiation so callers migrate away.
        # stacklevel=2 points the warning at the caller, not this __init__.
        warnings.warn("DagFileExists is deprecated and will be removed.", DeprecationWarning, stacklevel=2)
class DuplicateTaskIdFound(AirflowException):
    """Raise when a Task with a duplicate task_id is defined in the same DAG."""
class TaskAlreadyInTaskGroup(AirflowException):
    """Raise when a Task cannot be added to a TaskGroup since it already belongs to another TaskGroup."""

    def __init__(self, task_id: str, existing_group_id: Optional[str], new_group_id: str) -> None:
        super().__init__(task_id, new_group_id)
        self.task_id = task_id
        self.existing_group_id = existing_group_id
        self.new_group_id = new_group_id

    def __str__(self) -> str:
        # An existing_group_id of None means the task sits directly under the DAG root.
        if self.existing_group_id is not None:
            current_location = f"group {self.existing_group_id!r}"
        else:
            current_location = "the DAG's root group"
        return f"cannot add {self.task_id!r} to {self.new_group_id!r} (already in {current_location})"
class SerializationError(AirflowException):
    """A problem occurred when trying to serialize a DAG."""
class ParamValidationError(AirflowException):
    """Raise when DAG params are invalid."""
class TaskNotFound(AirflowNotFoundException):
    """Raise when a Task is not available in the system."""
class TaskInstanceNotFound(AirflowNotFoundException):
    """Raise when a task instance is not available in the system."""
class PoolNotFound(AirflowNotFoundException):
    """Raise when a Pool is not available in the system."""
class NoAvailablePoolSlot(AirflowException):
    """Raise when there are not enough slots in the pool."""
class DagConcurrencyLimitReached(AirflowException):
    """Raise when the DAG max_active_tasks limit is reached."""
class TaskConcurrencyLimitReached(AirflowException):
    """Raise when the task max_active_tasks limit is reached."""
class BackfillUnfinished(AirflowException):
    """
    Raises when not all tasks succeed in backfill.

    :param message: The human-readable description of the exception
    :param ti_status: The information about all task statuses
    """

    def __init__(self, message, ti_status):
        super().__init__(message)
        # Per-task-instance status information for the unfinished backfill,
        # kept for the caller that catches this exception to inspect.
        self.ti_status = ti_status
class FileSyntaxError(NamedTuple):
    """
    Information about a single error in a file.

    Fields are read by __str__ below (and by AirflowFileParseException.__str__
    in this module): the line the error was found on (None when no location is
    available) and a human-readable description of the error.
    """

    line_no: Optional[int]
    message: str

    def __str__(self) -> str:
        # Previous message was garbled ("... Line number: s{line_no},"):
        # a stray "s" left over from an inlined str() call plus a dangling comma.
        return f"{self.message}. Line number: {self.line_no}"
class AirflowFileParseException(AirflowException):
    """
    Raises when a connection or variable file cannot be parsed.

    :param msg: The human-readable description of the exception
    :param file_path: A processed file that contains errors
    :param parse_errors: File syntax errors
    """

    def __init__(self, msg: str, file_path: str, parse_errors: List[FileSyntaxError]) -> None:
        super().__init__(msg)
        self.msg = msg
        self.file_path = file_path
        self.parse_errors = parse_errors

    def __str__(self) -> str:
        # Imported lazily to avoid a circular import at module load time.
        from airflow.utils.code_utils import prepare_code_snippet
        from airflow.utils.platform import is_tty

        pieces = [f"{self.msg}\nFilename: {self.file_path}\n\n"]
        for error_no, parse_error in enumerate(self.parse_errors, 1):
            banner = "=" * 20
            pieces.append(f"{banner} Parse error {error_no:3} {banner}\n")
            pieces.append(f"{parse_error.message}\n")
            if parse_error.line_no:
                pieces.append(f"Line number:  {parse_error.line_no}\n")
                # Only render the code snippet when attached to a terminal.
                if is_tty():
                    pieces.append("\n" + prepare_code_snippet(self.file_path, parse_error.line_no) + "\n")
        return "".join(pieces)
class ConnectionNotUnique(AirflowException):
    """Raise when multiple values are found for the same connection ID."""
class TaskDeferred(BaseException):
    """
    Special exception raised to signal that the operator it was raised from
    wishes to defer until a trigger fires.

    Derives from BaseException (not AirflowException) so it is not swallowed
    by handlers that catch Exception.

    :param trigger: the trigger to wait on before resuming
    :param method_name: method to invoke on the operator when the trigger fires
    :param kwargs: extra keyword arguments passed to that method, if any
    :param timeout: optional deadline for the deferral; must be timedelta-like
    """

    def __init__(
        self,
        *,
        trigger,
        method_name: str,
        kwargs: Optional[Dict[str, Any]] = None,
        timeout: Optional[datetime.timedelta] = None,
    ):
        super().__init__()
        self.trigger = trigger
        self.method_name = method_name
        self.kwargs = kwargs
        self.timeout = timeout
        # Duck-typed runtime check: anything without total_seconds() is
        # rejected as not timedelta-like.
        if not (self.timeout is None or hasattr(self.timeout, "total_seconds")):
            raise ValueError("Timeout value must be a timedelta")

    def __repr__(self) -> str:
        return f"<TaskDeferred trigger={self.trigger} method={self.method_name}>"
class TaskDeferralError(AirflowException):
    """Raised when a task failed during deferral for some reason."""
class PodReconciliationError(AirflowException):
    """Raised when an error is encountered while trying to merge pod configs."""