# Source code for airflow.exceptions (extracted from a documentation page).

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Note: Any AirflowException raised is expected to cause the TaskInstance
#       to be marked in an ERROR state
"""Exceptions used by Airflow"""
import datetime
import warnings
from typing import Any, Dict, List, NamedTuple, Optional

from airflow.api_connexion.exceptions import NotFound as ApiConnexionNotFound
from airflow.utils.code_utils import prepare_code_snippet
from airflow.utils.platform import is_tty


class AirflowException(Exception):
    """Base class for every error raised by Airflow.

    All custom Airflow exceptions should inherit from this class.
    """

    # HTTP status reported when this error surfaces through the API layer.
    status_code = 500
class AirflowBadRequest(AirflowException):
    """Raise when the application or server cannot handle the request."""

    # Mirrors HTTP 400 Bad Request.
    status_code = 400
class AirflowNotFoundException(AirflowException, ApiConnexionNotFound):
    """Raise when the requested object/resource is not available in the system."""

    # Mirrors HTTP 404 Not Found (also a Connexion API NotFound).
    status_code = 404
class AirflowConfigException(AirflowException):
    """Raise when there is a problem with the Airflow configuration."""


class AirflowSensorTimeout(AirflowException):
    """Raise when a sensor times out during polling."""
class AirflowRescheduleException(AirflowException):
    """Raise when the task should be re-scheduled at a later time.

    :param reschedule_date: The date when the task should be rescheduled
    :type reschedule_date: datetime.datetime
    """

    def __init__(self, reschedule_date):
        # Remember when the task asked to be woken up again.
        self.reschedule_date = reschedule_date
        super().__init__()
class AirflowSmartSensorException(AirflowException):
    """Raise after the task registers itself in the smart sensor service.

    The task should then exit without being marked as failed.
    """


class InvalidStatsNameException(AirflowException):
    """Raise when the name of a stat is invalid."""


class AirflowTaskTimeout(AirflowException):
    """Raise when the execution of a task times out."""


class AirflowWebServerTimeout(AirflowException):
    """Raise when the web server times out."""


class AirflowSkipException(AirflowException):
    """Raise to mark the current task as skipped."""


class AirflowFailException(AirflowException):
    """Raise to fail the current task immediately, without retrying."""


class AirflowDagCycleException(AirflowException):
    """Raise when a cycle is detected in a Dag definition."""
class AirflowDagDuplicatedIdException(AirflowException):
    """Raise when a Dag's ID is already used by another Dag"""

    def __init__(self, dag_id: str, incoming: str, existing: str) -> None:
        super().__init__(dag_id, incoming, existing)
        # Keep both conflicting locations around for the message below.
        self.existing = existing
        self.incoming = incoming
        self.dag_id = dag_id

    def __str__(self) -> str:
        return f"Ignoring DAG {self.dag_id} from {self.incoming} - also found in {self.existing}"
class AirflowClusterPolicyViolation(AirflowException):
    """Raise when a Dag definition violates a Cluster Policy."""


class AirflowTimetableInvalid(AirflowException):
    """Raise when a DAG has an invalid timetable."""


class DagNotFound(AirflowNotFoundException):
    """Raise when a DAG is not available in the system."""


class DagCodeNotFound(AirflowNotFoundException):
    """Raise when the code of a DAG is not available in the system."""


class DagRunNotFound(AirflowNotFoundException):
    """Raise when a DAG Run is not available in the system."""


class DagRunAlreadyExists(AirflowBadRequest):
    """Raise when creating a DAG run for a DAG that already has a DAG run entry."""
class DagFileExists(AirflowBadRequest):
    """Raise when a DAG ID is still in DagBag i.e., DAG file is in DAG folder"""

    def __init__(self, *args, **kwargs):
        # Deprecated: warn the caller (stacklevel=2 points at the raise site).
        warnings.warn("DagFileExists is deprecated and will be removed.", DeprecationWarning, stacklevel=2)
        super().__init__(*args, **kwargs)
class DuplicateTaskIdFound(AirflowException):
    """Raise when a Task with a duplicate task_id is defined in the same DAG."""


class SerializationError(AirflowException):
    """A problem occurred when trying to serialize a DAG."""


class TaskNotFound(AirflowNotFoundException):
    """Raise when a Task is not available in the system."""


class TaskInstanceNotFound(AirflowNotFoundException):
    """Raise when a Task Instance is not available in the system."""


class PoolNotFound(AirflowNotFoundException):
    """Raise when a Pool is not available in the system."""


class NoAvailablePoolSlot(AirflowException):
    """Raise when a pool does not have enough open slots."""


class DagConcurrencyLimitReached(AirflowException):
    """Raise when the max_active_tasks limit of a DAG is reached."""


class TaskConcurrencyLimitReached(AirflowException):
    """Raise when the max_active_tasks limit of a task is reached."""
class BackfillUnfinished(AirflowException):
    """Raises when not all tasks succeed in backfill.

    :param message: The human-readable description of the exception
    :param ti_status: The information about all task statuses
    """

    def __init__(self, message, ti_status):
        # Keep the aggregated per-task-instance status for later inspection.
        self.ti_status = ti_status
        super().__init__(message)
class FileSyntaxError(NamedTuple):
    """Information about a single syntax error found in a file.

    :param line_no: line on which the error occurred, or ``None`` when unknown
    :param message: human-readable description of the error
    """

    line_no: Optional[int]
    message: str

    def __str__(self):
        # Fixed: the original format string was garbled
        # (f"... Line number: s{str(self.line_no)},") and rendered as
        # e.g. "msg. Line number: s42," — stray "s", dangling comma.
        return f"{self.message}. Line number: {self.line_no}."
class AirflowFileParseException(AirflowException):
    """Raises when connection or variable file can not be parsed

    :param msg: The human-readable description of the exception
    :param file_path: A processed file that contains errors
    :param parse_errors: File syntax errors
    """

    def __init__(self, msg: str, file_path: str, parse_errors: List[FileSyntaxError]) -> None:
        super().__init__(msg)
        self.msg = msg
        self.file_path = file_path
        self.parse_errors = parse_errors

    def __str__(self):
        # Build the report in parts and join once, instead of repeated +=.
        chunks = [f"{self.msg}\nFilename: {self.file_path}\n\n"]
        for error_no, parse_error in enumerate(self.parse_errors, 1):
            rule = "=" * 20
            chunks.append(rule + f" Parse error {error_no:3} " + rule + "\n")
            chunks.append(f"{parse_error.message}\n")
            if parse_error.line_no:
                chunks.append(f"Line number: {parse_error.line_no}\n")
                # Only interactive (tty) sessions get the code snippet.
                if is_tty():
                    chunks.append("\n" + prepare_code_snippet(self.file_path, parse_error.line_no) + "\n")
        return "".join(chunks)
class ConnectionNotUnique(AirflowException):
    """Raise when more than one value is found for the same conn_id."""
class TaskDeferred(BaseException):
    """Special exception raised to signal that the operator it was raised from
    wishes to defer until a trigger fires.

    Derives from ``BaseException`` (not ``AirflowException``) so that blanket
    ``except Exception`` handlers in task code do not swallow it.

    :param trigger: the trigger instance to defer on
    :param method_name: name of the operator method to resume in
    :param kwargs: extra keyword arguments passed to that method
    :param timeout: optional deferral timeout (must be timedelta-like)
    """

    def __init__(
        self,
        *,
        trigger,
        method_name: str,
        kwargs: Optional[Dict[str, Any]] = None,
        timeout: Optional[datetime.timedelta] = None,
    ):
        super().__init__()
        # Validate up front; duck-typed so any timedelta-like value passes.
        if timeout is not None and not hasattr(timeout, "total_seconds"):
            raise ValueError("Timeout value must be a timedelta")
        self.trigger = trigger
        self.method_name = method_name
        self.kwargs = kwargs
        self.timeout = timeout

    def __repr__(self) -> str:
        return f"<TaskDeferred trigger={self.trigger} method={self.method_name}>"
class TaskDeferralError(AirflowException):
    """Raised when a task fails during deferral for some reason."""

# (Removed documentation-page footer: "Was this entry helpful?")