:mod:`airflow.executors`
========================

.. py:module:: airflow.executors




Submodules
----------
.. toctree::
   :titlesonly:
   :maxdepth: 1

   base_executor/index.rst
   celery_executor/index.rst
   dask_executor/index.rst
   local_executor/index.rst
   sequential_executor/index.rst




Package Contents
----------------






.. py:class:: LoggingMixin(context=None)

   Bases: :class:`object`

   Convenience super-class to have a logger configured with the class name.

   .. attribute:: logger

   .. attribute:: log

   .. method:: _set_context(self, context)
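
   A minimal usage sketch, assuming only what the docstring above states
   (subclasses get a ``log`` attribute configured with the class name);
   the ``MyHook`` class is hypothetical:

   .. code-block:: python

      from airflow.utils.log.logging_mixin import LoggingMixin


      class MyHook(LoggingMixin):
          def fetch(self):
              # self.log is a standard logging.Logger named after this class
              self.log.info("Fetching data...")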

      











.. py:exception:: AirflowException

   Bases: :class:`Exception`

   Base class for all Airflow errors.
   Each custom exception should be derived from this class.

   .. attribute:: status_code
      :annotation: = 500
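
   A minimal sketch of deriving a custom error as the docstring advises;
   ``DataUnavailableError`` is a hypothetical name:

   .. code-block:: python

      from airflow.exceptions import AirflowException


      class DataUnavailableError(AirflowException):
          """Raised when an upstream source has no data yet."""


      def check_source(has_data):
          if not has_data:
              raise DataUnavailableError("source is empty; retry later")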

      



   









.. py:class:: BaseExecutor(parallelism=PARALLELISM)

   Bases: :class:`airflow.utils.log.logging_mixin.LoggingMixin`

   .. method:: start(self)

      Executors may need to get things started. For example, LocalExecutor
      starts N workers.

   .. method:: queue_command(self, simple_task_instance, command, priority=1, queue=None)

   .. method:: queue_task_instance(self, task_instance, mark_success=False, pickle_id=None, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, pool=None, cfg_path=None)

   .. method:: has_task(self, task_instance)

      Checks if a task is either queued or running in this executor.

      :param task_instance: TaskInstance
      :return: True if the task is known to this executor

   .. method:: sync(self)

      Sync will get called periodically by the heartbeat method.
      Executors should override this to gather task statuses.

   .. method:: heartbeat(self)

   .. method:: change_state(self, key, state)

   .. method:: fail(self, key)

   .. method:: success(self, key)

   .. method:: get_event_buffer(self, dag_ids=None)

      Returns and flushes the event buffer. If dag_ids is specified,
      it only returns and flushes events for the given dag_ids;
      otherwise it returns and flushes all events.

      :param dag_ids: the dag_ids to return events for; if None, returns all
      :return: a dict of events

   .. method:: execute_async(self, key, command, queue=None, executor_config=None)

      This method will execute the command asynchronously.

   .. method:: end(self)

      This method is called when the caller is done submitting jobs and
      wants to wait synchronously for all previously submitted jobs to
      finish.

   .. method:: terminate(self)

      This method is called when the daemon receives a SIGTERM.
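
   A minimal sketch of a custom executor built on the hooks documented
   above; ``InlineExecutor`` is a hypothetical name, and running commands
   synchronously inside ``sync()`` is purely illustrative:

   .. code-block:: python

      import subprocess

      from airflow.executors.base_executor import BaseExecutor
      from airflow.utils.state import State


      class InlineExecutor(BaseExecutor):
          def start(self):
              # Nothing to boot; just hold pending work.
              self.pending = []

          def execute_async(self, key, command, queue=None, executor_config=None):
              # Record the command; a real executor hands it off to workers here.
              self.pending.append((key, command))

          def sync(self):
              # Called periodically by heartbeat(): run pending commands
              # and report their terminal state back.
              while self.pending:
                  key, command = self.pending.pop(0)
                  try:
                      subprocess.check_call(command, close_fds=True)
                      self.change_state(key, State.SUCCESS)
                  except subprocess.CalledProcessError:
                      self.change_state(key, State.FAILED)

          def end(self):
              # Drain anything still queued before shutting down.
              self.heartbeat()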

      











.. py:class:: LocalExecutor

   Bases: :class:`airflow.executors.base_executor.BaseExecutor`

   LocalExecutor executes tasks locally in parallel. It uses the
   multiprocessing Python library and queues to parallelize the execution
   of tasks.

   .. py:class:: _UnlimitedParallelism(executor)

      Bases: :class:`object`

      Implements LocalExecutor with unlimited parallelism, starting one
      process per command to execute.

      .. method:: start(self)

      .. method:: execute_async(self, key, command)

         :param key: the key to identify the TI
         :type key: tuple(dag_id, task_id, execution_date)
         :param command: the command to execute
         :type command: str

      .. method:: sync(self)

      .. method:: end(self)

   .. py:class:: _LimitedParallelism(executor)

      Bases: :class:`object`

      Implements LocalExecutor with limited parallelism, using a task
      queue to coordinate work distribution.

      .. method:: start(self)

      .. method:: execute_async(self, key, command)

         :param key: the key to identify the TI
         :type key: tuple(dag_id, task_id, execution_date)
         :param command: the command to execute
         :type command: str

      .. method:: sync(self)

      .. method:: end(self)

   .. method:: start(self)

   .. method:: execute_async(self, key, command, queue=None, executor_config=None)

   .. method:: sync(self)

   .. method:: end(self)
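
   A minimal sketch of the lifecycle, with the strategy choice inferred
   from the inner class names above (the actual selection happens inside
   ``start()`` and may differ):

   .. code-block:: python

      from airflow.executors.local_executor import LocalExecutor

      executor = LocalExecutor()  # parallelism comes from the Airflow config
      executor.start()            # presumably _UnlimitedParallelism when
                                  # parallelism == 0, else _LimitedParallelism
      executor.heartbeat()        # triggers sync() to collect worker results
      executor.end()              # waits for outstanding work, then shuts down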

      











.. py:class:: SequentialExecutor

   Bases: :class:`airflow.executors.base_executor.BaseExecutor`

   This executor runs only one task instance at a time and can be used
   for debugging. It is also the only executor that can be used with
   SQLite, since SQLite doesn't support multiple connections.

   Since we want Airflow to work out of the box, it defaults to this
   SequentialExecutor alongside SQLite when you first install it.

   .. method:: execute_async(self, key, command, queue=None, executor_config=None)

   .. method:: sync(self)

   .. method:: end(self)
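
   A minimal sketch of the one-at-a-time behavior described above,
   assuming ``execute_async()`` merely buffers the command and ``sync()``
   runs the backlog serially (a plausible reading, not the verbatim
   implementation):

   .. code-block:: python

      import subprocess

      commands_to_run = []

      def execute_async(key, command):
          # "Async" here only means "deferred": nothing runs yet.
          commands_to_run.append((key, command))

      def sync():
          # Each heartbeat drains the backlog one subprocess at a time.
          for key, command in commands_to_run:
              subprocess.check_call(command, close_fds=True)
          del commands_to_run[:]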

      











.. data:: DEFAULT_EXECUTOR

   Module-level cache holding the executor instance returned by
   :func:`GetDefaultExecutor`.









.. function:: _integrate_plugins()

   Integrate plugins into the context.









.. function:: GetDefaultExecutor()

   Creates a new instance of the configured executor if none exists and returns it.
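
   A minimal usage sketch, assuming the caching behavior implied above:

   .. code-block:: python

      from airflow.executors import GetDefaultExecutor

      executor = GetDefaultExecutor()
      # Subsequent calls return the same cached instance.
      assert executor is GetDefaultExecutor()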

   









.. py:class:: Executors

   .. attribute:: LocalExecutor
      :annotation: = LocalExecutor

   .. attribute:: SequentialExecutor
      :annotation: = SequentialExecutor

   .. attribute:: CeleryExecutor
      :annotation: = CeleryExecutor

   .. attribute:: DaskExecutor
      :annotation: = DaskExecutor

   .. attribute:: MesosExecutor
      :annotation: = MesosExecutor

   .. attribute:: KubernetesExecutor
      :annotation: = KubernetesExecutor

      



   









.. function:: _get_executor(executor_name)

   Creates a new instance of the named executor.
   If the executor name is not known to Airflow,
   it is looked up in the plugins.
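
   A minimal sketch tying this helper to the ``Executors`` constants
   above; note that this is a private helper, and ``GetDefaultExecutor()``
   is the usual entry point:

   .. code-block:: python

      from airflow.executors import Executors, _get_executor

      # A known name resolves to an instance of the matching executor.
      executor = _get_executor(Executors.SequentialExecutor)

      # An unknown name triggers a lookup among registered plugin
      # executors ("MyPluginExecutor" is hypothetical).
      custom = _get_executor("MyPluginExecutor")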