diff --git a/airflow/executors/__init__.py b/airflow/executors/__init__.py index e8e9898a1d0ce..695ef5ce7f24b 100644 --- a/airflow/executors/__init__.py +++ b/airflow/executors/__init__.py @@ -5,6 +5,11 @@ from airflow.executors.local_executor import LocalExecutor from airflow.executors.sequential_executor import SequentialExecutor +try: + from airflow.executors.celery_executor import CeleryExecutor +except: + pass + from airflow.utils import AirflowException _EXECUTOR = configuration.get('core', 'EXECUTOR') @@ -12,7 +17,6 @@ if _EXECUTOR == 'LocalExecutor': DEFAULT_EXECUTOR = LocalExecutor() elif _EXECUTOR == 'CeleryExecutor': - from airflow.executors.celery_executor import CeleryExecutor DEFAULT_EXECUTOR = CeleryExecutor() elif _EXECUTOR == 'SequentialExecutor': DEFAULT_EXECUTOR = SequentialExecutor() diff --git a/docs/code.rst b/docs/code.rst index 5db096838063f..aafb52adb94bf 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -145,20 +145,19 @@ Macros are a way to expose objects to your templates and live under the A few commonly used libraries and methods are made available. + ================================= ==================================== Variable Description ================================= ==================================== -``macros.datetime`` The standard lib's - ``datetime.datetime`` -``macros.timedelta`` The standard lib's - ``datetime.timedelta`` -``macros.dateutil`` A reference to the ``dateutil`` - package +``macros.datetime`` The standard lib's ``datetime.datetime`` +``macros.timedelta`` The standard lib's ``datetime.timedelta`` +``macros.dateutil`` A reference to the ``dateutil`` package ``macros.time`` The standard lib's ``time`` ``macros.uuid`` The standard lib's ``uuid`` ``macros.random`` The standard lib's ``random`` ================================= ==================================== + Some airflow specific macros are also defined: .. 
automodule:: airflow.macros diff --git a/docs/conf.py b/docs/conf.py index 0a0ef55af3a1b..3bb2d9f3f319d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,13 +13,16 @@ # serve to show the default. import os import sys + +# Hack to allow a piece of the code to behave differently while +# the docs are being built. The main objective was to alter the +# behavior of utils.apply_default, which was hiding function headers os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE' from airflow import settings # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, settings.AIRFLOW_HOME + "/src/airflow") # -- General configuration ------------------------------------------------ @@ -51,16 +54,16 @@ # General information about the project. project = u'Airflow' -copyright = u'2014, Maxime Beauchemin' +copyright = u'2014, Maxime Beauchemin, Airbnb' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '1.0.0' +#version = '1.0.0' # The full version, including alpha/beta/rc tags. -release = '1.0.0' +#release = '1.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -119,10 +122,10 @@ # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +html_title = "Airflow Documentation" # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +html_short_title = "" # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
diff --git a/docs/index.rst b/docs/index.rst index 72160ac0e52d7..c1c5ec3f0904b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,23 +1,22 @@ .. image:: img/pin_large.png :width: 70 -Airflow's Documentation +Airflow Documentation ================================ -Airflow is a platform to programmatically author, schedule and monitor +Airflow is a platform to programmatically author, schedule and monitor workflows. -When workflows are defined as code, they become more maintainable, -versionable, testable, and collaborative. - - -Use airflow to author workflows as directed acyclic graphs (DAGs) of tasks. -The airflow scheduler executes your tasks on an array of workers while -following the specified dependencies. Rich command line utilities make -performing complex surgeries on DAGs a snap. The rich user interface -makes it easy to visualize pipelines running in production, +Use airflow to author workflows as directed acyclic graphs (DAGs) of tasks. +The airflow scheduler executes your tasks on an array of workers while +following the specified dependencies. Rich command line utilities make +performing complex surgeries on DAGs a snap. The rich user interface +makes it easy to visualize pipelines running in production, monitor progress, and troubleshoot issues when needed. +When workflows are defined as code, they become more maintainable, +versionable, testable, and collaborative. + ------------ .. image:: img/airflow.gif