Parameterizing DagBag import timeouts
mistercrunch committed Mar 7, 2016
1 parent 0fd94d9 commit 3ee0085
Showing 2 changed files with 6 additions and 1 deletion.
4 changes: 4 additions & 0 deletions airflow/configuration.py
@@ -78,6 +78,7 @@ class AirflowConfigException(Exception):
     'dags_are_paused_at_creation': False,
     'sql_alchemy_pool_size': 5,
     'sql_alchemy_pool_recycle': 3600,
+    'dagbag_import_timeout': 30,
 },
 'webserver': {
     'base_url': 'http://localhost:8080',
@@ -183,6 +184,9 @@ class AirflowConfigException(Exception):
 # Whether to disable pickling dags
 donot_pickle = False
+# How long before timing out a python file import while filling the DagBag
+dagbag_import_timeout = 30
 [webserver]
 # The base url of your website as airflow cannot guess what domain or
 # cname you are using. This is use in automated emails that
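Once the default is in place, any caller can read the effective value through Airflow's configuration module. A minimal sketch (the "from airflow import configuration" import path is an assumption here; the getint call mirrors the one used in models.py below):

from airflow import configuration

# Reads [core] dagbag_import_timeout from airflow.cfg, presumably falling
# back to the 30-second default declared above when the key is not set.
timeout_secs = configuration.getint('core', 'DAGBAG_IMPORT_TIMEOUT')
print("DagBag import timeout: %d seconds" % timeout_secs)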
3 changes: 2 additions & 1 deletion airflow/models.py
@@ -204,7 +204,8 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True):
     self.logger.info("Importing " + filepath)
     if mod_name in sys.modules:
         del sys.modules[mod_name]
-    with utils.timeout(30):
+    with utils.timeout(
+            configuration.getint('core', "DAGBAG_IMPORT_TIMEOUT")):
         m = imp.load_source(mod_name, filepath)
 except Exception as e:
     self.logger.exception("Failed to import: " + filepath)
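For context, utils.timeout is used here as a context manager that aborts the file import when it runs past the configured limit; whatever exception it raises is then caught and logged by the surrounding except block. A rough, illustrative sketch of a SIGALRM-based timeout in that spirit (not the actual airflow.utils implementation; the exception type is a placeholder):

import signal
from contextlib import contextmanager

@contextmanager
def timeout(seconds):
    # Illustrative only: arm a SIGALRM so the wrapped block is interrupted
    # if it runs longer than `seconds`. Main thread, Unix only.
    def handler(signum, frame):
        raise TimeoutError("timed out after %s seconds" % seconds)

    old_handler = signal.signal(signal.SIGALRM, handler)
    signal.alarm(seconds)
    try:
        yield
    finally:
        signal.alarm(0)  # cancel any pending alarm
        signal.signal(signal.SIGALRM, old_handler)

# Usage mirrors the call site in process_file():
#     with timeout(configuration.getint('core', 'DAGBAG_IMPORT_TIMEOUT')):
#         m = imp.load_source(mod_name, filepath)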
