Commit
Merge pull request apache#138 from mistercrunch/remove_hook_default
Remove hook default
mistercrunch committed Feb 14, 2015
2 parents 9ad800c + e8ba17b commit 614ccc5
Showing 12 changed files with 16 additions and 29 deletions.
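
The change is the same across every hook, operator, sensor, and macro touched below: connection-id defaults that were previously read from the [hooks] section of airflow.cfg at import time become plain string literals, and the [hooks] section itself is dropped from the default and test configurations. A minimal sketch of the pattern, using the HiveHook constructor as the example (abbreviated, not the full signature):

    # before: default resolved from the [hooks] config section at import time
    def __init__(self, hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID')):
        ...

    # after: default is a literal connection id, looked up in the
    # Connection table when the hook is instantiated
    def __init__(self, hive_conn_id='hive_default'):
        ...
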
11 changes: 0 additions & 11 deletions airflow/configuration.py
@@ -33,11 +33,6 @@
 celery_result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
 flower_port = 5555
-[hooks]
-presto_default_conn_id = presto_default
-hive_default_conn_id = hive_default
-s3_default_conn_id = s3_default
 [misc]
 job_heartbeat_sec = 5
 id_len = 250
@@ -73,11 +68,6 @@
 celery_result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
 flower_port = 5555
-[hooks]
-presto_default_conn_id = presto_default
-hive_default_conn_id = hive_default
-s3_default_conn_id = s3_default
 [misc]
 job_heartbeat_sec = 1
 id_len = 250
@@ -134,7 +124,6 @@ def mkdir_p(path):
 def test_mode():
     conf = ConfigParser()
     conf.read(TEST_CONFIG)
-    print("Using configuration located at: " + TEST_CONFIG)
 
 conf = ConfigParser()
 conf.read(AIRFLOW_CONFIG)
2 changes: 1 addition & 1 deletion airflow/example_dags/example_bash_operator.py
@@ -16,7 +16,7 @@
     task_id='run_after_loop', bash_command='echo 1', dag=dag)
 run_this.set_downstream(run_this_last)
 
-for i in range(5):
+for i in range(3):
     i = str(i)
     task = BashOperator(
         task_id='runme_'+i,
3 changes: 1 addition & 2 deletions airflow/hooks/S3_hook.py
@@ -72,8 +72,7 @@ class S3Hook(BaseHook):
     '''
     def __init__(
             self,
-            s3_conn_id=conf.get('hooks', 'S3_DEFAULT_CONN_ID'),
-            ):
+            s3_conn_id='s3_default'):
         session = settings.Session()
         db = session.query(
             Connection).filter(
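
For illustration, a short usage sketch of the new default (it assumes a connection named s3_default exists in the metadata database; the alternative id is made up):

    from airflow.hooks import S3Hook

    hook = S3Hook()                          # falls back to the literal 's3_default'
    other = S3Hook(s3_conn_id='my_s3_conn')  # an explicit id still overrides the default
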
3 changes: 1 addition & 2 deletions airflow/hooks/hive_hook.py
@@ -4,7 +4,6 @@
 from tempfile import NamedTemporaryFile
 
 from airflow.models import Connection
-from airflow.configuration import conf
 from airflow import settings
 
 from thrift.transport import TSocket
@@ -22,7 +21,7 @@ class HiveHook(BaseHook):
     '''
     def __init__(
             self,
-            hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID')):
+            hive_conn_id='hive_default'):
         session = settings.Session()
         db = session.query(
             Connection).filter(
3 changes: 1 addition & 2 deletions airflow/hooks/presto_hook.py
@@ -1,5 +1,4 @@
 from airflow import settings
-from airflow.configuration import conf
 from airflow.models import Connection
 from airflow.hooks.base_hook import BaseHook
 from pyhive import presto
@@ -18,7 +17,7 @@ class PrestoHook(BaseHook):
     [[340698]]
     """
     def __init__(self, host=None, db=None, port=None,
-                 presto_conn_id=conf.get('hooks', 'PRESTO_DEFAULT_CONN_ID')):
+                 presto_conn_id='presto_default'):
         self.user = 'airflow'
         if not presto_conn_id:
             self.host = host
4 changes: 2 additions & 2 deletions airflow/macros/hive.py
@@ -4,7 +4,7 @@
 
 def max_partition(
         table, schema="default",
-        hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID')):
+        hive_conn_id='hive_default'):
     '''
     Gets the max partition for a table.
@@ -52,7 +52,7 @@ def _closest_date(target_dt, date_list, before_target=None):
 
 def closest_ds_partition(
         table, ds, before=True, schema="default",
-        hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID')):
+        hive_conn_id='hive_default'):
     '''
     This function finds the date in a list closest to the target date.
     An optional paramter can be given to get the closest before or after.
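
A hedged example of calling the macro directly with the new default; the table name is hypothetical and a hive_default connection is assumed to exist:

    from airflow.macros.hive import max_partition

    # falls back to hive_conn_id='hive_default'
    latest = max_partition(table='some_schema.some_table')
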
2 changes: 1 addition & 1 deletion airflow/operators/hive2samba_operator.py
@@ -30,7 +30,7 @@ def __init__(
             self, hql,
             samba_dbid,
             destination_filepath,
-            hive_dbid=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID'),
+            hive_dbid='hive_default',
             *args, **kwargs):
         super(Hive2SambaOperator, self).__init__(*args, **kwargs)
 
2 changes: 1 addition & 1 deletion airflow/operators/hive_operator.py
@@ -33,7 +33,7 @@ class HiveOperator(BaseOperator):
     @apply_defaults
     def __init__(
             self, hql,
-            hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID'),
+            hive_conn_id='hive_default',
             hiveconf_jinja_translate=False,
             script_begin_tag=None,
             *args, **kwargs):
6 changes: 3 additions & 3 deletions airflow/operators/presto_check_operator.py
@@ -26,7 +26,7 @@ class PrestoCheckOperator(BaseOperator):
     @apply_defaults
     def __init__(
             self, sql,
-            presto_conn_id=conf.get('hooks', 'PRESTO_DEFAULT_CONN_ID'),
+            presto_conn_id='presto_default',
             *args, **kwargs):
         super(PrestoCheckOperator, self).__init__(*args, **kwargs)
 
@@ -81,7 +81,7 @@ class PrestoValueCheckOperator(BaseOperator):
     @apply_defaults
     def __init__(
             self, sql, pass_value, tolerance=None,
-            presto_conn_id=conf.get('hooks', 'PRESTO_DEFAULT_CONN_ID'),
+            presto_conn_id='presto_default',
             *args, **kwargs):
         super(PrestoValueCheckOperator, self).__init__(*args, **kwargs)
         self.presto_conn_id = presto_conn_id
@@ -146,7 +146,7 @@ class PrestoIntervalCheckOperator(BaseOperator):
     def __init__(
             self, table, metrics_thresholds,
             date_filter_column='ds', days_back=-7,
-            presto_conn_id=conf.get('hooks', 'PRESTO_DEFAULT_CONN_ID'),
+            presto_conn_id='presto_default',
             *args, **kwargs):
         super(PrestoIntervalCheckOperator, self).__init__(*args, **kwargs)
         self.presto_conn_id = presto_conn_id
7 changes: 3 additions & 4 deletions airflow/operators/sensors.py
@@ -4,7 +4,6 @@
 from time import sleep
 
 from airflow import settings
-from airflow.configuration import conf
 from airflow.hooks import HiveHook
 from airflow.hooks import S3Hook
 from airflow.models import BaseOperator
@@ -170,7 +169,7 @@ class HivePartitionSensor(BaseSensorOperator):
     def __init__(
             self,
             table, partition="ds='{{ ds }}'",
-            hive_conn_id=conf.get('hooks', 'HIVE_DEFAULT_CONN_ID'),
+            hive_conn_id='hive_default',
             schema='default',
             *args, **kwargs):
         super(HivePartitionSensor, self).__init__(*args, **kwargs)
@@ -253,7 +252,7 @@ class S3KeySensor(BaseSensorOperator):
     def __init__(
             self, bucket_key,
             bucket_name=None,
-            s3_conn_id=conf.get('hooks', 'S3_DEFAULT_CONN_ID'),
+            s3_conn_id='s3_default',
             *args, **kwargs):
         super(S3KeySensor, self).__init__(*args, **kwargs)
         session = settings.Session()
@@ -309,7 +308,7 @@ class S3PrefixSensor(BaseSensorOperator):
     def __init__(
             self, bucket_name,
             prefix, delimiter='/',
-            s3_conn_id=conf.get('hooks', 'S3_DEFAULT_CONN_ID'),
+            s3_conn_id='s3_default',
             *args, **kwargs):
         super(S3PrefixSensor, self).__init__(*args, **kwargs)
         session = settings.Session()
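
A sketch of the sensors with their new literal defaults (DAG and default_args boilerplate omitted; bucket, key, and table names are invented):

    from airflow.operators.sensors import S3KeySensor, HivePartitionSensor

    # s3_conn_id defaults to 's3_default'
    wait_for_key = S3KeySensor(
        task_id='wait_for_key',
        bucket_name='example-bucket',
        bucket_key='data/{{ ds }}/_SUCCESS',
        dag=dag)

    # hive_conn_id defaults to 'hive_default'
    wait_for_partition = HivePartitionSensor(
        task_id='wait_for_partition',
        table='example_table',
        dag=dag)
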
1 change: 1 addition & 0 deletions requirements.txt
@@ -1,3 +1,4 @@
+boto
 celery
 chartkick
 coverage
1 change: 1 addition & 0 deletions setup.py
@@ -12,6 +12,7 @@
     zip_safe=False,
     scripts=['airflow/bin/airflow'],
     install_requires=[
+        'boto>=2.36.0',
         'celery>=3.1.17',
         'chartkick>=0.4.2',
         'dill>=0.2.2',
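
boto appears in both dependency lists presumably because the S3 hook and sensors above rely on it. A minimal sketch of the underlying boto 2 calls (credentials are shown inline only for illustration; in Airflow they would come from the s3_default Connection):

    import boto

    conn = boto.connect_s3(
        aws_access_key_id='AKIA...',
        aws_secret_access_key='...')
    bucket = conn.get_bucket('example-bucket')
    key = bucket.get_key('data/part-00000')
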
