
Commit 48f3f19

Bump to Airflow 1.10.6

1 parent: c654707

5 files changed: +52 -12 lines

.circleci/config.yml (+1 -1)

@@ -20,7 +20,7 @@ jobs:
       - run:
           name: Test docker image
           command: |
-            docker run puckel/docker-airflow version |grep '1.10.4'
+            docker run puckel/docker-airflow version |grep '1.10.6'
 workflows:
   version: 2
   build_and_test:
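
The same smoke test can be run locally before pushing. A minimal sketch in Python, assuming Docker is installed and the image has been built or pulled:

    import subprocess

    # Invoke the image's entrypoint with the "version" argument, as the CI
    # job does, and check for the expected Airflow version in the output.
    result = subprocess.run(
        ["docker", "run", "--rm", "puckel/docker-airflow", "version"],
        capture_output=True,
        text=True,
        check=True,
    )
    assert "1.10.6" in result.stdout, result.stdout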

Dockerfile (+2 -2)

@@ -1,4 +1,4 @@
-# VERSION 1.10.4
+# VERSION 1.10.6
 # AUTHOR: Matthieu "Puckel_" Roisil
 # DESCRIPTION: Basic Airflow container
 # BUILD: docker build --rm -t puckel/docker-airflow .
@@ -12,7 +12,7 @@ ENV DEBIAN_FRONTEND noninteractive
 ENV TERM linux

 # Airflow
-ARG AIRFLOW_VERSION=1.10.4
+ARG AIRFLOW_VERSION=1.10.6
 ARG AIRFLOW_USER_HOME=/usr/local/airflow
 ARG AIRFLOW_DEPS=""
 ARG PYTHON_DEPS=""

config/airflow.cfg (+44 -4)

@@ -84,9 +84,10 @@ sql_alchemy_max_overflow = 10
 # a lower config value will allow the system to recover faster.
 sql_alchemy_pool_recycle = 1800

-# How many seconds to retry re-establishing a DB connection after
-# disconnects. Setting this to 0 disables retries.
-sql_alchemy_reconnect_timeout = 300
+# Check connection at the start of each connection pool checkout.
+# Typically, this is a simple statement like "SELECT 1".
+# More information here: https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
+sql_alchemy_pool_pre_ping = True

 # The schema to use for the metadata database
 # SqlAlchemy supports databases with the concept of multiple schemas.
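
The new pre-ping setting corresponds to SQLAlchemy's pool option of the same name. A minimal sketch of the equivalent direct SQLAlchemy call, with a placeholder connection URL:

    from sqlalchemy import create_engine

    # pool_pre_ping=True makes SQLAlchemy issue a lightweight liveness check
    # (roughly "SELECT 1") on each connection checkout and transparently
    # replace connections the database has dropped since they were pooled.
    engine = create_engine(
        "postgresql+psycopg2://airflow:airflow@postgres/airflow",  # placeholder URL
        pool_pre_ping=True,
        pool_recycle=1800,  # matches sql_alchemy_pool_recycle above
    )
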
@@ -127,6 +128,9 @@ donot_pickle = False
 # How long before timing out a python file import while filling the DagBag
 dagbag_import_timeout = 30

+# How long before timing out a DagFileProcessor, which processes a dag file
+dag_file_processor_timeout = 50
+
 # The class to use for running task instances in a subprocess
 task_runner = StandardTaskRunner
@@ -167,6 +171,9 @@ worker_precheck = False
 # When discovering DAGs, ignore any files that don't contain the strings `DAG` and `airflow`.
 dag_discovery_safe_mode = True

+# The number of retries each task is going to have by default. Can be overridden at dag or task level.
+default_task_retries = 0
+
 [cli]
 # In what way should the cli access the API. The LocalClient will use the
 # database directly, while the json_client will use the api running on the
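
The new option only sets the global default; `retries` in a DAG's `default_args` or on an individual operator still takes precedence. A minimal sketch against the Airflow 1.10 API (DAG id, schedule, and command are illustrative):

    from datetime import datetime, timedelta

    from airflow import DAG
    from airflow.operators.bash_operator import BashOperator

    # DAG-level default_args override the global default_task_retries;
    # an operator-level `retries` overrides the DAG-level value again.
    dag = DAG(
        dag_id="retries_example",  # illustrative
        default_args={"retries": 2, "retry_delay": timedelta(minutes=5)},
        start_date=datetime(2019, 1, 1),
        schedule_interval=None,
    )

    flaky_step = BashOperator(
        task_id="flaky_step",
        bash_command="exit 0",  # illustrative command
        retries=4,  # task-level override
        dag=dag,
    )
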
@@ -315,6 +322,10 @@ cookie_samesite =
 # Default setting for wrap toggle on DAG code and TI log views.
 default_wrap = False

+# Send anonymous user activity to your analytics tool
+# analytics_tool = # choose from google_analytics, segment, or metarouter
+# analytics_id = XXXXXXXXXXX
+
 [email]
 email_backend = airflow.utils.email.send_email_smtp

@@ -454,6 +465,13 @@ scheduler_heartbeat_sec = 5
 # -1 indicates to run continuously (see also num_runs)
 run_duration = -1

+# The number of times to try to schedule each DAG file
+# -1 indicates unlimited number
+num_runs = -1
+
+# The number of seconds to wait between consecutive DAG file processing
+processor_poll_interval = 1
+
 # after how much time (seconds) a new DAGs should be picked up from the filesystem
 min_file_process_interval = 0

@@ -605,6 +623,11 @@ json_format = False
 # Log fields to also attach to the json output, if enabled
 json_fields = asctime, filename, lineno, levelname, message

+[elasticsearch_configs]
+
+use_ssl = False
+verify_certs = True
+
 [kubernetes]
 # The repository, tag and imagePullPolicy of the Kubernetes Image for the Worker to Run
 worker_container_repository =
@@ -697,10 +720,25 @@ git_dags_folder_mount_point =
 git_ssh_key_secret_name =
 git_ssh_known_hosts_configmap_name =

+# To give the git_sync init container credentials via a secret, create a secret
+# with two fields: GIT_SYNC_USERNAME and GIT_SYNC_PASSWORD (example below) and
+# add `git_sync_credentials_secret = <secret_name>` to your airflow config under the kubernetes section
+#
+# Secret Example:
+# apiVersion: v1
+# kind: Secret
+# metadata:
+#   name: git-credentials
+# data:
+#   GIT_SYNC_USERNAME: <base64_encoded_git_username>
+#   GIT_SYNC_PASSWORD: <base64_encoded_git_password>
+git_sync_credentials_secret =
+
 # For cloning DAGs from git repositories into volumes: https://github.com/kubernetes/git-sync
 git_sync_container_repository = k8s.gcr.io/git-sync
 git_sync_container_tag = v3.1.1
 git_sync_init_container_name = git-sync-clone
+git_sync_run_as_user = 65533

 # The name of the Kubernetes service account to be associated with airflow workers, if any.
 # Service accounts are required for workers that require access to secrets or cluster resources.
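
The `data` values in the Secret example above must be base64-encoded. A minimal sketch of producing them (the credentials are placeholders):

    import base64

    # Kubernetes Secret `data` fields hold base64-encoded bytes; encode the
    # plaintext git credentials before pasting them into the manifest.
    username = base64.b64encode(b"git-user").decode()      # placeholder
    password = base64.b64encode(b"s3cr3t-token").decode()  # placeholder
    print("GIT_SYNC_USERNAME:", username)
    print("GIT_SYNC_PASSWORD:", password)
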
@@ -742,7 +780,9 @@ tolerations =
 # List of supported params in **kwargs are similar for all core_v1_apis, hence a single config variable for all apis
 # See:
 # https://raw.githubusercontent.com/kubernetes-client/python/master/kubernetes/client/apis/core_v1_api.py
-kube_client_request_args =
+# Note that if no _request_timeout is specified, the kubernetes client will wait indefinitely for kubernetes
+# api responses, which will cause the scheduler to hang. The timeout is specified as [connect timeout, read timeout]
+kube_client_request_args = {{"_request_timeout" : [60,60] }}

 # Worker pods security context options
 # See:
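
The `_request_timeout` kwarg is passed straight through to the kubernetes Python client on every API call. A minimal sketch of the equivalent direct call (the namespace is a placeholder):

    from kubernetes import client, config

    # _request_timeout=(connect, read) bounds each API call; without it the
    # client can block indefinitely on an unresponsive API server.
    config.load_kube_config()  # use config.load_incluster_config() inside a pod
    v1 = client.CoreV1Api()
    pods = v1.list_namespaced_pod("airflow", _request_timeout=(60, 60))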

docker-compose-CeleryExecutor.yml (+4 -4)

@@ -16,7 +16,7 @@ services:
         # - ./pgdata:/var/lib/postgresql/data/pgdata

     webserver:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - postgres
@@ -43,7 +43,7 @@ services:
             retries: 3

     flower:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - redis
@@ -55,7 +55,7 @@ services:
         command: flower

     scheduler:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - webserver
@@ -74,7 +74,7 @@ services:
         command: scheduler

     worker:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - scheduler

docker-compose-LocalExecutor.yml (+1 -1)

@@ -8,7 +8,7 @@ services:
             - POSTGRES_DB=airflow

     webserver:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - postgres
