Skip to content

Commit 42902d0

Browse files
committed Nov 22, 2018
Bump to 1.10.1
1 parent 52165dc commit 42902d0

5 files changed

+40
-17
lines changed
 

‎Dockerfile

+3-6
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# VERSION 1.10.0-5
1+
# VERSION 1.10.1
22
# AUTHOR: Matthieu "Puckel_" Roisil
33
# DESCRIPTION: Basic Airflow container
44
# BUILD: docker build --rm -t puckel/docker-airflow .
@@ -12,7 +12,7 @@ ENV DEBIAN_FRONTEND noninteractive
1212
ENV TERM linux
1313

1414
# Airflow
15-
ARG AIRFLOW_VERSION=1.10.0
15+
ARG AIRFLOW_VERSION=1.10.1
1616
ARG AIRFLOW_HOME=/usr/local/airflow
1717
ARG AIRFLOW_DEPS=""
1818
ARG PYTHON_DEPS=""
@@ -28,7 +28,6 @@ ENV LC_MESSAGES en_US.UTF-8
2828
RUN set -ex \
2929
&& buildDeps=' \
3030
freetds-dev \
31-
python3-dev \
3231
libkrb5-dev \
3332
libsasl2-dev \
3433
libssl-dev \
@@ -42,8 +41,6 @@ RUN set -ex \
4241
$buildDeps \
4342
freetds-bin \
4443
build-essential \
45-
python3-pip \
46-
python3-requests \
4744
default-libmysqlclient-dev \
4845
apt-utils \
4946
curl \
@@ -60,7 +57,7 @@ RUN set -ex \
6057
&& pip install ndg-httpsclient \
6158
&& pip install pyasn1 \
6259
&& pip install apache-airflow[crypto,celery,postgres,hive,jdbc,mysql,ssh${AIRFLOW_DEPS:+,}${AIRFLOW_DEPS}]==${AIRFLOW_VERSION} \
63-
&& pip install 'celery[redis]>=4.1.1,<4.2.0' \
60+
&& pip install 'redis>=2.10.5,<3' \
6461
&& if [ -n "${PYTHON_DEPS}" ]; then pip install ${PYTHON_DEPS}; fi \
6562
&& apt-get purge --auto-remove -yqq $buildDeps \
6663
&& apt-get autoremove -yqq --purge \

‎config/airflow.cfg

+31-6
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s
3939
# we need to escape the curly braces by adding an additional curly brace
4040
log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log
4141
log_processor_filename_template = {{ filename }}.log
42+
dag_processor_manager_log_location = /usr/local/airflow/logs/dag_processor_manager/dag_processor_manager.log
4243

4344
# Hostname by providing a path to a callable, which will resolve the hostname
4445
hostname_callable = socket:getfqdn
@@ -59,6 +60,9 @@ executor = SequentialExecutor
5960
# If SqlAlchemy should pool database connections.
6061
sql_alchemy_pool_enabled = True
6162

63+
# The encoding for the databases
64+
sql_engine_encoding = utf-8
65+
6266
# The SqlAlchemy pool size is the maximum number of database connections
6367
# in the pool. 0 indicates no limit.
6468
sql_alchemy_pool_size = 5
@@ -73,6 +77,10 @@ sql_alchemy_pool_recycle = 1800
7377
# disconnects. Setting this to 0 disables retries.
7478
sql_alchemy_reconnect_timeout = 300
7579

80+
# The schema to use for the metadata database
81+
# SqlAlchemy supports databases with the concept of multiple schemas.
82+
sql_alchemy_schema =
83+
7684
# The amount of parallelism as a setting to the executor. This defines
7785
# the max number of task instances that should run simultaneously
7886
# on this airflow installation
@@ -142,6 +150,9 @@ killed_task_cleanup_time = 60
142150
# `airflow trigger_dag -c`, the key-value pairs will override the existing ones in params.
143151
dag_run_conf_overrides_params = False
144152

153+
# Worker initialisation check to validate Metadata Database connection
154+
worker_precheck = False
155+
145156
[cli]
146157
# In what way should the cli access the API. The LocalClient will use the
147158
# database directly, while the json_client will use the api running on the
@@ -180,6 +191,9 @@ default_gpus = 0
180191
[hive]
181192
# Default mapreduce queue for HiveOperator tasks
182193
default_hive_mapred_queue =
194+
# Template for mapred_job_name in HiveOperator, supports the following named parameters:
195+
# hostname, dag_id, task_id, execution_date
196+
mapred_job_name_template = Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}
183197

184198
[webserver]
185199
# The base url of your website as airflow cannot guess what domain or
@@ -227,7 +241,10 @@ access_logfile = -
227241
error_logfile = -
228242

229243
# Expose the configuration file in the web server
230-
expose_config = False
244+
# This is only applicable for the flask-admin based web UI (non FAB-based).
245+
# In the FAB-based web UI with RBAC feature,
246+
# access to configuration is controlled by role permissions.
247+
expose_config = True
231248

232249
# Set to true to turn on authentication:
233250
# https://airflow.incubator.apache.org/security.html#web-authentication
@@ -387,9 +404,7 @@ run_duration = -1
387404
# after how much time a new DAGs should be picked up from the filesystem
388405
min_file_process_interval = 0
389406

390-
# How many seconds to wait between file-parsing loops to prevent the logs from being spammed.
391-
min_file_parsing_loop_time = 1
392-
407+
# How often (in seconds) to scan the DAGs directory for new files. Default to 5 minutes.
393408
dag_dir_list_interval = 300
394409

395410
# How often should stats be printed to the logs
@@ -427,6 +442,10 @@ max_threads = 2
427442

428443
authenticate = False
429444

445+
# Turn off scheduler use of cron intervals by setting this to False.
446+
# DAGs submitted manually in the web UI or with trigger_dag will still run.
447+
use_job_schedule = True
448+
430449
[ldap]
431450
# set this to ldaps://<your.ldap.server>:<port>
432451
uri =
@@ -491,7 +510,6 @@ reinit_frequency = 3600
491510
kinit_path = kinit
492511
keytab = airflow.keytab
493512

494-
495513
[github_enterprise]
496514
api_rev = v3
497515

@@ -506,9 +524,11 @@ elasticsearch_log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
506524
elasticsearch_end_of_log_mark = end_of_log
507525

508526
[kubernetes]
509-
# The repository and tag of the Kubernetes Image for the Worker to Run
527+
# The repository, tag and imagePullPolicy of the Kubernetes Image for the Worker to Run
510528
worker_container_repository =
511529
worker_container_tag =
530+
worker_container_image_pull_policy = IfNotPresent
531+
worker_dags_folder =
512532

513533
# If True (default), worker pods will be deleted upon termination
514534
delete_worker_pods = True
@@ -562,6 +582,11 @@ gcp_service_account_keys =
562582
# It will raise an exception if called from a process not running in a kubernetes environment.
563583
in_cluster = True
564584

585+
[kubernetes_node_selectors]
586+
# The Key-value pairs to be given to worker pods.
587+
# The worker pods will be scheduled to the nodes of the specified key-value pairs.
588+
# Should be supplied in the format: key = value
589+
565590
[kubernetes_secrets]
566591
# The scheduler mounts the following secrets into your workers as they are launched by the
567592
# scheduler. You may define as many secrets as needed and the kubernetes launcher will parse the

‎docker-compose-CeleryExecutor.yml

+4-4
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ services:
1616
# - ./pgdata:/var/lib/postgresql/data/pgdata
1717

1818
webserver:
19-
image: puckel/docker-airflow:1.10.0-5
19+
image: puckel/docker-airflow:1.10.1
2020
restart: always
2121
depends_on:
2222
- postgres
@@ -43,7 +43,7 @@ services:
4343
retries: 3
4444

4545
flower:
46-
image: puckel/docker-airflow:1.10.0-5
46+
image: puckel/docker-airflow:1.10.1
4747
restart: always
4848
depends_on:
4949
- redis
@@ -55,7 +55,7 @@ services:
5555
command: flower
5656

5757
scheduler:
58-
image: puckel/docker-airflow:1.10.0-5
58+
image: puckel/docker-airflow:1.10.1
5959
restart: always
6060
depends_on:
6161
- webserver
@@ -74,7 +74,7 @@ services:
7474
command: scheduler
7575

7676
worker:
77-
image: puckel/docker-airflow:1.10.0-5
77+
image: puckel/docker-airflow:1.10.1
7878
restart: always
7979
depends_on:
8080
- scheduler

‎docker-compose-LocalExecutor.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ services:
88
- POSTGRES_DB=airflow
99

1010
webserver:
11-
image: puckel/docker-airflow:1.10.0-5
11+
image: puckel/docker-airflow:1.10.1
1212
restart: always
1313
depends_on:
1414
- postgres

‎script/entrypoint.sh

+1
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ case "$1" in
8282
exec airflow "$@"
8383
;;
8484
flower)
85+
sleep 10
8586
exec airflow "$@"
8687
;;
8788
version)

0 commit comments

Comments
 (0)
Please sign in to comment.