[core]
- # The folder where your airflow pipelines live, most likely a
- # subfolder in a code repository. This path must be absolute.
- dags_folder = /opt/airflow/dags
-
# The amount of parallelism as a setting to the executor. This defines
# the max number of task instances that should run simultaneously
# on this airflow installation
@@ -16,73 +12,47 @@ max_active_tasks_per_dag = 64
# environment
load_examples = False

- # Where your Airflow plugins are stored
- plugins_folder = /opt/airflow/plugins
-
# Secret key to save connection passwords in the db
fernet_key = $FERNET_KEY

# How long before timing out a python file import
dagbag_import_timeout = 600

+ [dag_processor]
# How long before timing out a DagFileProcessor, which processes a dag file
dag_file_processor_timeout = 600

-
[database]
# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engine, more information
# their website
sql_alchemy_conn = sqlite:////opt/airflow/sqlite/airflow.db

-
- [logging]
- # The folder where airflow should store its log files
- # This path must be absolute
- base_log_folder = /opt/airflow/logs
-
- # Log format for when Colored logs is enabled
- colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {{%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d}} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s
-
- # Format of Log line
- log_format = [%%(asctime)s] {{%%(filename)s:%%(lineno)d}} %%(levelname)s - %%(message)s
-
- dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log
-
-
[api]
- # How to authenticate users of the API
- auth_backends = airflow.api.auth.backend.default
-
-
- [webserver]
- # Number of seconds the webserver waits before killing gunicorn master that doesn't respond
- web_server_master_timeout = 600
+ # Number of workers to run the Gunicorn web server
+ workers = 2

# Number of seconds the gunicorn webserver waits before timing out on a worker
- web_server_worker_timeout = 600
+ worker_timeout = 600

+ # Expose the configuration file in the web server
+ expose_config = True
+
+ # TODO: move it to env var
# Secret key used to run your flask app
# It should be as random as possible
secret_key = l\xba,\xc3\x023\xca\x04\xdb\xf2\xf7\xfa\xb8# \xee>

- # Number of workers to run the Gunicorn web server
- workers = 2
-
- # Expose the configuration file in the web server
- expose_config = True
+ [api_auth]
+ jwt_secret = $JWT_SECRET

- # Allow the UI to be rendered in a frame
- x_frame_enabled = True
+ [fab]

# Minutes of non-activity before logged out from UI
# 0 means never get forcibly logged out
- force_log_out_after = 0
-
- authenticate = False
- auth_backend = airflow.api.auth.backend.default
-
+ session_lifetime_minutes = 0

+ # TODO: check whether we need it. I don't think we're using celery
[celery]
# The concurrency that will be used when starting workers with the
# ``airflow celery worker`` command. This defines the number of task instances that
@@ -115,10 +85,8 @@ result_backend = db+postgresql://airflow:airflow@postgres/airflow
[scheduler]
- child_process_log_directory = /opt/airflow/logs/scheduler
-
- # Format of the log_id, which is used to query for a given tasks logs
- log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
+ # TODO: check whether it exists
+ enable_health_check = True

[kubernetes]
@@ -133,9 +101,6 @@ log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
# The timeout is specified as [connect timeout, read timeout]
kube_client_request_args = {{" _request_timeout" : [60,60] }}

- # Specifies the uid to run the first process of the worker pods containers as
- run_as_user =
-
# ref: https://airflow.apache.org/docs/apache-airflow/1.10.1/security.html#setting-up-google-authentication
[google]
client_id = <check the doc above>
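
Note: the config above expects FERNET_KEY and JWT_SECRET to be present in the environment, and the hard-coded flask secret_key carries a TODO to move it there as well. A minimal sketch (not part of this commit) for generating candidate values, assuming the cryptography package that Airflow already depends on is importable; the variable names simply mirror the placeholders in the config:

# Sketch: generate candidate values for the environment variables the config expects.
# Assumes the `cryptography` package (an Airflow dependency) is available.
import secrets
from cryptography.fernet import Fernet

print("FERNET_KEY=" + Fernet.generate_key().decode())  # for fernet_key = $FERNET_KEY
print("JWT_SECRET=" + secrets.token_urlsafe(32))        # for jwt_secret = $JWT_SECRET
print("SECRET_KEY=" + secrets.token_hex(16))            # candidate replacement for the hard-coded secret_key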
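The two remaining TODOs ("check whether it exists" for enable_health_check, and whether the [celery] block is still needed) can be checked against the installed Airflow itself. A rough sketch, assuming it runs in the same environment as the deployment:

# Sketch: confirm the flagged options against the installed Airflow.
from airflow.configuration import conf

print(conf.has_option("scheduler", "enable_health_check"))  # True if this Airflow version recognises the option
print(conf.get("core", "executor"))  # the [celery] section only matters when this is CeleryExecutor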