@@ -1,8 +1,4 @@
[core]
- # The folder where your airflow pipelines live, most likely a
- # subfolder in a code repository. This path must be absolute.
- dags_folder = /opt/airflow/dags
-
# The amount of parallelism as a setting to the executor. This defines
# the max number of task instances that should run simultaneously
# on this airflow installation
@@ -16,73 +12,48 @@ max_active_tasks_per_dag = 64
# environment
load_examples = False

- # Where your Airflow plugins are stored
- plugins_folder = /opt/airflow/plugins
-
# Secret key to save connection passwords in the db
fernet_key = $FERNET_KEY
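Since the value is taken from $FERNET_KEY, a valid key still has to be generated somewhere. A minimal sketch, assuming the cryptography package (which Airflow itself depends on):

    from cryptography.fernet import Fernet

    # Generates a url-safe base64-encoded 32-byte key; export the output
    # as FERNET_KEY in the environment that renders this config.
    print(Fernet.generate_key().decode())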

# How long before timing out a python file import
dagbag_import_timeout = 600

+ auth_manager = airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager
+
+ [dag_processor]
# How long before timing out a DagFileProcessor, which processes a dag file
dag_file_processor_timeout = 600

-
[database]
# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engines; more information
# on their website
sql_alchemy_conn = sqlite:////opt/airflow/sqlite/airflow.db
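Note that Airflow only supports SequentialExecutor on SQLite, so this URI is really a local/dev setting. A quick way to check the metadata DB is reachable is to open the same URI with SQLAlchemy (a sketch; the path is taken from this config and assumed to exist):

    from sqlalchemy import create_engine, text

    # Same URI as sql_alchemy_conn above.
    engine = create_engine("sqlite:////opt/airflow/sqlite/airflow.db")
    with engine.connect() as conn:
        # Prints 1 if the database file is readable.
        print(conn.execute(text("SELECT 1")).scalar())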
-
-
- [logging]
- # The folder where airflow should store its log files
- # This path must be absolute
- base_log_folder = /opt/airflow/logs
-
- # Log format for when Colored logs is enabled
- colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {{%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d}} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s
-
- # Format of Log line
- log_format = [%%(asctime)s] {{%%(filename)s:%%(lineno)d}} %%(levelname)s - %%(message)s
-
- dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log
+ external_db_managers = airflow.providers.fab.auth_manager.models.db.FABDBManager


[api]
- # How to authenticate users of the API
- auth_backends = airflow.api.auth.backend.default
-
-
- [webserver]
- # Number of seconds the webserver waits before killing gunicorn master that doesn't respond
- web_server_master_timeout = 600
-
- # Number of seconds the gunicorn webserver waits before timing out on a worker
- web_server_worker_timeout = 600
-
- # Secret key used to run your flask app
- # It should be as random as possible
- secret_key = l\xba,\xc3\x023\xca\x04\xdb\xf2\xf7\xfa\xb8# \xee>
-
# Number of workers to run the Gunicorn web server
workers = 2

+ # Number of seconds the gunicorn webserver waits before timing out on a worker
+ worker_timeout = 600
+
# Expose the configuration file in the web server
expose_config = True

- # Allow the UI to be rendered in a frame
- x_frame_enabled = True
-
- # Minutes of non-activity before logged out from UI
- # 0 means never get forcibly logged out
- force_log_out_after = 0
+ # TODO: move it to env var
+ # Secret key used to run your flask app
+ # It should be as random as possible
+ secret_key = l\xba,\xc3\x023\xca\x04\xdb\xf2\xf7\xfa\xb8# \xee>

- authenticate = False
- auth_backend = airflow.api.auth.backend.default
+ [api_auth]
+ jwt_secret = $JWT_SECRET
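Per the TODO above, secret_key should come from the environment just like jwt_secret. One way to generate suitably random values for both is Python's standard secrets module (the variable names here are illustrative, not part of this config):

    import secrets

    # 32 bytes of randomness is plenty for either value.
    print("SECRET_KEY=" + secrets.token_urlsafe(32))
    print("JWT_SECRET=" + secrets.token_urlsafe(32))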

+ [fab]
+ auth_backends = airflow.providers.fab.auth_manager.api.auth.backend.session

+ # TODO: check whether we need it. I don't think we're using celery
[celery]
# The concurrency that will be used when starting workers with the
# ``airflow celery worker`` command. This defines the number of task instances that
@@ -115,10 +86,8 @@ result_backend = db+postgresql://airflow:airflow@postgres/airflow


[scheduler]
- child_process_log_directory = /opt/airflow/logs/scheduler
-
- # Format of the log_id, which is used to query for a given tasks logs
- log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
+ # TODO: check whether it exists
+ enable_health_check = True
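The option does exist: [scheduler] enable_health_check was added in Airflow 2.4 and makes the scheduler serve a /health endpoint, by default on port 8974. A minimal probe, assuming a local scheduler and the requests package:

    import requests

    # Port comes from [scheduler] scheduler_health_check_server_port
    # (default 8974); adjust the host for your deployment.
    print(requests.get("http://localhost:8974/health", timeout=5).status_code)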


[kubernetes]
@@ -133,9 +102,6 @@ log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
# The timeout is specified as [connect timeout, read timeout]
kube_client_request_args = {{"_request_timeout" : [60,60] }}
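The doubled braces suggest this file goes through a templating step that collapses {{ }} to { }; after rendering, the value must parse as a JSON dict so the Kubernetes client receives _request_timeout as [connect, read] seconds. A quick sanity check (the rendered string below is an assumption):

    import json

    # What the line above should look like after template rendering.
    rendered = '{"_request_timeout" : [60, 60]}'
    print(json.loads(rendered))  # {'_request_timeout': [60, 60]}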

- # Specifies the uid to run the first process of the worker pods containers as
- run_as_user =
-
# ref: https://airflow.apache.org/docs/apache-airflow/1.10.1/security.html#setting-up-google-authentication
[google]
client_id = <check the doc above>