Skip to content

Commit 5b76a0a

Browse files
committed
fix(airflow.cfg): migrate to airflow 3
remove Airflow config options that no longer exist in Airflow 3 or whose values are the same as the defaults
1 parent 54ec5b2 commit 5b76a0a

File tree

1 file changed

+15
-50
lines changed

1 file changed

+15
-50
lines changed

airflow.cfg

Lines changed: 15 additions & 50 deletions
Original file line number · Diff line number · Diff line change
@@ -1,8 +1,4 @@
11
[core]
2-
# The folder where your airflow pipelines live, most likely a
3-
# subfolder in a code repository. This path must be absolute.
4-
dags_folder = /opt/airflow/dags
5-
62
# The amount of parallelism as a setting to the executor. This defines
73
# the max number of task instances that should run simultaneously
84
# on this airflow installation
@@ -16,73 +12,47 @@ max_active_tasks_per_dag = 64
1612
# environment
1713
load_examples = False
1814

19-
# Where your Airflow plugins are stored
20-
plugins_folder = /opt/airflow/plugins
21-
2215
# Secret key to save connection passwords in the db
2316
fernet_key = $FERNET_KEY
2417

2518
# How long before timing out a python file import
2619
dagbag_import_timeout = 600
2720

21+
[dag_processor]
2822
# How long before timing out a DagFileProcessor, which processes a dag file
2923
dag_file_processor_timeout = 600
3024

31-
3225
[database]
3326
# The SqlAlchemy connection string to the metadata database.
3427
# SqlAlchemy supports many different database engine, more information
3528
# their website
3629
sql_alchemy_conn = sqlite:////opt/airflow/sqlite/airflow.db
3730

38-
39-
[logging]
40-
# The folder where airflow should store its log files
41-
# This path must be absolute
42-
base_log_folder = /opt/airflow/logs
43-
44-
# Log format for when Colored logs is enabled
45-
colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {{%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d}} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s
46-
47-
# Format of Log line
48-
log_format = [%%(asctime)s] {{%%(filename)s:%%(lineno)d}} %%(levelname)s - %%(message)s
49-
50-
dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log
51-
52-
5331
[api]
54-
# How to authenticate users of the API
55-
auth_backends = airflow.api.auth.backend.default
56-
57-
58-
[webserver]
59-
# Number of seconds the webserver waits before killing gunicorn master that doesn't respond
60-
web_server_master_timeout = 600
32+
# Number of workers to run the Gunicorn web server
33+
workers = 2
6134

6235
# Number of seconds the gunicorn webserver waits before timing out on a worker
63-
web_server_worker_timeout = 600
36+
worker_timeout = 600
6437

38+
# Expose the configuration file in the web server
39+
expose_config = True
40+
41+
# TODO: move it to env var
6542
# Secret key used to run your flask app
6643
# It should be as random as possible
6744
secret_key = l\xba,\xc3\x023\xca\x04\xdb\xf2\xf7\xfa\xb8#\xee>
6845

69-
# Number of workers to run the Gunicorn web server
70-
workers = 2
71-
72-
# Expose the configuration file in the web server
73-
expose_config = True
46+
[api_auth]
47+
jwt_secret = $JWT_SECRET
7448

75-
# Allow the UI to be rendered in a frame
76-
x_frame_enabled = True
49+
[fab]
7750

7851
# Minutes of non-activity before logged out from UI
7952
# 0 means never get forcibly logged out
80-
force_log_out_after = 0
81-
82-
authenticate = False
83-
auth_backend = airflow.api.auth.backend.default
84-
53+
session_lifetime_minutes = 0
8554

55+
# TODO: check whether we need it. I don't think we're using celery
8656
[celery]
8757
# The concurrency that will be used when starting workers with the
8858
# ``airflow celery worker`` command. This defines the number of task instances that
@@ -115,10 +85,8 @@ result_backend = db+postgresql://airflow:airflow@postgres/airflow
11585

11686

11787
[scheduler]
118-
child_process_log_directory = /opt/airflow/logs/scheduler
119-
120-
# Format of the log_id, which is used to query for a given tasks logs
121-
log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
88+
# TODO: check whether it exists
89+
enable_health_check = True
12290

12391

12492
[kubernetes]
@@ -133,9 +101,6 @@ log_id_template = {{dag_id}}-{{task_id}}-{{execution_date}}-{{try_number}}
133101
# The timeout is specified as [connect timeout, read timeout]
134102
kube_client_request_args = {{"_request_timeout" : [60,60] }}
135103

136-
# Specifies the uid to run the first process of the worker pods containers as
137-
run_as_user =
138-
139104
# ref: https://airflow.apache.org/docs/apache-airflow/1.10.1/security.html#setting-up-google-authentication
140105
[google]
141106
client_id = <check the doc above>

0 commit comments

Comments (0)