Skip to content

Commit 5f13802

Browse files
Merge pull request #166 from pycontw/cleanup
Cleanup not used configuration and dag
2 parents: b09ea8a + 62cef58 · commit 5f13802

File tree

2 files changed: 0 additions, 91 deletions

airflow.cfg

Lines changed: 0 additions & 46 deletions
@@ -57,55 +57,9 @@ jwt_secret = $JWT_SECRET
 [fab]
 auth_backends = airflow.providers.fab.auth_manager.api.auth.backend.session
 
-# TODO: check whether we need it. I don't think we're using celery
-[celery]
-# The concurrency that will be used when starting workers with the
-# ``airflow celery worker`` command. This defines the number of task instances that
-# a worker will take, so size up your workers based on the resources on
-# your worker box and the nature of your tasks
-worker_concurrency = 32
-
-# The maximum and minimum concurrency that will be used when starting workers with the
-# ``airflow celery worker`` command (always keep minimum processes, but grow
-# to maximum if necessary). Note the value should be max_concurrency,min_concurrency
-# Pick these numbers based on resources on worker box and the nature of the task.
-# If autoscale option is available, worker_concurrency will be ignored.
-# http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale
-# Example: worker_autoscale = 16,12
-worker_autoscale = 32,12
-
-# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally
-# a sqlalchemy database. Refer to the Celery documentation for more
-# information.
-# http://docs.celeryproject.org/en/latest/userguide/configuration.html#broker-settings
-broker_url = redis://redis:6379/1
-
-# The Celery result_backend. When a job finishes, it needs to update the
-# metadata of the job. Therefore it will post a message on a message bus,
-# or insert it into a database (depending of the backend)
-# This status is used by the scheduler to update the state of the task
-# The use of a database is highly recommended
-# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-result-backend-settings
-result_backend = db+postgresql://airflow:airflow@postgres/airflow
-
-
 [scheduler]
-# TODO: check whether it exists
 enable_health_check = True
 
-
-[kubernetes]
-# Keyword parameters to pass while calling a kubernetes client core_v1_api methods
-# from Kubernetes Executor provided as a single line formatted JSON dictionary string.
-# List of supported params are similar for all core_v1_apis, hence a single config
-# variable for all apis.
-# See:
-# https://raw.githubusercontent.com/kubernetes-client/python/master/kubernetes/client/apis/core_v1_api.py
-# Note that if no _request_timeout is specified, the kubernetes client will wait indefinitely
-# for kubernetes api responses, which will cause the scheduler to hang.
-# The timeout is specified as [connect timeout, read timeout]
-kube_client_request_args = {{"_request_timeout" : [60,60] }}
-
 # ref: https://airflow.apache.org/docs/apache-airflow/1.10.1/security.html#setting-up-google-authentication
 [google]
 client_id = <check the doc above>

dags/app/channel_reminder/dag.py

Lines changed: 0 additions & 45 deletions
This file was deleted.

0 commit comments

Comments (0)