/root/airflow/airflow.cfg

Revision as of 13:58, 14 November 2022

[core]
# The folder where your airflow pipelines live, most likely a
# subfolder in a code repository. This path must be absolute.
dags_folder = /root/airflow/dags

# The hostname is resolved by a callable, specified here as a path to it.
# The format is "package.function".
#
# For example, the default value "airflow.utils.net.getfqdn" means the result comes from a patched
# version of socket.getfqdn() - see https://github.com/python/cpython/issues/49254.
#
# No argument should be required in the function specified.
# If using IP address as hostname is preferred, use value ``airflow.utils.net.get_host_ip_address``
hostname_callable = airflow.utils.net.getfqdn
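
# A minimal sketch of a custom hostname callable (illustrative; the module and
# function names below are hypothetical). It takes no arguments, returns a string,
# and is referenced as a dotted "package.function" path:
#
#   # my_company/airflow_net.py
#   import socket
#
#   def short_hostname() -> str:
#       # Return the unqualified host name instead of the FQDN.
#       return socket.gethostname()
#
# hostname_callable = my_company.airflow_net.short_hostname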

# Default timezone in case supplied date times are naive
# can be utc (default), system, or any IANA timezone string (e.g. Europe/Amsterdam)
default_timezone = utc

# The executor class that airflow should use. Choices include
# ``SequentialExecutor``, ``LocalExecutor``, ``CeleryExecutor``, ``DaskExecutor``,
# ``KubernetesExecutor``, ``CeleryKubernetesExecutor`` or the
# full import path to the class when using a custom executor.
executor = SequentialExecutor

# This defines the maximum number of task instances that can run concurrently per scheduler in
# Airflow, regardless of the worker count. Generally this value, multiplied by the number of
# schedulers in your cluster, is the maximum number of task instances with the running
# state in the metadata database.
parallelism = 32

# The maximum number of task instances allowed to run concurrently in each DAG. To calculate
# the number of tasks that is running concurrently for a DAG, add up the number of running
# tasks for all DAG runs of the DAG. This is configurable at the DAG level with ``max_active_tasks``,
# which is defaulted as ``max_active_tasks_per_dag``.
#
# An example scenario when this would be useful is when you want to stop a new dag with an early
# start date from stealing all the executor slots in a cluster.
max_active_tasks_per_dag = 16

# Are DAGs paused by default at creation
dags_are_paused_at_creation = True

# The maximum number of active DAG runs per DAG. The scheduler will not create more DAG runs
# if it reaches the limit. This is configurable at the DAG level with ``max_active_runs``,
# which is defaulted as ``max_active_runs_per_dag``.
max_active_runs_per_dag = 16

# Whether to load the DAG examples that ship with Airflow. It's good to
# get started, but you probably want to set this to ``False`` in a production
# environment
load_examples = True

# Path to the folder containing Airflow plugins
plugins_folder = /root/airflow/plugins

# Should tasks be executed via forking of the parent process ("False",
# the speedier option) or by spawning a new python process ("True", slower,
# but means plugin changes are picked up by tasks straight away)
execute_tasks_new_python_interpreter = False

# Secret key to save connection passwords in the db
fernet_key =
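
# A Fernet key can be generated with the ``cryptography`` package (a sketch,
# assuming the package is installed; leaving fernet_key empty stores connection
# passwords unencrypted):
#
#   from cryptography.fernet import Fernet
#
#   # generate_key() returns bytes; decode() yields the string to paste above
#   print(Fernet.generate_key().decode())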

# Whether to disable pickling dags
donot_pickle = True

# How long before timing out a python file import
dagbag_import_timeout = 30.0

# Should a traceback be shown in the UI for dagbag import errors,
# instead of just the exception message
dagbag_import_error_tracebacks = True

# If tracebacks are shown, how many entries from the traceback should be shown
dagbag_import_error_traceback_depth = 2

# How long before timing out a DagFileProcessor, which processes a dag file
dag_file_processor_timeout = 50

# The class to use for running task instances in a subprocess.
# Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class
# when using a custom task runner.
task_runner = StandardTaskRunner

# If set, tasks without a ``run_as_user`` argument will be run with this user
# Can be used to de-elevate a sudo user running Airflow when executing tasks
default_impersonation =

# What security module to use (for example kerberos)
security =

# Turn unit test mode on (overwrites many configuration options with test
# values at runtime)
unit_test_mode = False

# Whether to enable pickling for xcom (note that this is insecure and allows for
# RCE exploits).
enable_xcom_pickling = False

# When a task is killed forcefully, this is the amount of time in seconds that
# it has to clean up after it is sent a SIGTERM, before it is SIGKILLed
killed_task_cleanup_time = 60

# Whether to override params with dag_run.conf. If you pass some key-value pairs
# through ``airflow dags backfill -c`` or
# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params.
dag_run_conf_overrides_params = True
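
# For example (illustrative DAG and key names), triggering with
#   airflow dags trigger -c '{"path": "/data/today"}' my_dag
# makes ``{{ params.path }}`` render as ``/data/today`` for that run, overriding
# any default defined for ``path`` in the DAG's ``params``.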

# When discovering DAGs, ignore any files that don't contain the strings ``DAG`` and ``airflow``.
dag_discovery_safe_mode = True

# The pattern syntax used in the ".airflowignore" files in the DAG directories. Valid values are
# ``regexp`` or ``glob``.
dag_ignore_file_syntax = regexp
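
# A sketch of an ``.airflowignore`` placed in the DAG folder (with the default
# ``regexp`` syntax each line is a regular expression matched against file
# paths; with ``glob`` the lines behave like .gitignore patterns):
#
#   project_a/.*
#   .*_backup\.py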

# The number of retries each task is going to have by default. Can be overridden at dag or task level.
default_task_retries = 0

# The number of seconds each task is going to wait by default between retries. Can be overridden at
# dag or task level.
default_task_retry_delay = 300

# The weighting method used for the effective total priority weight of the task
default_task_weight_rule = downstream

# The default task execution_timeout value for the operators. An integer value is expected, which is
# passed into timedelta as seconds. If not specified, the value is considered None,
# meaning that the operators never time out by default.
default_task_execution_timeout =

# Serialized DAGs are not updated more often than this minimum interval (in seconds), to reduce the database write rate.
min_serialized_dag_update_interval = 30

# If True, serialized DAGs are compressed before writing to DB.
# Note: this will disable the DAG dependencies view
compress_serialized_dags = False

# Serialized DAGs are not fetched more often than this minimum interval (in seconds), to reduce the database
# read rate. This config controls how often your DAGs are refreshed in the Webserver.
min_serialized_dag_fetch_interval = 10

# Maximum number of Rendered Task Instance Fields (Template Fields) per task to store
# in the Database.
# All the template_fields for each of Task Instance are stored in the Database.
# Keeping this number small may cause an error when you try to view the ``Rendered`` tab in the
# TaskInstance view for older tasks.
max_num_rendered_ti_fields_per_task = 30

# On each dagrun check against defined SLAs
check_slas = True

# Path to a custom XCom class that will be used to store and resolve operator results
# Example: xcom_backend = path.to.CustomXCom
xcom_backend = airflow.models.xcom.BaseXCom
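
# A minimal sketch of a custom XCom backend (illustrative; the exact
# ``serialize_value`` signature varies between Airflow versions, so verify it
# against the version in use):
#
#   import json
#   from airflow.models.xcom import BaseXCom
#
#   class JsonXCom(BaseXCom):
#       """Store XCom values as JSON instead of the default serialization."""
#
#       @staticmethod
#       def serialize_value(value, **kwargs):
#           return json.dumps(value).encode("utf-8")
#
#       @staticmethod
#       def deserialize_value(result):
#           # ``result`` is the stored XCom row; its ``value`` holds the bytes.
#           return json.loads(result.value)
#
# xcom_backend = path.to.JsonXCom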

# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to ``False``,
# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module.
lazy_load_plugins = True

# By default Airflow providers are lazily-discovered (discovery and imports happen only when required).
# Set it to False, if you want to discover providers whenever 'airflow' is invoked via cli or
# loaded from module.
lazy_discover_providers = True

# Hide sensitive Variables or Connection extra json keys from UI and task logs when set to True
#
# (Connection passwords are always hidden in logs)
hide_sensitive_var_conn_fields = True

# A comma-separated list of extra sensitive keywords to look for in variables names or connection's
# extra JSON.
sensitive_var_conn_names =

# Task Slot counts for ``default_pool``. This setting would not have any effect in an existing
# deployment where the ``default_pool`` is already created. For existing deployments, users can
# change the number of slots using Webserver, API or the CLI
default_pool_task_slot_count = 128

# The maximum list/dict length an XCom can push to trigger task mapping. If the pushed list/dict has a
# length exceeding this value, the task pushing the XCom will be failed automatically to prevent the
# mapped tasks from clogging the scheduler.
max_map_length = 1024

# The default umask to use for process when run in daemon mode (scheduler, worker,  etc.)
#
# This controls the file-creation mode mask which determines the initial value of file permission bits
# for newly created files.
#
# This value is treated as an octal-integer.
daemon_umask = 0o077
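
# For example, with the default umask of 0o077 a file that would otherwise be
# created with mode 0o666 ends up as 0o666 & ~0o077 = 0o600 (read/write for the
# owning user only).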

# Class to use as dataset manager.
# Example: dataset_manager_class = airflow.datasets.manager.DatasetManager
# dataset_manager_class =

# Kwargs to supply to dataset manager.
# Example: dataset_manager_kwargs = {"some_param": "some_value"}
# dataset_manager_kwargs =

[database]
# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engines.
# More information here:
# http://airflow.apache.org/docs/apache-airflow/stable/howto/set-up-database.html#database-uri
sql_alchemy_conn = sqlite:////root/airflow/airflow.db
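
# Examples for non-SQLite backends (host, user and password are placeholders):
# sql_alchemy_conn = postgresql+psycopg2://airflow_user:airflow_pass@localhost:5432/airflow
# sql_alchemy_conn = mysql+mysqldb://airflow_user:airflow_pass@localhost:3306/airflow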

# Extra engine specific keyword args passed to SQLAlchemy's create_engine, as a JSON-encoded value
# Example: sql_alchemy_engine_args = {"arg1": True}
# sql_alchemy_engine_args =

# The encoding for the databases
sql_engine_encoding = utf-8

# Collation for ``dag_id``, ``task_id``, ``key``, ``external_executor_id`` columns
# in case they have different encoding.
# By default this collation is the same as the database collation, however for ``mysql`` and ``mariadb``
# the default is ``utf8mb3_bin`` so that the index sizes of our index keys will not exceed
# the maximum size of allowed index when collation is set to ``utf8mb4`` variant
# (see https://github.com/apache/airflow/pull/17603#issuecomment-901121618).
# sql_engine_collation_for_ids =

# If SqlAlchemy should pool database connections.
sql_alchemy_pool_enabled = True

# The SqlAlchemy pool size is the maximum number of database connections
# in the pool. 0 indicates no limit.
sql_alchemy_pool_size = 5

# The maximum overflow size of the pool.
# When the number of checked-out connections reaches the size set in pool_size,
# additional connections will be returned up to this limit.
# When those additional connections are returned to the pool, they are disconnected and discarded.
# It follows then that the total number of simultaneous connections the pool will allow
# is pool_size + max_overflow,
# and the total number of "sleeping" connections the pool will allow is pool_size.
# max_overflow can be set to ``-1`` to indicate no overflow limit;
# no limit will be placed on the total number of concurrent connections. Defaults to ``10``.
sql_alchemy_max_overflow = 10

# The SqlAlchemy pool recycle is the number of seconds a connection
# can be idle in the pool before it is invalidated. This config does
# not apply to sqlite. If the number of DB connections is ever exceeded,
# a lower config value will allow the system to recover faster.
sql_alchemy_pool_recycle = 1800

# Check connection at the start of each connection pool checkout.
# Typically, this is a simple statement like "SELECT 1".
# More information here:
# https://docs.sqlalchemy.org/en/14/core/pooling.html#disconnect-handling-pessimistic
sql_alchemy_pool_pre_ping = True

# The schema to use for the metadata database.
# SqlAlchemy supports databases with the concept of multiple schemas.
sql_alchemy_schema =

# Import path for connect args in SqlAlchemy. Defaults to an empty dict.
# This is useful when you want to configure db engine args that SqlAlchemy won't parse
# in connection string.
# See https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.connect_args
# sql_alchemy_connect_args =

# Whether to load the default connections that ship with Airflow. It's good to
# get started, but you probably want to set this to ``False`` in a production
# environment
load_default_connections = True

# Number of times the code should be retried in case of DB Operational Errors.
# Not all transactions will be retried as it can cause undesired state.
# Currently it is only used in ``DagFileProcessor.process_file`` to retry ``dagbag.sync_to_db``.
max_db_retries = 3

[logging]
# The folder where airflow should store its log files.
# This path must be absolute.
# There are a few existing configurations that assume this is set to the default.
# If you choose to override this you may need to update the
# dag_processor_manager_log_location setting as well.
base_log_folder = /root/airflow/logs

# Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search.
# Set this to True if you want to enable remote logging.
remote_logging = False

# Users must supply an Airflow connection id that provides access to the storage
# location. Depending on your remote logging service, this may only be used for
# reading logs, not writing them.
remote_log_conn_id =

# Path to Google Credential JSON file. If omitted, authorization based on `the Application Default
# Credentials
# <https://cloud.google.com/docs/authentication/production#finding_credentials_automatically>`__ will
# be used.
google_key_path =

# Storage bucket URL for remote logging
# S3 buckets should start with "s3://"
# Cloudwatch log groups should start with "cloudwatch://"
# GCS buckets should start with "gs://"
# WASB buckets should start with "wasb" just to help Airflow select correct handler
# Stackdriver logs should start with "stackdriver://"
remote_base_log_folder =

# Use server-side encryption for logs stored in S3
encrypt_s3_logs = False

# Logging level.
#
# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``.
logging_level = INFO

# Logging level for celery. If not set, it uses the value of logging_level
#
# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``.
celery_logging_level =

# Logging level for Flask-appbuilder UI.
#
# Supported values: ``CRITICAL``, ``ERROR``, ``WARNING``, ``INFO``, ``DEBUG``.
fab_logging_level = WARNING

# Logging class
# Specify the class that will specify the logging configuration
# This class has to be on the python classpath
# Example: logging_config_class = my.path.default_local_settings.LOGGING_CONFIG
logging_config_class =
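
# A minimal sketch of such a module (assuming the common pattern of copying
# Airflow's default logging dict; the module path ``my/path/default_local_settings.py``
# is hypothetical):
#
#   from copy import deepcopy
#   from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
#
#   LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
#   # Tweak the copied dict, e.g. quieten the console handler:
#   LOGGING_CONFIG["handlers"]["console"]["level"] = "WARNING"
#
# logging_config_class = my.path.default_local_settings.LOGGING_CONFIG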

# Flag to enable/disable Colored logs in Console
# Colour the logs when the controlling terminal is a TTY.
colored_console_log = True

# Log format for when Colored logs is enabled
colored_log_format = [%%(blue)s%%(asctime)s%%(reset)s] {%%(blue)s%%(filename)s:%%(reset)s%%(lineno)d} %%(log_color)s%%(levelname)s%%(reset)s - %%(log_color)s%%(message)s%%(reset)s
colored_formatter_class = airflow.utils.log.colored_log.CustomTTYColoredFormatter

# Format of Log line
log_format = [%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s
simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s

# Where to send dag parser logs. If "file", logs are sent to log files defined by child_process_log_directory.
dag_processor_log_target = file

# Format of Dag Processor Log line
dag_processor_log_format = [%%(asctime)s] [SOURCE:DAG_PROCESSOR] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s
log_formatter_class = airflow.utils.log.timezone_aware.TimezoneAware

# Specify prefix pattern like mentioned below with stream handler TaskHandlerWithCustomFormatter
# Example: task_log_prefix_template = {ti.dag_id}-{ti.task_id}-{execution_date}-{try_number}
task_log_prefix_template =

# Formatting for how airflow generates file names/paths for each task run.
log_filename_template = dag_id={{ ti.dag_id }}/run_id={{ ti.run_id }}/task_id={{ ti.task_id }}/{%% if ti.map_index >= 0 %%}map_index={{ ti.map_index }}/{%% endif %%}attempt={{ try_number }}.log
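
# For example, the first attempt of task ``my_task`` in DAG ``my_dag`` for a
# scheduled run (not a mapped task, so the map_index part is skipped) would be
# written under base_log_folder as something like:
# dag_id=my_dag/run_id=scheduled__2022-11-14T00:00:00+00:00/task_id=my_task/attempt=1.log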

# Formatting for how airflow generates file names for DAG file processor logs
log_processor_filename_template = {{ filename }}.log

# Full path of dag_processor_manager logfile.
dag_processor_manager_log_location = /root/airflow/logs/dag_processor_manager/dag_processor_manager.log

# Name of handler to read task instance logs.
# Defaults to use ``task`` handler.
task_log_reader = task

# A comma-separated list of third-party logger names that will be configured to print messages to
# consoles.
# Example: extra_logger_names = connexion,sqlalchemy
extra_logger_names =

# When you start an airflow worker, airflow starts a tiny web server
# subprocess to serve the worker's local log files to the airflow main
# web server, which then builds pages and sends them to users. This defines
# the port on which the logs are served. It needs to be unused, and open and
# visible from the main web server so it can connect to the workers.
worker_log_server_port = 8793

[metrics]

# StatsD (https://github.com/etsy/statsd) integration settings.
# Enables sending metrics to StatsD.
statsd_on = False
statsd_host = localhost
statsd_port = 8125
statsd_prefix = airflow

# If you want to avoid sending all the available metrics to StatsD,
# you can configure an allow list of prefixes (comma separated) to send only the metrics that
# start with the elements of the list (e.g. "scheduler,executor,dagrun")
statsd_allow_list =

# A function that validates the StatsD stat name, applies changes to the stat name if necessary, and
# returns the transformed stat name.
#
# The function should have the following signature:
# def func_name(stat_name: str) -> str:
stat_name_handler =
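
# A minimal sketch matching that signature (the module and function names are
# hypothetical):
#
#   def normalize_stat_name(stat_name: str) -> str:
#       # Lower-case the name and replace characters the metrics backend dislikes.
#       return stat_name.lower().replace("-", "_")
#
# stat_name_handler = my_company.metrics.normalize_stat_name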

# To enable datadog integration to send airflow metrics.
statsd_datadog_enabled = False

# List of datadog tags attached to all metrics (e.g. key1:value1,key2:value2)
statsd_datadog_tags =

# If you want to utilise your own custom StatsD client set the relevant
# module path below.
# Note: The module path must exist on your PYTHONPATH for Airflow to pick it up
# statsd_custom_client_path =

[secrets]
# Full class name of secrets backend to enable (will precede env vars and metastore in search path)
# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend
backend =

# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class.
# See documentation for the secrets backend you are using. JSON is expected.
# Example for AWS Systems Manager ParameterStore:
# ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}``
backend_kwargs =

[cli]
# How the CLI should access the API. The LocalClient will use the
# database directly, while the json_client will use the API running on the
# webserver
api_client = airflow.api.client.local_client

# If you set web_server_url_prefix, do NOT forget to append it here, ex:
# ``endpoint_url = http://localhost:8080/myroot``
# So the API will look like: ``http://localhost:8080/myroot/api/experimental/...``
endpoint_url = http://localhost:8080

[debug]
# Used only with ``DebugExecutor``. If set to ``True``, the DAG will fail at the first
# failed task. Helpful for debugging purposes.
fail_fast = False

[api]
# Enables the deprecated experimental API. Please note that these APIs do not have access control.
# The authenticated user has full access.
#
# .. warning::
#
#   This `Experimental REST API <https://airflow.readthedocs.io/en/latest/rest-api-ref.html>`__ is
#   deprecated since version 2.0. Please consider using
#   `the Stable REST API <https://airflow.readthedocs.io/en/latest/stable-rest-api-ref.html>`__.
#   For more information on migration, see
#   `RELEASE_NOTES.rst <https://github.com/apache/airflow/blob/main/RELEASE_NOTES.rst>`_
enable_experimental_api = False

# Comma separated list of auth backends to authenticate users of the API. See
# https://airflow.apache.org/docs/apache-airflow/stable/security/api.html for possible values.
# ("airflow.api.auth.backend.default" allows all requests for historic reasons)
auth_backends = airflow.api.auth.backend.session
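
# For example, to accept both UI session cookies and HTTP basic authentication
# (for scripts), a commonly used combination is (verify the backend names for
# your Airflow version):
# auth_backends = airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session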

# Used to set the maximum page limit for API requests
maximum_page_limit = 100

# Used to set the default page limit when limit is zero. A default limit
# of 100 is set in the OpenAPI spec. However, this particular default limit
# only works when the limit is set equal to zero (0) in API requests.
# If no limit is supplied, the OpenAPI spec default is used.
fallback_page_limit = 100

# The intended audience for JWT token credentials used for authorization. This value must match on the client and server sides. If empty, audience will not be tested.
# Example: google_oauth2_audience = project-id-random-value.apps.googleusercontent.com
google_oauth2_audience =

# Path to Google Cloud Service Account key file (JSON). If omitted, authorization based on
# `the Application Default Credentials
# <https://cloud.google.com/docs/authentication/production#finding_credentials_automatically>`__ will
# be used.
# Example: google_key_path = /files/service-account-json
google_key_path =

# Used in response to a preflight request to indicate which HTTP
# headers can be used when making the actual request. This header is
# the server side response to the browser's
# Access-Control-Request-Headers header.
access_control_allow_headers =

# Specifies the method or methods allowed when accessing the resource.
access_control_allow_methods =

# Indicates whether the response can be shared with requesting code from the given origins.
# Separate URLs with space.
access_control_allow_origins =
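
# An illustrative CORS setup for two trusted front ends (values are examples only):
# access_control_allow_headers = origin, content-type, accept
# access_control_allow_methods = POST, GET, OPTIONS, DELETE
# access_control_allow_origins = https://exampleclientapp1.com https://exampleclientapp2.com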

[lineage]
# What lineage backend to use
backend =

[atlas]
sasl_enabled = False
host =
port = 21000
username =
password =

[operators]
# The default owner assigned to each new operator, unless
# provided explicitly or passed via ``default_args``
default_owner = airflow