diff --git a/UPDATING.md b/UPDATING.md
index 7ecc1a298db8e..941d4d72df63b 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -57,6 +57,7 @@ assists people when migrating to a new version.
   translations inside the python package. This includes the .mo files needed by pybabel
   on the backend, as well as the .json files used by the frontend. If you were doing
   anything before as part of your bundling to expose translation packages, it's probably
   not needed anymore.
+- [29471](https://github.com/apache/superset/pull/29471) Superset now reads environment variables from a `.env` file at startup (via `python-dotenv` in `superset/config.py`). This does not change how the `.env` file in the `docker` folder is used.
 
 ### Potential Downtime
 
diff --git a/superset/config.py b/superset/config.py
index e4dc202537acc..2d1ced26781a1 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -41,6 +41,7 @@
 import click
 import pkg_resources
 from celery.schedules import crontab
+from dotenv import load_dotenv
 from flask import Blueprint
 from flask_appbuilder.security.manager import AUTH_DB
 from flask_caching.backends.base import BaseCache
@@ -64,6 +65,7 @@
 from superset.utils.log import DBEventLogger
 from superset.utils.logging_configurator import DefaultLoggingConfigurator
 
+load_dotenv()  # populate os.environ from a .env file, if one is present
 logger = logging.getLogger(__name__)
 
 if TYPE_CHECKING:
@@ -200,7 +202,8 @@ def _try_json_readsha(filepath: str, length: int) -> str | None:
 # isolation level is READ COMMITTED. All backends should use READ COMMITTED (or similar)
 # to help ensure consistent behavior.
 SQLALCHEMY_ENGINE_OPTIONS = {
-    "isolation_level": "SERIALIZABLE",  # SQLite does not support READ COMMITTED.
+    # SQLite does not support READ COMMITTED.
+    "isolation_level": "SERIALIZABLE",
 }
 
 # In order to hook up a custom password store for all SQLALCHEMY connections
@@ -276,7 +279,8 @@ def _try_json_readsha(filepath: str, length: int) -> str | None:
 # Use all X-Forwarded headers when ENABLE_PROXY_FIX is True.
 # When proxying to a different port, set "x_port" to 0 to avoid downstream issues.
 ENABLE_PROXY_FIX = False
-PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1}
+PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1,
+                    "x_host": 1, "x_port": 1, "x_prefix": 1}
 
 # Configuration for scheduling queries from SQL Lab.
 SCHEDULED_QUERIES: dict[str, Any] = {}
@@ -567,7 +571,7 @@ class D3TimeFormat(TypedDict, total=False):
 # Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars.
 DEFAULT_FEATURE_FLAGS.update(
     {
-        k[len("SUPERSET_FEATURE_") :]: parse_boolean_string(v)
+        k[len("SUPERSET_FEATURE_"):]: parse_boolean_string(v)
         for k, v in os.environ.items()
         if re.search(r"^SUPERSET_FEATURE_\w+", k)
     }
@@ -590,7 +594,8 @@ class D3TimeFormat(TypedDict, total=False):
 #     if hasattr(g, "user") and g.user.is_active:
 #         feature_flags_dict['some_feature'] = g.user and g.user.get_id() == 5
 #     return feature_flags_dict
-GET_FEATURE_FLAGS_FUNC: Callable[[dict[str, bool]], dict[str, bool]] | None = None
+GET_FEATURE_FLAGS_FUNC: Callable[[
    dict[str, bool]], dict[str, bool]] | None = None
 # A function that receives a feature flag name and an optional default value.
 # Has a similar utility to GET_FEATURE_FLAGS_FUNC but it's useful to not force the
 # evaluation of all feature flags when just evaluating a single one.
@@ -689,7 +694,8 @@ class D3TimeFormat(TypedDict, total=False):
 THUMBNAIL_DASHBOARD_DIGEST_FUNC: (
     None | (Callable[[Dashboard, ExecutorType, str], str])
 ) = None
-THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None
+THUMBNAIL_CHART_DIGEST_FUNC: Callable[[
+    Slice, ExecutorType, str], str] | None = None
 
 THUMBNAIL_CACHE_CONFIG: CacheConfig = {
     "CACHE_TYPE": "NullCache",
@@ -1216,7 +1222,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(  # pylint: disable=invalid-name
 #     lambda url, query: url if is_fresh(query) else None
 # )
-# pylint: disable-next=unnecessary-lambda-assignment
-TRACKING_URL_TRANSFORMER = lambda url: url  # noqa: E731
+def TRACKING_URL_TRANSFORMER(url):
+    return url
 
 
 # customize the polling time of each engine
@@ -1368,7 +1374,8 @@ def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 ALERT_REPORTS_WORKING_TIME_OUT_LAG = int(timedelta(seconds=10).total_seconds())
 # if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout
 # Equal to working timeout + ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG
-ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(timedelta(seconds=1).total_seconds())
+ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(
+    timedelta(seconds=1).total_seconds())
 # Default values that user using when creating alert
 ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT = 3600
 ALERT_REPORTS_DEFAULT_RETENTION = 90
@@ -1514,7 +1521,8 @@ def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 CONTENT_SECURITY_POLICY_WARNING = True
 
 # Do you want Talisman enabled?
-TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True))
+TALISMAN_ENABLED = utils.cast_to_boolean(
+    os.environ.get("TALISMAN_ENABLED", True))
 
 # If you want Talisman, how do you want it configured??
 TALISMAN_CONFIG = {
@@ -1635,7 +1643,7 @@ def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 # conventions and such. You can find examples in the tests.
 
-# pylint: disable-next=unnecessary-lambda-assignment
-SQLA_TABLE_MUTATOR = lambda table: table  # noqa: E731
+def SQLA_TABLE_MUTATOR(table):
+    return table
 
 
 # Global async query config options.
@@ -1811,7 +1819,8 @@ class ExtraDynamicQueryFilters(TypedDict, total=False):
             if key.isupper():
                 setattr(module, key, getattr(override_conf, key))
 
-        click.secho(f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan")
+        click.secho(
+            f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan")
     except Exception:
         logger.exception(
             "Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
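For anyone reviewing this change, here is a minimal, self-contained sketch of what the new `load_dotenv()` call does at import time. It is illustrative only and not part of the diff: the `sketch.py` file name, the sample `.env` contents, and the choice of `TALISMAN_ENABLED` as the demo variable are assumptions; only `load_dotenv()` itself comes from the PR.

```python
# sketch.py -- standalone illustration of the .env loading added above.
# Requires python-dotenv: pip install python-dotenv
import os
from pathlib import Path

from dotenv import load_dotenv

# Write a demo .env file. With no arguments, load_dotenv() looks for a
# .env file starting from the caller's directory and walking upward.
Path(".env").write_text("TALISMAN_ENABLED=False\n")

# By default load_dotenv() does NOT override variables that are already
# set in the process environment (pass override=True to change that),
# so deployment-provided env vars still take precedence over .env values.
load_dotenv()

# Values loaded from .env arrive as strings, which is why config.py runs
# TALISMAN_ENABLED through utils.cast_to_boolean() rather than using it raw.
print(repr(os.environ["TALISMAN_ENABLED"]))  # 'False' (unless already set)
```

Because the call sits near the top of `superset/config.py`, any setting the module reads from `os.environ` further down (the `SUPERSET_FEATURE_*` flags, `TALISMAN_ENABLED`, etc.) can now also be supplied through a `.env` file.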