From 5755c0628b8d0b9f8b28b9d1ed53fc33c88e7674 Mon Sep 17 00:00:00 2001 From: Hiren Soni Date: Wed, 3 Jul 2024 15:52:21 +0200 Subject: [PATCH 1/3] Fixed reading env file when running project locally without docker --- superset/config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/superset/config.py b/superset/config.py index e4dc202537ac..582c16d74dc9 100644 --- a/superset/config.py +++ b/superset/config.py @@ -63,7 +63,9 @@ from superset.utils.encrypt import SQLAlchemyUtilsAdapter from superset.utils.log import DBEventLogger from superset.utils.logging_configurator import DefaultLoggingConfigurator +from dotenv import load_dotenv +load_dotenv() logger = logging.getLogger(__name__) if TYPE_CHECKING: From de133207960d44b7732d8d26a0fb95980ea5e79b Mon Sep 17 00:00:00 2001 From: Hiren Soni Date: Thu, 4 Jul 2024 11:44:44 +0200 Subject: [PATCH 2/3] Changed the position of the import to keep it in the right order alphabetically --- superset/config.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/superset/config.py b/superset/config.py index 582c16d74dc9..2d1ced26781a 100644 --- a/superset/config.py +++ b/superset/config.py @@ -41,6 +41,7 @@ import click import pkg_resources from celery.schedules import crontab +from dotenv import load_dotenv from flask import Blueprint from flask_appbuilder.security.manager import AUTH_DB from flask_caching.backends.base import BaseCache @@ -63,7 +64,6 @@ from superset.utils.encrypt import SQLAlchemyUtilsAdapter from superset.utils.log import DBEventLogger from superset.utils.logging_configurator import DefaultLoggingConfigurator -from dotenv import load_dotenv load_dotenv() logger = logging.getLogger(__name__) @@ -202,7 +202,8 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: # isolation level is READ COMMITTED. All backends should use READ COMMITTED (or similar) # to help ensure consistent behavior. 
SQLALCHEMY_ENGINE_OPTIONS = { - "isolation_level": "SERIALIZABLE", # SQLite does not support READ COMMITTED. + # SQLite does not support READ COMMITTED. + "isolation_level": "SERIALIZABLE", } # In order to hook up a custom password store for all SQLALCHEMY connections @@ -278,7 +279,8 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: # Use all X-Forwarded headers when ENABLE_PROXY_FIX is True. # When proxying to a different port, set "x_port" to 0 to avoid downstream issues. ENABLE_PROXY_FIX = False -PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1} +PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, + "x_host": 1, "x_port": 1, "x_prefix": 1} # Configuration for scheduling queries from SQL Lab. SCHEDULED_QUERIES: dict[str, Any] = {} @@ -569,7 +571,7 @@ class D3TimeFormat(TypedDict, total=False): # Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars. DEFAULT_FEATURE_FLAGS.update( { - k[len("SUPERSET_FEATURE_") :]: parse_boolean_string(v) + k[len("SUPERSET_FEATURE_"):]: parse_boolean_string(v) for k, v in os.environ.items() if re.search(r"^SUPERSET_FEATURE_\w+", k) } @@ -592,7 +594,8 @@ class D3TimeFormat(TypedDict, total=False): # if hasattr(g, "user") and g.user.is_active: # feature_flags_dict['some_feature'] = g.user and g.user.get_id() == 5 # return feature_flags_dict -GET_FEATURE_FLAGS_FUNC: Callable[[dict[str, bool]], dict[str, bool]] | None = None +GET_FEATURE_FLAGS_FUNC: Callable[[ + dict[str, bool]], dict[str, bool]] | None = None # A function that receives a feature flag name and an optional default value. # Has a similar utility to GET_FEATURE_FLAGS_FUNC but it's useful to not force the # evaluation of all feature flags when just evaluating a single one. 
@@ -691,7 +694,8 @@ class D3TimeFormat(TypedDict, total=False): THUMBNAIL_DASHBOARD_DIGEST_FUNC: ( None | (Callable[[Dashboard, ExecutorType, str], str]) ) = None -THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None +THUMBNAIL_CHART_DIGEST_FUNC: Callable[[ + Slice, ExecutorType, str], str] | None = None THUMBNAIL_CACHE_CONFIG: CacheConfig = { "CACHE_TYPE": "NullCache", @@ -1218,7 +1222,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name # lambda url, query: url if is_fresh(query) else None # ) # pylint: disable-next=unnecessary-lambda-assignment -TRACKING_URL_TRANSFORMER = lambda url: url # noqa: E731 +def TRACKING_URL_TRANSFORMER(url): return url # noqa: E731 # customize the polling time of each engine @@ -1370,7 +1374,8 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument ALERT_REPORTS_WORKING_TIME_OUT_LAG = int(timedelta(seconds=10).total_seconds()) # if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout # Equal to working timeout + ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG -ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(timedelta(seconds=1).total_seconds()) +ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int( + timedelta(seconds=1).total_seconds()) # Default values that user using when creating alert ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT = 3600 ALERT_REPORTS_DEFAULT_RETENTION = 90 @@ -1516,7 +1521,8 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument CONTENT_SECURITY_POLICY_WARNING = True # Do you want Talisman enabled? -TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True)) +TALISMAN_ENABLED = utils.cast_to_boolean( + os.environ.get("TALISMAN_ENABLED", True)) # If you want Talisman, how do you want it configured?? TALISMAN_CONFIG = { @@ -1637,7 +1643,9 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument # conventions and such. You can find examples in the tests. 
# pylint: disable-next=unnecessary-lambda-assignment -SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731 + + +def SQLA_TABLE_MUTATOR(table): return table # noqa: E731 # Global async query config options. @@ -1813,7 +1821,8 @@ class ExtraDynamicQueryFilters(TypedDict, total=False): if key.isupper(): setattr(module, key, getattr(override_conf, key)) - click.secho(f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan") + click.secho( + f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan") except Exception: logger.exception( "Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path From 1d9fda88c0974aa3630689381aac4811334d98a0 Mon Sep 17 00:00:00 2001 From: Hiren Soni Date: Thu, 4 Jul 2024 11:53:25 +0200 Subject: [PATCH 3/3] Updated UPDATING.md --- UPDATING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/UPDATING.md b/UPDATING.md index 7ecc1a298db8..941d4d72df63 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -57,6 +57,7 @@ assists people when migrating to a new version. translations inside the python package. This includes the .mo files needed by pybabel on the backend, as well as the .json files used by the frontend. If you were doing anything before as part of your bundling to expose translation packages, it's probably not needed anymore. +- [29471](https://github.com/apache/superset/pull/29471) We now support a .env file. This change does not affect the .env file in the docker folder. ### Potential Downtime