fix: Reading env file when running project locally without docker #29471

Open · wants to merge 3 commits into master
1 change: 1 addition & 0 deletions UPDATING.md
@@ -57,6 +57,7 @@ assists people when migrating to a new version.
translations inside the python package. This includes the .mo files needed by pybabel on the
backend, as well as the .json files used by the frontend. If you were doing anything before
as part of your bundling to expose translation packages, it's probably not needed anymore.
- [29471](https://github.com/apache/superset/pull/29471) Superset now loads environment variables from a .env file when running locally. This does not change how the .env files in the docker folder are used.

### Potential Downtime

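(Not part of the diff.) A minimal standalone sketch of what the load_dotenv() call added to superset/config.py does, relying on python-dotenv's documented defaults; the flag name and values below are illustrative, not settings this PR introduces.

```python
import os

from dotenv import load_dotenv  # provided by the python-dotenv package

# Suppose a .env file next to this script contains:
#   SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
#   TALISMAN_ENABLED=False
#
# With no arguments, load_dotenv() searches for a .env file starting from the
# caller's directory and walking up parent directories, then copies its entries
# into os.environ without overriding variables that are already set.
load_dotenv()

print(os.environ.get("SUPERSET_FEATURE_EMBEDDED_SUPERSET"))  # -> "true"
print(os.environ.get("TALISMAN_ENABLED"))                    # -> "False"
```

Because the call uses python-dotenv's defaults, variables already exported in the shell (or by the docker setup) keep precedence over values in the .env file.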
31 changes: 21 additions & 10 deletions superset/config.py
@@ -41,6 +41,7 @@
import click
import pkg_resources
from celery.schedules import crontab
from dotenv import load_dotenv
from flask import Blueprint
from flask_appbuilder.security.manager import AUTH_DB
from flask_caching.backends.base import BaseCache
@@ -64,6 +65,7 @@
from superset.utils.log import DBEventLogger
from superset.utils.logging_configurator import DefaultLoggingConfigurator

# Load environment variables from a .env file, if one is found.
load_dotenv()
logger = logging.getLogger(__name__)

if TYPE_CHECKING:
@@ -200,7 +202,8 @@
# isolation level is READ COMMITTED. All backends should use READ COMMITTED (or similar)
# to help ensure consistent behavior.
SQLALCHEMY_ENGINE_OPTIONS = {
"isolation_level": "SERIALIZABLE", # SQLite does not support READ COMMITTED.
# SQLite does not support READ COMMITTED.
"isolation_level": "SERIALIZABLE",
}

# In order to hook up a custom password store for all SQLALCHEMY connections
@@ -276,7 +279,8 @@
# Use all X-Forwarded headers when ENABLE_PROXY_FIX is True.
# When proxying to a different port, set "x_port" to 0 to avoid downstream issues.
ENABLE_PROXY_FIX = False
PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1}
PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1,
"x_host": 1, "x_port": 1, "x_prefix": 1}

# Configuration for scheduling queries from SQL Lab.
SCHEDULED_QUERIES: dict[str, Any] = {}
@@ -567,7 +571,7 @@
# Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars.
DEFAULT_FEATURE_FLAGS.update(
{
k[len("SUPERSET_FEATURE_") :]: parse_boolean_string(v)
k[len("SUPERSET_FEATURE_"):]: parse_boolean_string(v)
for k, v in os.environ.items()
if re.search(r"^SUPERSET_FEATURE_\w+", k)
}
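For reviewers skimming the hunk above: a self-contained sketch of the SUPERSET_FEATURE_ prefix handling that the .env support now feeds into. _parse_bool is a stand-in for Superset's parse_boolean_string helper, and the flag name is made up.

```python
import os
import re


def _parse_bool(value: str) -> bool:
    # Stand-in for Superset's parse_boolean_string helper.
    return value.strip().lower() in {"1", "t", "true", "y", "yes", "on"}


# e.g. put SUPERSET_FEATURE_SOME_FLAG=true in .env and let load_dotenv() set it
os.environ["SUPERSET_FEATURE_SOME_FLAG"] = "true"

feature_flag_overrides = {
    k[len("SUPERSET_FEATURE_"):]: _parse_bool(v)
    for k, v in os.environ.items()
    if re.search(r"^SUPERSET_FEATURE_\w+", k)
}
# {'SOME_FLAG': True}, assuming no other SUPERSET_FEATURE_* vars are set
print(feature_flag_overrides)
```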
@@ -590,7 +594,8 @@
# if hasattr(g, "user") and g.user.is_active:
# feature_flags_dict['some_feature'] = g.user and g.user.get_id() == 5
# return feature_flags_dict
GET_FEATURE_FLAGS_FUNC: Callable[[dict[str, bool]], dict[str, bool]] | None = None
GET_FEATURE_FLAGS_FUNC: Callable[[
dict[str, bool]], dict[str, bool]] | None = None
# A function that receives a feature flag name and an optional default value.
# Has a similar utility to GET_FEATURE_FLAGS_FUNC but it's useful to not force the
# evaluation of all feature flags when just evaluating a single one.
@@ -689,7 +694,8 @@
THUMBNAIL_DASHBOARD_DIGEST_FUNC: (
None | (Callable[[Dashboard, ExecutorType, str], str])
) = None
THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None
THUMBNAIL_CHART_DIGEST_FUNC: Callable[[
Slice, ExecutorType, str], str] | None = None

THUMBNAIL_CACHE_CONFIG: CacheConfig = {
"CACHE_TYPE": "NullCache",
@@ -1216,7 +1222,7 @@
# lambda url, query: url if is_fresh(query) else None
# )
# pylint: disable-next=unnecessary-lambda-assignment
TRACKING_URL_TRANSFORMER = lambda url: url # noqa: E731
def TRACKING_URL_TRANSFORMER(url): return url # noqa: E731


# customize the polling time of each engine
@@ -1368,7 +1374,8 @@
ALERT_REPORTS_WORKING_TIME_OUT_LAG = int(timedelta(seconds=10).total_seconds())
# if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout
# Equal to working timeout + ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG
ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(timedelta(seconds=1).total_seconds())
ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(
timedelta(seconds=1).total_seconds())
# Default values that user using when creating alert
ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT = 3600
ALERT_REPORTS_DEFAULT_RETENTION = 90
@@ -1514,7 +1521,8 @@
CONTENT_SECURITY_POLICY_WARNING = True

# Do you want Talisman enabled?
TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True))
TALISMAN_ENABLED = utils.cast_to_boolean(
os.environ.get("TALISMAN_ENABLED", True))

# If you want Talisman, how do you want it configured??
TALISMAN_CONFIG = {
@@ -1635,7 +1643,9 @@
# conventions and such. You can find examples in the tests.

# pylint: disable-next=unnecessary-lambda-assignment
SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731


def SQLA_TABLE_MUTATOR(table): return table # noqa: E731


# Global async query config options.
@@ -1811,7 +1821,8 @@
if key.isupper():
setattr(module, key, getattr(override_conf, key))

click.secho(f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan")
click.secho(
    f"Loaded your LOCAL configuration at [{cfg_path}]", fg="cyan")
Codecov annotation (superset/config.py, line 1824): added line was not covered by tests.
except Exception:
logger.exception(
"Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
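Finally, a rough simplification of the override mechanism in the last hunk: every UPPERCASE attribute of the module referenced by SUPERSET_CONFIG_PATH (which, with this PR, can also be supplied via .env) replaces the matching default. The importlib-based loading below is an assumption for illustration, not Superset's actual loader.

```python
import importlib.util
import os
import sys

# CONFIG_PATH_ENV_VAR in superset/config.py resolves to "SUPERSET_CONFIG_PATH".
cfg_path = os.environ.get("SUPERSET_CONFIG_PATH")
if cfg_path:
    # Load the override module from the given file path (simplified).
    spec = importlib.util.spec_from_file_location("superset_config", cfg_path)
    override_conf = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(override_conf)

    # Copy every UPPERCASE setting onto this module, as in the loop shown above.
    module = sys.modules[__name__]
    for key in dir(override_conf):
        if key.isupper():
            setattr(module, key, getattr(override_conf, key))
```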