Skip to content

Commit

Permalink
chore(backend): Spelling (#25457)
Browse files Browse the repository at this point in the history
Signed-off-by: Josh Soref <[email protected]>
  • Loading branch information
jsoref authored Sep 29, 2023
1 parent 3e0c70d commit 0735680
Show file tree
Hide file tree
Showing 31 changed files with 56 additions and 56 deletions.
4 changes: 2 additions & 2 deletions superset/advanced_data_type/plugins/internet_address.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding coperatoryright ownership. The ASF licenses this file
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a coperatory of the License at
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
Expand Down
4 changes: 2 additions & 2 deletions superset/advanced_data_type/plugins/internet_port.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding coperatoryright ownership. The ASF licenses this file
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a coperatory of the License at
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
Expand Down
2 changes: 1 addition & 1 deletion superset/advanced_data_type/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class AdvancedDataTypeResponse(TypedDict, total=False):
@dataclass
class AdvancedDataType:
"""
Used for coverting base type value into an advanced type value
Used for converting base type value into an advanced type value
"""

verbose_name: str
Expand Down
8 changes: 4 additions & 4 deletions superset/charts/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@
)
query_context_generation_description = (
"The query context generation represents whether the query_context"
"is user generated or not so that it does not update user modfied"
"is user generated or not so that it does not update user modified"
"state."
)
cache_timeout_description = (
Expand All @@ -101,12 +101,12 @@
)
datasource_id_description = (
"The id of the dataset/datasource this new chart will use. "
"A complete datasource identification needs `datasouce_id` "
"A complete datasource identification needs `datasource_id` "
"and `datasource_type`."
)
datasource_uid_description = (
"The uid of the dataset/datasource this new chart will use. "
"A complete datasource identification needs `datasouce_uid` "
"A complete datasource identification needs `datasource_uid` "
)
datasource_type_description = (
"The type of dataset/datasource identified on `datasource_id`."
Expand Down Expand Up @@ -1593,7 +1593,7 @@ class ChartCacheWarmUpResponseSchema(Schema):
ChartDataResponseSchema,
ChartDataAsyncResponseSchema,
# TODO: These should optimally be included in the QueryContext schema as an `anyOf`
# in ChartDataPostPricessingOperation.options, but since `anyOf` is not
# in ChartDataPostProcessingOperation.options, but since `anyOf` is not
# supported by Marshmallow<3, this is not currently possible.
ChartDataAdhocMetricSchema,
ChartDataAggregateOptionsSchema,
Expand Down
2 changes: 1 addition & 1 deletion superset/cli/test_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def test_datetime(console: Console, engine: Engine) -> None:
stmt = select(table)
row = engine.execute(stmt).fetchone()
assert row[0] == now
console.print(":thumbs_up: [green]Succcess!")
console.print(":thumbs_up: [green]Success!")
except Exception as ex: # pylint: disable=broad-except
console.print(f"[red]Test failed: {ex}")
console.print("[bold]Exiting...")
Expand Down
6 changes: 3 additions & 3 deletions superset/columns/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
ImportExportMixin,
)

UNKOWN_TYPE = "UNKNOWN"
UNKNOWN_TYPE = "UNKNOWN"


class Column(
Expand Down Expand Up @@ -92,9 +92,9 @@ class Column(
# [1] https://www.postgresql.org/docs/9.1/datatype-character.html
name = sa.Column(sa.Text)
# Raw type as returned and used by db engine.
type = sa.Column(sa.Text, default=UNKOWN_TYPE)
type = sa.Column(sa.Text, default=UNKNOWN_TYPE)

# Assigns column advnaced type to determine custom behavior
# Assigns column advanced type to determine custom behavior
# does nothing unless feature flag ENABLE_ADVANCED_DATA_TYPES is true
advanced_data_type = sa.Column(sa.Text)

Expand Down
6 changes: 3 additions & 3 deletions superset/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,7 @@ class D3Format(TypedDict, total=False):
# Enable caching per impersonation key (e.g username) in a datasource where user
# impersonation is enabled
"CACHE_IMPERSONATION": False,
# Enable caching per user key for Superset cache (not datatabase cache impersonation)
# Enable caching per user key for Superset cache (not database cache impersonation)
"CACHE_QUERY_BY_USER": False,
# Enable sharing charts with embedding
"EMBEDDABLE_CHARTS": True,
Expand Down Expand Up @@ -743,7 +743,7 @@ class D3Format(TypedDict, total=False):
HTML_SANITIZATION = True

# Use this configuration to extend the HTML sanitization schema.
# By default we use the Gihtub schema defined in
# By default we use the GitHub schema defined in
# https://github.com/syntax-tree/hast-util-sanitize/blob/main/lib/schema.js
# For example, the following configuration would allow the rendering of the
# style attribute for div elements and the ftp protocol in hrefs:
Expand Down Expand Up @@ -1102,7 +1102,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name
# dictionary. Exposing functionality through JINJA_CONTEXT_ADDONS has security
# implications as it opens a window for a user to execute untrusted code.
# It's important to make sure that the objects exposed (as well as objects attached
# to those objets) are harmless. We recommend only exposing simple/pure functions that
# to those objects) are harmless. We recommend only exposing simple/pure functions that
# return native types.
JINJA_CONTEXT_ADDONS: dict[str, Callable[..., Any]] = {}

Expand Down
2 changes: 1 addition & 1 deletion superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def __init__(self, **kwargs: Any) -> None:
"""
Construct a TableColumn object.
Historically a TableColumn object (from an ORM perspective) was tighly bound to
Historically a TableColumn object (from an ORM perspective) was tightly bound to
a SqlaTable object, however with the introduction of the Query datasource this
is no longer true, i.e., the SqlaTable relationship is optional.
Expand Down
2 changes: 1 addition & 1 deletion superset/daos/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def update(
:param item: The object to update
:param attributes: The attributes associated with the object to update
:param commit: Whether to commit the transaction
:raises: DAOUpdateFailedError: If the updation failed
:raises: DAOUpdateFailedError: If the update failed
"""

# TODO(john-bodley): Determine why we need special handling for recipients.
Expand Down
2 changes: 1 addition & 1 deletion superset/dashboards/permalink/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,6 @@ class DashboardPermalinkSchema(Schema):
dashboardId = fields.String(
required=True,
allow_none=False,
metadata={"description": "The id or slug of the dasbhoard"},
metadata={"description": "The id or slug of the dashboard"},
)
state = fields.Nested(DashboardPermalinkStateSchema())
2 changes: 1 addition & 1 deletion superset/dashboards/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ class DashboardJSONMetadataSchema(Schema):
# chart_configuration for now keeps data about cross-filter scoping for charts
chart_configuration = fields.Dict()
# global_chart_configuration keeps data about global cross-filter scoping
# for charts - can be overriden by chart_configuration for each chart
# for charts - can be overridden by chart_configuration for each chart
global_chart_configuration = fields.Dict()
# filter_sets_configuration is for dashboard-native filters
filter_sets_configuration = fields.List(fields.Dict(), allow_none=True)
Expand Down
2 changes: 1 addition & 1 deletion superset/databases/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def wraps(
database, Table(table_name_parsed, schema_name_parsed)
):
stats_logger_manager.instance.incr(
f"permisssion_denied_{self.__class__.__name__}.select_star"
f"permission_denied_{self.__class__.__name__}.select_star"
)
logger.warning(
"Permission denied for user %s on table: %s schema: %s",
Expand Down
2 changes: 1 addition & 1 deletion superset/datasets/commands/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def run(self) -> Model:
# Creates SqlaTable (Dataset)
dataset = DatasetDAO.create(attributes=self._properties, commit=False)

# Updates columns and metrics from the datase
# Updates columns and metrics from the dataset
dataset.fetch_metadata(commit=False)
db.session.commit()
except (SQLAlchemyError, DAOCreateFailedError) as ex:
Expand Down
16 changes: 8 additions & 8 deletions superset/db_engine_specs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ The table below (generated via `python superset/db_engine_specs/lib.py`) summari
| Allows aliases in the SELECT statement | True | True | True | True | True | True | True | True | False | True | True | True | True | True | True | True | True | True | True | True | True | True | False | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True |
| Allows referencing aliases in the ORDER BY statement | True | True | True | True | True | True | True | True | False | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True |
| Supports secondary time columns | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False |
| Allows ommiting time filters from inline GROUP BYs | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False |
| Allows omitting time filters from inline GROUP BYs | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | True | True | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False |
| Able to use source column when an alias overshadows it | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | True | False | False | False | False | False | False | False | False | True | False | False |
| Allows aggregations in ORDER BY not present in the SELECT | True | True | True | True | True | False | True | True | True | True | False | True | True | True | True | True | True | True | True | True | False | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True | True |
| Allows expressions in ORDER BY | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | True | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False | False |
Expand Down Expand Up @@ -107,7 +107,7 @@ When running user queries in SQL Lab, Superset needs to limit the number of rows

For most databases this is done by parsing the user submitted query and applying a limit, if one is not present, or replacing the existing limit if it's larger. This is called the `FORCE_LIMIT` method, and is the most efficient, since the database will produce at most the number of rows that Superset will display.

For some databases this method might not work, and they can use the `WRAP_SQL` method, which wraps the original query in a `SELECT *` and applies a limit via the SQLAlchemy dialect, which should get translated to the correct syntax. This method might be inneficient, since the database optimizer might not be able to push the limit to the inner query.
For some databases this method might not work, and they can use the `WRAP_SQL` method, which wraps the original query in a `SELECT *` and applies a limit via the SQLAlchemy dialect, which should get translated to the correct syntax. This method might be inefficient, since the database optimizer might not be able to push the limit to the inner query.

Finally, as a last resource there is the `FETCH_MANY` method. When a DB engine spec uses this method the query runs unmodified, but Superset fetches only a certain number of rows from the cursor. It's possible that a database using this method can optimize the query execution and compute rows as they are being read by the cursor, but it's unlikely. This makes this method the least efficient of the three.

Expand Down Expand Up @@ -158,7 +158,7 @@ GROUP BY
```
### `time_groupby_inline = False`

In theory this attribute should be used to ommit time filters from the self-joins. When the attribute is false the time attribute will be present in the subquery used to compute limited series, eg:
In theory this attribute should be used to omit time filters from the self-joins. When the attribute is false the time attribute will be present in the subquery used to compute limited series, eg:

```sql
SELECT DATE_TRUNC('day', ts) AS ts,
Expand Down Expand Up @@ -390,7 +390,7 @@ class MssqlEngineSpec(BaseEngineSpec):

### Function names

DB engine specs should implement a class method called `get_function_names` that retuns a list of strings, representing all the function names that the database supports. This is used for autocomplete in SQL Lab.
DB engine specs should implement a class method called `get_function_names` that returns a list of strings, representing all the function names that the database supports. This is used for autocomplete in SQL Lab.

### Masked encrypted extra

Expand Down Expand Up @@ -521,7 +521,7 @@ class GSheetsEngineSpec(ShillelaghEngineSpec):

The method `get_url_for_impersonation` updates the SQLAlchemy URI before every query. In this particular case, it will fetch the user's email and add it to the `subject` query argument. The driver will then lower the permissions to match that given user. This allows the connection to be configured with a service account that has access to all the spreadsheets, while giving users access to only the spreadsheets they own or have been shared with them (or with their organization — Google will handle the authorization in this case, not Superset).

Alternatively, it's also possible to impersonate users by implemeneting the `update_impersonation_config`. This is a class method which modifies `connect_args` in place. You can use either method, and ideally they [should be consolidated in a single one](https://github.com/apache/superset/issues/24910).
Alternatively, it's also possible to impersonate users by implementing the `update_impersonation_config`. This is a class method which modifies `connect_args` in place. You can use either method, and ideally they [should be consolidated in a single one](https://github.com/apache/superset/issues/24910).

### File upload

Expand Down Expand Up @@ -706,13 +706,13 @@ Note that despite being implemented only for Presto, this behavior has nothing t

Some databases allow users to estimate the cost of running a query before running it. This is done via the `estimate_query_cost` method in DB engine specs, which receives the SQL and returns a list of "costs". The definition of what "cost" is varies from database to database (in the few that support this functionality), and it can be formatted via the `query_cost_formatter`.

The `query_cost_formatter` can be overriden with an arbitrary function via the config `QUERY_COST_FORMATTERS_BY_ENGINE`. This allows custom deployments of Superset to format the results in different ways. For example, at some point in Lyft the cost for running Presto queries would also show the carbon footprint (in trees).
The `query_cost_formatter` can be overridden with an arbitrary function via the config `QUERY_COST_FORMATTERS_BY_ENGINE`. This allows custom deployments of Superset to format the results in different ways. For example, at some point in Lyft the cost for running Presto queries would also show the carbon footprint (in trees).

### SQL validation

A few databases support validating the syntax of the SQL as the user is typing it, indicating in SQL Lab any errors. This is usually done using an `EXPLAIN` query and, because it gets called every few seconds as the user types, it's important that the database returns the result quickly.

This is currently implement for Presto and Postgres, via custom classes in `superset/sql_validators` that should be enabled in the configuration. Implementing this as custom classes, instead of a `validate_sql` method in the DB engine spec offerts no advantages, and ideally in the future we should move the logic to DB engine specs.
This is currently implement for Presto and Postgres, via custom classes in `superset/sql_validators` that should be enabled in the configuration. Implementing this as custom classes, instead of a `validate_sql` method in the DB engine spec offers no advantages, and ideally in the future we should move the logic to DB engine specs.

## Testing DB engine specs

Expand All @@ -722,4 +722,4 @@ Superset has a command to test the connection to a given database, as well as ch
superset test-db sqlite://
```

If the connection needs additional arguments thay can be passed when the command runs.
If the connection needs additional arguments they can be passed when the command runs.
4 changes: 2 additions & 2 deletions superset/db_engine_specs/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -461,7 +461,7 @@ def get_schema_from_engine_params( # pylint: disable=unused-argument
connect_args: dict[str, Any],
) -> str | None:
"""
Return the schema configured in a SQLALchemy URI and connection argments, if any.
Return the schema configured in a SQLALchemy URI and connection arguments, if any.
"""
return None

Expand Down Expand Up @@ -1144,7 +1144,7 @@ def get_prequeries(
connection arguments.
For example, in order to specify a default schema in RDS we need to run a query
at the beggining of the session:
at the beginning of the session:
sql> set search_path = my_schema;
Expand Down
2 changes: 1 addition & 1 deletion superset/db_engine_specs/lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
"alias_in_orderby": "Allows referencing aliases in the ORDER BY statement",
"secondary_time_columns": "Supports secondary time columns",
"time_groupby_inline": (
"Allows ommiting time filters from inline GROUP BYs"
"Allows omitting time filters from inline GROUP BYs"
), # E: line too long (80 > 79 characters)
"alias_to_source_column": (
"Able to use source column when an alias overshadows it"
Expand Down
2 changes: 1 addition & 1 deletion superset/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ class SupersetErrorType(StrEnum):
1025: _("CVAS (create view as select) query is not a SELECT statement."),
1026: _("Query is too complex and takes too long to run."),
1027: _("The database is currently running too many queries."),
1028: _("One or more parameters specified in the query are malformatted."),
1028: _("One or more parameters specified in the query are malformed."),
1029: _("The object does not exist in the given database."),
1030: _("The query has a syntax error."),
1031: _("The results backend no longer has the data from the query."),
Expand Down
2 changes: 1 addition & 1 deletion superset/examples/configs/charts/Messages_per_Channel.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ params:
dashboards: '#3CCCCB'
design: '#1FA8C9'
developers: '#9EE5E5'
embedd-dashboards: '#ACE1C4'
embedded-dashboards: '#ACE1C4'
feature-requests: '#454E7C'
general: '#3CCCCB'
github-notifications: '#E04355'
Expand Down
Loading

0 comments on commit 0735680

Please sign in to comment.