Skip to content

Commit

Permalink
chore(deps): Upgrade ruff (#21648)
Browse files Browse the repository at this point in the history
  • Loading branch information
webjunkie authored Apr 19, 2024
1 parent 61ad509 commit a0fc086
Show file tree
Hide file tree
Showing 97 changed files with 353 additions and 240 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-backend.yml
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ jobs:
- name: Check formatting
run: |
ruff format --exclude posthog/hogql/grammar --check --diff .
ruff format --check --diff .
- name: Add Problem Matcher
run: echo "::add-matcher::.github/mypy-problem-matcher.json"
Expand Down
12 changes: 9 additions & 3 deletions ee/clickhouse/queries/event_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,19 @@ def __init__(
should_join_distinct_ids=False,
should_join_persons=False,
# Extra events/person table columns to fetch since parent query needs them
extra_fields: List[ColumnName] = [],
extra_event_properties: List[PropertyName] = [],
extra_person_fields: List[ColumnName] = [],
extra_fields: Optional[List[ColumnName]] = None,
extra_event_properties: Optional[List[PropertyName]] = None,
extra_person_fields: Optional[List[ColumnName]] = None,
override_aggregate_users_by_distinct_id: Optional[bool] = None,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled,
**kwargs,
) -> None:
if extra_person_fields is None:
extra_person_fields = []
if extra_event_properties is None:
extra_event_properties = []
if extra_fields is None:
extra_fields = []
super().__init__(
filter=filter,
team=team,
Expand Down
4 changes: 2 additions & 2 deletions ee/clickhouse/queries/funnels/funnel_correlation.py
Original file line number Diff line number Diff line change
Expand Up @@ -868,9 +868,9 @@ def get_partial_event_contingency_tables(self) -> Tuple[List[EventContingencyTab

# Get the total success/failure counts from the results
results = [result for result in results_with_total if result[0] != self.TOTAL_IDENTIFIER]
_, success_total, failure_total = [
_, success_total, failure_total = next(
result for result in results_with_total if result[0] == self.TOTAL_IDENTIFIER
][0]
)

# Add a little structure, and keep it close to the query definition so it's
# obvious what's going on with result indices.
Expand Down
4 changes: 3 additions & 1 deletion ee/clickhouse/queries/test/test_cohort_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,10 @@ def _make_event_sequence(
interval_days,
period_event_counts,
event="$pageview",
properties={},
properties=None,
):
if properties is None:
properties = {}
for period_index, event_count in enumerate(period_event_counts):
for i in range(event_count):
_create_event(
Expand Down
2 changes: 1 addition & 1 deletion ee/clickhouse/views/test/test_clickhouse_retention.py
Original file line number Diff line number Diff line change
Expand Up @@ -592,7 +592,7 @@ def test_can_specify_breakdown_event_property_and_retrieve_people(self):
),
)

chrome_cohort = [cohort for cohort in retention["result"] if cohort["label"] == "Chrome"][0]
chrome_cohort = next(cohort for cohort in retention["result"] if cohort["label"] == "Chrome")
people_url = chrome_cohort["values"][0]["people_url"]
people_response = self.client.get(people_url)
assert people_response.status_code == 200
Expand Down
3 changes: 2 additions & 1 deletion ee/models/license.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ class License(models.Model):
]

ENTERPRISE_PLAN = "enterprise"
ENTERPRISE_FEATURES = SCALE_FEATURES + [
ENTERPRISE_FEATURES = [
*SCALE_FEATURES,
AvailableFeature.ADVANCED_PERMISSIONS,
AvailableFeature.PROJECT_BASED_PERMISSIONING,
AvailableFeature.SAML,
Expand Down
4 changes: 3 additions & 1 deletion ee/settings.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Django settings for PostHog Enterprise Edition.
"""

import os
from typing import Dict, List

Expand All @@ -15,7 +16,8 @@
}

# SSO
AUTHENTICATION_BACKENDS = AUTHENTICATION_BACKENDS + [
AUTHENTICATION_BACKENDS = [
*AUTHENTICATION_BACKENDS,
"ee.api.authentication.MultitenantSAMLAuth",
"social_core.backends.google.GoogleOAuth2",
]
Expand Down
1 change: 1 addition & 0 deletions manage.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""

import os
import sys

Expand Down
42 changes: 6 additions & 36 deletions mypy-baseline.txt

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
"build:esbuild": "node frontend/build.mjs",
"schema:build": "pnpm run schema:build:json && pnpm run schema:build:python",
"schema:build:json": "ts-node bin/build-schema.mjs && prettier --write frontend/src/queries/schema.json",
"schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py",
"schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py && ruff check --fix posthog/schema.py",
"grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp",
"grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4",
"grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4",
Expand All @@ -47,7 +47,7 @@
"typescript:check": "tsc --noEmit && echo \"No errors reported by tsc.\"",
"lint:js": "eslint frontend/src",
"lint:css": "stylelint \"frontend/**/*.{css,scss}\"",
"format:backend": "ruff --exclude posthog/hogql/grammar .",
"format:backend": "ruff .",
"format:frontend": "pnpm lint:js --fix && pnpm lint:css --fix && pnpm prettier",
"format": "pnpm format:backend && pnpm format:frontend",
"typegen:write": "kea-typegen write --delete --show-ts-errors",
Expand Down Expand Up @@ -337,8 +337,8 @@
"pnpm --dir plugin-server exec prettier --write"
],
"!(posthog/hogql/grammar/*)*.{py,pyi}": [
"ruff format",
"ruff check --fix"
"ruff check --fix",
"ruff format"
]
},
"browserslist": {
Expand Down
2 changes: 1 addition & 1 deletion posthog/api/action.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ class ActionViewSet(
viewsets.ModelViewSet,
):
scope_object = "action"
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
queryset = Action.objects.all()
serializer_class = ActionSerializer
authentication_classes = [TemporaryTokenAuthentication]
Expand Down
9 changes: 2 additions & 7 deletions posthog/api/capture.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,10 +59,7 @@
# events that are ingested via a separate path than analytics events. They have
# fewer restrictions on e.g. the order they need to be processed in.
SESSION_RECORDING_DEDICATED_KAFKA_EVENTS = ("$snapshot_items",)
SESSION_RECORDING_EVENT_NAMES = (
"$snapshot",
"$performance_event",
) + SESSION_RECORDING_DEDICATED_KAFKA_EVENTS
SESSION_RECORDING_EVENT_NAMES = ("$snapshot", "$performance_event", *SESSION_RECORDING_DEDICATED_KAFKA_EVENTS)

EVENTS_RECEIVED_COUNTER = Counter(
"capture_events_received_total",
Expand Down Expand Up @@ -604,9 +601,7 @@ def capture_internal(

if event["event"] in SESSION_RECORDING_EVENT_NAMES:
session_id = event["properties"]["$session_id"]
headers = [
("token", token),
] + extra_headers
headers = [("token", token), *extra_headers]

overflowing = False
if token in settings.REPLAY_OVERFLOW_FORCED_TOKENS:
Expand Down
2 changes: 1 addition & 1 deletion posthog/api/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ class EventViewSet(
viewsets.GenericViewSet,
):
scope_object = "query"
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
serializer_class = ClickhouseEventSerializer
throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle]
pagination_class = UncountedLimitOffsetPagination
Expand Down
14 changes: 7 additions & 7 deletions posthog/api/insight.py
Original file line number Diff line number Diff line change
Expand Up @@ -572,7 +572,7 @@ class InsightViewSet(
ClickHouseBurstRateThrottle,
ClickHouseSustainedRateThrottle,
]
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.CSVRenderer,)
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.CSVRenderer)
filter_backends = [DjangoFilterBackend]
filterset_fields = ["short_id", "created_by"]
sharing_enabled_actions = ["retrieve", "list"]
Expand Down Expand Up @@ -838,12 +838,12 @@ def trend(self, request: request.Request, *args: Any, **kwargs: Any):
export = "{}/insights/{}/\n".format(SITE_URL, request.GET["export_insight_id"]).encode() + export

response = HttpResponse(export)
response[
"Content-Disposition"
] = 'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format(
name=slugify(request.GET.get("export_name", "export")),
date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until",
date_to=filter.date_to.strftime("%Y-%m-%d"),
response["Content-Disposition"] = (
'attachment; filename="{name} ({date_from} {date_to}) from PostHog.csv"'.format(
name=slugify(request.GET.get("export_name", "export")),
date_from=filter.date_from.strftime("%Y-%m-%d -") if filter.date_from else "up until",
date_to=filter.date_to.strftime("%Y-%m-%d"),
)
)
return response

Expand Down
20 changes: 5 additions & 15 deletions posthog/api/person.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ class PersonViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet):
"""

scope_object = "person"
renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,)
renderer_classes = (*tuple(api_settings.DEFAULT_RENDERER_CLASSES), csvrenderers.PaginatedCSVRenderer)
queryset = Person.objects.all()
serializer_class = PersonSerializer
pagination_class = PersonLimitOffsetPagination
Expand Down Expand Up @@ -932,21 +932,11 @@ def prepare_actor_query_filter(filter: T) -> T:
new_group = {
"type": "OR",
"values": [
{
"key": "email",
"type": "person",
"value": search,
"operator": "icontains",
},
{"key": "email", "type": "person", "value": search, "operator": "icontains"},
{"key": "name", "type": "person", "value": search, "operator": "icontains"},
{
"key": "distinct_id",
"type": "event",
"value": search,
"operator": "icontains",
},
]
+ group_properties_filter_group,
{"key": "distinct_id", "type": "event", "value": search, "operator": "icontains"},
*group_properties_filter_group,
],
}
prop_group = (
{"type": "AND", "values": [new_group, filter.property_groups.to_dict()]}
Expand Down
12 changes: 9 additions & 3 deletions posthog/api/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,11 @@ def _update_plugin_attachments(request: request.Request, plugin_config: PluginCo
_update_plugin_attachment(request, plugin_config, match.group(1), None, user)


def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=[]) -> List[Change]:
def get_plugin_config_changes(
old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=None
) -> List[Change]:
if secret_fields is None:
secret_fields = []
config_changes = dict_changes_between("Plugin", old_config, new_config)

for i, change in enumerate(config_changes):
Expand All @@ -79,8 +83,10 @@ def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str,


def log_enabled_change_activity(
new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=[]
new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=None
):
if changes is None:
changes = []
if old_enabled != new_plugin_config.enabled:
log_activity(
organization_id=new_plugin_config.team.organization.id,
Expand Down Expand Up @@ -864,7 +870,7 @@ def frontend(self, request: request.Request, **kwargs):

def _get_secret_fields_for_plugin(plugin: Plugin) -> Set[str]:
# A set of keys for config fields that have secret = true
secret_fields = {field["key"] for field in plugin.config_schema if "secret" in field and field["secret"]}
secret_fields = {field["key"] for field in plugin.config_schema if isinstance(field, dict) and field.get("secret")}
return secret_fields


Expand Down
4 changes: 1 addition & 3 deletions posthog/api/signup.py
Original file line number Diff line number Diff line change
Expand Up @@ -503,9 +503,7 @@ def social_create_user(
user=user.id if user else None,
)
if user:
backend_processor = (
"domain_whitelist"
) # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
backend_processor = "domain_whitelist" # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
from_invite = True # jit_provisioning means they're definitely not organization_first_user

if not user:
Expand Down
3 changes: 2 additions & 1 deletion posthog/api/team.py
Original file line number Diff line number Diff line change
Expand Up @@ -421,7 +421,8 @@ def get_permissions(self) -> List:
IsAuthenticated,
APIScopePermission,
PremiumMultiProjectPermissions,
] + self.permission_classes
*self.permission_classes,
]

base_permissions = [permission() for permission in common_permissions]

Expand Down
10 changes: 6 additions & 4 deletions posthog/api/test/test_capture.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def mocked_get_ingest_context_from_token(_: Any) -> None:
openapi_spec = cast(Dict[str, Any], parser.specification)

large_data_array = [
{"key": random.choice(string.ascii_letters) for _ in range(512 * 1024)}
{"key": "".join(random.choice(string.ascii_letters) for _ in range(512 * 1024))}
] # 512 * 1024 is the max size of a single message and random letters shouldn't be compressible, so this should be at least 2 messages

android_json = {
Expand Down Expand Up @@ -188,7 +188,7 @@ def _to_arguments(self, patch_process_event_with_plugins: Any) -> dict:
def _send_original_version_session_recording_event(
self,
number_of_events: int = 1,
event_data: Dict | None = {},
event_data: Dict | None = None,
snapshot_source=3,
snapshot_type=1,
session_id="abc123",
Expand All @@ -198,6 +198,8 @@ def _send_original_version_session_recording_event(
) -> dict:
if event_data is None:
    event_data = {}

event = {
"event": "$snapshot",
Expand Down Expand Up @@ -1525,8 +1527,8 @@ def test_handle_invalid_snapshot(self):
]
)
def test_cors_allows_tracing_headers(self, _: str, path: str, headers: List[str]) -> None:
expected_headers = ",".join(["X-Requested-With", "Content-Type"] + headers)
presented_headers = ",".join(headers + ["someotherrandomheader"])
expected_headers = ",".join(["X-Requested-With", "Content-Type", *headers])
presented_headers = ",".join([*headers, "someotherrandomheader"])
response = self.client.options(
path,
HTTP_ORIGIN="https://localhost",
Expand Down
4 changes: 3 additions & 1 deletion posthog/api/test/test_comments.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@


class TestComments(APIBaseTest, QueryMatchingTest):
def _create_comment(self, data={}) -> Any:
def _create_comment(self, data=None) -> Any:
if data is None:
data = {}
payload = {
"content": "my content",
"scope": "Notebook",
Expand Down
16 changes: 12 additions & 4 deletions posthog/api/test/test_decide.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,12 +73,14 @@ def _post_decide(
origin="http://127.0.0.1:8000",
api_version=1,
distinct_id="example_id",
groups={},
groups=None,
geoip_disable=False,
ip="127.0.0.1",
disable_flags=False,
user_agent: Optional[str] = None,
):
if groups is None:
groups = {}
return self.client.post(
f"/decide/?v={api_version}",
{
Expand Down Expand Up @@ -3336,10 +3338,12 @@ def _post_decide(
origin="http://127.0.0.1:8000",
api_version=1,
distinct_id="example_id",
groups={},
groups=None,
geoip_disable=False,
ip="127.0.0.1",
):
if groups is None:
groups = {}
return self.client.post(
f"/decide/?v={api_version}",
{
Expand Down Expand Up @@ -3571,11 +3575,15 @@ def _post_decide(
origin="http://127.0.0.1:8000",
api_version=3,
distinct_id="example_id",
groups={},
person_props={},
groups=None,
person_props=None,
geoip_disable=False,
ip="127.0.0.1",
):
if person_props is None:
person_props = {}
if groups is None:
groups = {}
return self.client.post(
f"/decide/?v={api_version}",
{
Expand Down
Loading

0 comments on commit a0fc086

Please sign in to comment.