feat(on-demand): Add logic for supporting prefilling of on demand metrics (#54833)

iambriccardo authored Aug 30, 2023
1 parent 9217585 commit 34efa18
Showing 3 changed files with 166 additions and 29 deletions.
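At its core, the commit gates prefilling on two organization feature flags that must both be enabled. A minimal sketch of that gate, using the flag names and logic from the diff below (the helper name is illustrative, not part of the commit):

from typing import Set

def is_prefilling_enabled(enabled_features: Set[str]) -> bool:
    # Both flags must be on, mirroring the check in get_metric_extraction_config.
    return (
        "organizations:on-demand-metrics-prefill" in enabled_features
        and "organizations:enable-on-demand-metrics-prefill" in enabled_features
    )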
97 changes: 72 additions & 25 deletions src/sentry/relay/config/metric_extraction.py
@@ -1,13 +1,14 @@
import logging
from dataclasses import dataclass
from typing import Any, Dict, List, Literal, Optional, Sequence, Tuple, TypedDict, Union
from typing import Any, Dict, List, Literal, Optional, Sequence, Set, Tuple, TypedDict, Union, cast

from sentry import features, options
from sentry.api.endpoints.project_transaction_threshold import DEFAULT_THRESHOLD
from sentry.incidents.models import AlertRule, AlertRuleStatus
from sentry.models import (
DashboardWidgetQuery,
DashboardWidgetTypes,
Organization,
Project,
ProjectTransactionThreshold,
ProjectTransactionThresholdOverride,
@@ -21,6 +22,7 @@
should_use_on_demand_metrics,
)
from sentry.snuba.models import SnubaQuery
from sentry.utils import metrics

logger = logging.getLogger(__name__)

@@ -53,45 +55,79 @@ def get_metric_extraction_config(project: Project) -> Optional[MetricExtractionConfig]:
- Performance alert rules with advanced filter expressions.
- On-demand metrics widgets.
"""
if not features.has("organizations:on-demand-metrics-extraction", project.organization):
return None
# For efficiency, we fetch the flags in batch and propagate them downstream.
enabled_features = _on_demand_metrics_feature_flags(project.organization)

alert_specs = _get_alert_metric_specs(project)
widget_specs = _get_widget_metric_specs(project)
prefilling = (
"organizations:on-demand-metrics-prefill" in enabled_features
and "organizations:enable-on-demand-metrics-prefill" in enabled_features
)

metrics = _merge_metric_specs(alert_specs, widget_specs)
alert_specs = _get_alert_metric_specs(project, enabled_features, prefilling)
widget_specs = _get_widget_metric_specs(project, enabled_features, prefilling)

if not metrics:
metric_specs = _merge_metric_specs(alert_specs, widget_specs)
if not metric_specs:
return None

return {
"version": _METRIC_EXTRACTION_VERSION,
"metrics": metrics,
"metrics": metric_specs,
}


def _get_alert_metric_specs(project: Project) -> List[HashedMetricSpec]:
def _on_demand_metrics_feature_flags(organization: Organization) -> Set[str]:
feature_names = [
"organizations:on-demand-metrics-extraction",
"organizations:on-demand-metrics-extraction-experimental",
"organizations:on-demand-metrics-prefill",
"organizations:enable-on-demand-metrics-prefill",
]

feature_values = features.batch_has(feature_names, organization=organization)
if feature_values is None:
return set()

all_features = feature_values.get(f"organization:{organization.id}", {})
return cast(Set[str], {name for name, value in all_features.items() if value})


def _get_alert_metric_specs(
project: Project, enabled_features: Set[str], prefilling: bool
) -> List[HashedMetricSpec]:
if not ("organizations:on-demand-metrics-extraction" in enabled_features or prefilling):
return []

datasets = [Dataset.PerformanceMetrics.value]
if prefilling:
datasets.append(Dataset.Transactions.value)

alert_rules = (
AlertRule.objects.fetch_for_project(project)
.filter(
organization=project.organization,
status=AlertRuleStatus.PENDING.value,
snuba_query__dataset=Dataset.PerformanceMetrics.value,
snuba_query__dataset__in=datasets,
)
.select_related("snuba_query")
)

specs = []
for alert in alert_rules:
alert_snuba_query = alert.snuba_query
if result := _convert_snuba_query_to_metric(project, alert.snuba_query):
if result := _convert_snuba_query_to_metric(project, alert.snuba_query, prefilling):
_log_on_demand_metric_spec(
project_id=project.id,
spec_for="alert",
spec=result,
id=alert.id,
field=alert_snuba_query.aggregate,
query=alert_snuba_query.query,
prefilling=prefilling,
)
metrics.incr(
"on_demand_metrics.on_demand_spec.for_alert",
tags={"prefilling": prefilling},
)
specs.append(result)

@@ -105,9 +141,12 @@ def _get_alert_metric_specs(project: Project) -> List[HashedMetricSpec]:
return specs


def _get_widget_metric_specs(project: Project) -> List[HashedMetricSpec]:
if not features.has(
"organizations:on-demand-metrics-extraction-experimental", project.organization
def _get_widget_metric_specs(
project: Project, enabled_features: Set[str], prefilling: bool
) -> List[HashedMetricSpec]:
if not (
"organizations:on-demand-metrics-extraction" in enabled_features
and "organizations:on-demand-metrics-extraction-experimental" in enabled_features
):
return []

@@ -119,7 +158,7 @@ def _get_widget_metric_specs(project: Project) -> List[HashedMetricSpec]:

specs = []
for widget in widget_queries:
for result in _convert_widget_query_to_metric(project, widget):
for result in _convert_widget_query_to_metric(project, widget, prefilling):
specs.append(result)

max_widget_specs = options.get("on_demand.max_widget_specs") or _MAX_ON_DEMAND_WIDGETS
@@ -154,23 +193,19 @@ def _merge_metric_specs(


def _convert_snuba_query_to_metric(
project: Project, snuba_query: SnubaQuery
project: Project, snuba_query: SnubaQuery, prefilling: bool
) -> Optional[HashedMetricSpec]:
"""
If the passed snuba_query is a valid query for on-demand metric extraction,
returns a tuple of (hash, MetricSpec) for the query. Otherwise, returns None.
"""
return _convert_aggregate_and_query_to_metric(
project,
snuba_query.dataset,
snuba_query.aggregate,
snuba_query.query,
project, snuba_query.dataset, snuba_query.aggregate, snuba_query.query, prefilling
)


def _convert_widget_query_to_metric(
project: Project,
widget_query: DashboardWidgetQuery,
project: Project, widget_query: DashboardWidgetQuery, prefilling: bool
) -> Sequence[HashedMetricSpec]:
"""
Converts a passed metrics widget query to one or more MetricSpecs.
@@ -189,6 +224,7 @@ def _convert_widget_query_to_metric(
Dataset.PerformanceMetrics.value,
aggregate,
widget_query.conditions,
prefilling,
):
_log_on_demand_metric_spec(
project_id=project.id,
@@ -197,20 +233,25 @@
id=widget_query.id,
field=aggregate,
query=widget_query.conditions,
prefilling=prefilling,
)
metrics.incr(
"on_demand_metrics.on_demand_spec.for_widget",
tags={"prefilling": prefilling},
)
metrics_specs.append(result)

return metrics_specs


def _convert_aggregate_and_query_to_metric(
project: Project, dataset: str, aggregate: str, query: str
project: Project, dataset: str, aggregate: str, query: str, prefilling: bool
) -> Optional[HashedMetricSpec]:
"""
Converts an aggregate and a query to a metric spec with its hash value.
"""
try:
if not should_use_on_demand_metrics(dataset, aggregate, query):
if not should_use_on_demand_metrics(dataset, aggregate, query, prefilling):
return None

on_demand_spec = OnDemandMetricSpec(
@@ -220,7 +261,11 @@

return on_demand_spec.query_hash, on_demand_spec.to_metric_spec(project)
except Exception as e:
logger.error(e, exc_info=True)
# Since prefilling might include several non-on-demand-compatible alerts,
# we don't want to trigger errors in the Sentry console.
if not prefilling:
logger.error(e, exc_info=True)

return None


@@ -231,6 +276,7 @@ def _log_on_demand_metric_spec(
id: int,
field: str,
query: str,
prefilling: bool,
) -> None:
spec_query_hash, spec_dict = spec

@@ -244,6 +290,7 @@
"spec_for": spec_for,
"spec_query_hash": spec_query_hash,
"spec": spec_dict,
"prefilling": prefilling,
},
)

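Two details above are easy to miss. First, the per-flag features.has checks are replaced by a single features.batch_has call whose result is reduced to a set of enabled flag names, so downstream helpers test set membership instead of issuing one feature lookup per flag. Second, _convert_aggregate_and_query_to_metric now suppresses error logging while prefilling, since prefilled transaction-dataset alerts are expected to include queries that are not on-demand compatible. Below is a condensed, self-contained sketch of the flag reduction, assuming the return shape that the diff's usage of batch_has implies ({"organization:<id>": {flag_name: bool, ...}} or None):

from typing import Dict, Optional, Set

def enabled_flag_names(
    batch_result: Optional[Dict[str, Dict[str, bool]]], organization_id: int
) -> Set[str]:
    # batch_has can return None, e.g. when the feature handler cannot batch.
    if batch_result is None:
        return set()
    org_flags = batch_result.get(f"organization:{organization_id}", {})
    return {name for name, enabled in org_flags.items() if enabled}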
13 changes: 11 additions & 2 deletions src/sentry/snuba/metrics/extraction.py
@@ -260,10 +260,19 @@ def is_on_demand_snuba_query(snuba_query: SnubaQuery) -> bool:


def should_use_on_demand_metrics(
dataset: Optional[Union[str, Dataset]], aggregate: str, query: Optional[str]
dataset: Optional[Union[str, Dataset]],
aggregate: str,
query: Optional[str],
prefilling: bool = False,
) -> bool:
"""On-demand metrics are used if the aggregate and query are supported by on-demand metrics but not standard"""
if not dataset or Dataset(dataset) != Dataset.PerformanceMetrics:
supported_datasets = [Dataset.PerformanceMetrics]
# When running a prefill, we also want to support transactions, since our goal is to start extracting
# metrics that will be needed after a query is converted from transactions to metrics.
if prefilling:
supported_datasets.append(Dataset.Transactions)

if not dataset or Dataset(dataset) not in supported_datasets:
return False

aggregate_supported_by = _get_aggregate_supported_by(aggregate)
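The change above is the entire dataset-level effect of prefilling: the Transactions dataset becomes acceptable alongside PerformanceMetrics. A standalone sketch of the gate, with a stand-in enum in place of sentry.snuba.dataset.Dataset (the member values here are placeholders):

from enum import Enum

class Dataset(Enum):  # stand-in for sentry.snuba.dataset.Dataset
    PerformanceMetrics = "performance_metrics"
    Transactions = "transactions"

def dataset_supported(dataset: Dataset, prefilling: bool) -> bool:
    supported = [Dataset.PerformanceMetrics]
    if prefilling:
        # Also extract from transaction-based queries so the metrics exist
        # by the time those queries are migrated over to metrics.
        supported.append(Dataset.Transactions)
    return dataset in supported

assert dataset_supported(Dataset.Transactions, prefilling=True)
assert not dataset_supported(Dataset.Transactions, prefilling=False)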
85 changes: 83 additions & 2 deletions tests/sentry/relay/config/test_metric_extraction.py
@@ -1,6 +1,8 @@
from typing import Sequence
from unittest.mock import ANY

import pytest

from sentry.incidents.models import AlertRule
from sentry.models import (
Dashboard,
@@ -20,13 +22,17 @@

ON_DEMAND_METRICS = "organizations:on-demand-metrics-extraction"
ON_DEMAND_METRICS_WIDGETS = "organizations:on-demand-metrics-extraction-experimental"
ON_DEMAND_METRICS_PREFILL = "organizations:on-demand-metrics-prefill"
ON_DEMAND_METRIC_PREFILL_ENABLE = "organizations:enable-on-demand-metrics-prefill"


def create_alert(aggregate: str, query: str, project: Project) -> AlertRule:
def create_alert(
aggregate: str, query: str, project: Project, dataset: Dataset = Dataset.PerformanceMetrics
) -> AlertRule:
snuba_query = SnubaQuery.objects.create(
aggregate=aggregate,
query=query,
dataset=Dataset.PerformanceMetrics.value,
dataset=dataset.value,
time_window=300,
resolution=60,
environment=None,
@@ -393,3 +399,78 @@ def test_get_metric_extraction_config_with_apdex(default_project):
{"key": "query_hash", "value": ANY},
],
}


@django_db_all
@pytest.mark.parametrize(
"enabled_features, number_of_metrics",
[
([ON_DEMAND_METRICS], 1), # Only alerts.
([ON_DEMAND_METRICS_WIDGETS, ON_DEMAND_METRICS_PREFILL], 0), # Nothing.
([ON_DEMAND_METRICS, ON_DEMAND_METRICS_WIDGETS], 2), # Alerts and widgets.
([ON_DEMAND_METRICS_PREFILL, ON_DEMAND_METRIC_PREFILL_ENABLE], 1), # Alerts.
([ON_DEMAND_METRICS_PREFILL], 0), # Nothing.
([ON_DEMAND_METRICS, ON_DEMAND_METRICS_PREFILL], 1), # Alerts.
([ON_DEMAND_METRICS, ON_DEMAND_METRIC_PREFILL_ENABLE], 1), # Alerts.
([], 0), # Nothing.
],
)
def test_get_metrics_extraction_config_features_combinations(
enabled_features, number_of_metrics, default_project
):
create_alert("count()", "transaction.duration:>=10", default_project)
create_widget(["count()"], "transaction.duration:>=20", default_project)

features = {feature: True for feature in enabled_features}
with Feature(features):
config = get_metric_extraction_config(default_project)
if number_of_metrics == 0:
assert config is None
else:
assert config is not None
assert len(config["metrics"]) == number_of_metrics


@django_db_all
def test_get_metric_extraction_config_with_transactions_dataset(default_project):
create_alert(
"count()", "transaction.duration:>=10", default_project, dataset=Dataset.PerformanceMetrics
)
create_alert(
"count()", "transaction.duration:>=20", default_project, dataset=Dataset.Transactions
)

# We test with prefilling, and we expect that both alerts are fetched since we support both datasets.
with Feature({ON_DEMAND_METRICS_PREFILL: True, ON_DEMAND_METRIC_PREFILL_ENABLE: True}):
config = get_metric_extraction_config(default_project)

assert config
assert len(config["metrics"]) == 2
assert config["metrics"][0] == {
"category": "transaction",
"condition": {"name": "event.duration", "op": "gte", "value": 10.0},
"field": None,
"mri": "c:transactions/on_demand@none",
"tags": [{"key": "query_hash", "value": ANY}],
}
assert config["metrics"][1] == {
"category": "transaction",
"condition": {"name": "event.duration", "op": "gte", "value": 20.0},
"field": None,
"mri": "c:transactions/on_demand@none",
"tags": [{"key": "query_hash", "value": ANY}],
}

# We test without prefilling, and we expect that only alerts for performance metrics are fetched.
with Feature({ON_DEMAND_METRICS: True}):
config = get_metric_extraction_config(default_project)

assert config
assert len(config["metrics"]) == 1
assert config["metrics"][0] == {
"category": "transaction",
"condition": {"name": "event.duration", "op": "gte", "value": 10.0},
"field": None,
"mri": "c:transactions/on_demand@none",
"tags": [{"key": "query_hash", "value": ANY}],
}
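For readers unfamiliar with the asserted mri values: Sentry metric resource identifiers (MRIs) follow the pattern <type>:<namespace>/<name>@<unit>, so c:transactions/on_demand@none reads as a counter ("c") in the transactions namespace with no unit. An illustrative parser; the regex is inferred from these fixtures rather than taken from Sentry's own MRI utilities:

import re

MRI_RE = re.compile(r"^(?P<type>[a-z_]+):(?P<namespace>\w+)/(?P<name>\w+)@(?P<unit>\w+)$")

def parse_mri(mri: str):
    # Returns the {"type", "namespace", "name", "unit"} groups, or None if no match.
    match = MRI_RE.match(mri)
    return match.groupdict() if match else None

assert parse_mri("c:transactions/on_demand@none") == {
    "type": "c",
    "namespace": "transactions",
    "name": "on_demand",
    "unit": "none",
}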
