
Commit

feat: locustio#2955 Allow to exclude some statistics from aggregation
tyge68 committed Oct 31, 2024
1 parent ab5faf3 commit 78460d0
Showing 3 changed files with 41 additions and 14 deletions.
8 changes: 6 additions & 2 deletions locust/env.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from operator import methodcaller
from re import Pattern
from typing import Callable, TypeVar

from configargparse import Namespace
@@ -27,6 +28,7 @@ def __init__(
tags: list[str] | None = None,
locustfile: str | None = None,
exclude_tags: list[str] | None = None,
exclude_from_aggregation: str | Pattern[str] | None = None,
events: Events | None = None,
host: str | None = None,
reset_stats=False,
@@ -69,7 +71,7 @@ def __init__(
"""If set, only tasks that are tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
self.exclude_tags = exclude_tags
"""If set, only tasks that aren't tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
-self.stats = RequestStats()
self.stats = RequestStats(exclude_from_aggregation=self.exclude_from_aggregation)
"""Reference to RequestStats instance"""
self.host = host
"""Base URL of the target system"""
@@ -154,7 +156,9 @@ def create_worker_runner(self, master_host: str, master_port: int) -> WorkerRunner:
"""
# Create a new RequestStats with use_response_times_cache set to False to save some memory
# and CPU cycles, since the response_times_cache is not needed for Worker nodes
-self.stats = RequestStats(use_response_times_cache=False)
self.stats = RequestStats(
use_response_times_cache=False, exclude_from_aggregation=self.exclude_from_aggregation
)
return self._create_runner(
WorkerRunner,
master_host=master_host,
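
A minimal usage sketch of the patched Environment (not part of this commit; the pattern and request names below are hypothetical): requests whose method or name matches the pattern are still tracked per entry but left out of the "Aggregated" row.

from locust.env import Environment

# Hypothetical exclusion pattern: keep internal endpoints out of the Aggregated row
env = Environment(exclude_from_aggregation=r"^/internal/")

env.stats.log_request("GET", "/internal/health", 5, 120)  # name matches, skipped in Aggregated
env.stats.log_request("GET", "/api/orders", 42, 2048)     # counted in Aggregated

print(env.stats.total.num_requests)                                  # 1
print(env.stats.entries[("/internal/health", "GET")].num_requests)   # 1, per-entry stats kept
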
17 changes: 15 additions & 2 deletions locust/stats.py
@@ -5,6 +5,7 @@
import json
import logging
import os
import re
import signal
import time
from abc import abstractmethod
@@ -13,6 +14,7 @@
from copy import copy
from html import escape
from itertools import chain
from re import Pattern
from types import FrameType
from typing import TYPE_CHECKING, Any, Callable, NoReturn, Protocol, TypedDict, TypeVar, cast

@@ -184,14 +186,17 @@ class RequestStats:
Class that holds the request statistics. Accessible in a User from self.environment.stats
"""

-def __init__(self, use_response_times_cache=True):
def __init__(self, use_response_times_cache=True, exclude_from_aggregation: str | Pattern[str] = ""):
"""
:param use_response_times_cache: The value of use_response_times_cache will be set for each StatsEntry()
when they are created. Setting it to False saves some memory and CPU
cycles which we can do on Worker nodes where the response_times_cache
is not needed.
:param exclude_from_aggregation: Request method or name pattern whose logs should be excluded from the "Aggregated"
                                 stats. By default all logs are accepted. A regular expression is allowed.
"""
self.use_response_times_cache = use_response_times_cache
self.exclude_from_aggregation = exclude_from_aggregation
self.entries: dict[tuple[str, str], StatsEntry] = EntriesDict(self)
self.errors: dict[str, StatsError] = {}
self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
@@ -217,8 +222,16 @@ def last_request_timestamp(self):
def start_time(self):
return self.total.start_time

def exclude_from_total(self, method: str, name: str):
if self.exclude_from_aggregation:
found_in_method = re.search(self.exclude_from_aggregation, method)
found_in_name = re.search(self.exclude_from_aggregation, name)
return found_in_method or found_in_name
return False

def log_request(self, method: str, name: str, response_time: int, content_length: int) -> None:
-self.total.log(response_time, content_length)
if not self.exclude_from_total(method, name):
self.total.log(response_time, content_length)
self.entries[(name, method)].log(response_time, content_length)

def log_error(self, method: str, name: str, error: Exception | str | None) -> None:
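
A short sketch of the new filter in isolation, assuming the patched RequestStats above; the compiled pattern illustrates that exclude_from_aggregation can be either a string or a re.Pattern, since re.search accepts both.

import re

from locust.stats import RequestStats

stats = RequestStats(exclude_from_aggregation=re.compile(r"^(OPTIONS|HEAD)$"))

stats.log_request("OPTIONS", "/api/items", 3, 0)  # method matches, excluded from the total
stats.log_request("GET", "/api/items", 25, 512)   # logged everywhere

assert stats.total.num_requests == 1                               # Aggregated sees only the GET
assert stats.entries[("/api/items", "OPTIONS")].num_requests == 1  # per-entry stats unaffected
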
30 changes: 20 additions & 10 deletions locust/test/test_stats.py
@@ -1,3 +1,13 @@
import csv
import json
import os
import re
import time
import unittest
from unittest import mock

import gevent

import locust
from locust import HttpUser, TaskSet, User, __version__, constant, task
from locust.env import Environment
@@ -17,16 +27,6 @@
from locust.test.testcases import LocustTestCase, WebserverTestCase
from locust.user.inspectuser import _get_task_ratio

-import csv
-import json
-import os
-import re
-import time
-import unittest
-from unittest import mock
-
-import gevent

_TEST_CSV_STATS_INTERVAL_SEC = 0.2
_TEST_CSV_STATS_INTERVAL_WAIT_SEC = _TEST_CSV_STATS_INTERVAL_SEC + 0.1

@@ -98,6 +98,16 @@ def test_total_rps(self):
self.assertAlmostEqual(s2.total_rps, 1 / 5.0)
self.assertEqual(self.stats.total.total_rps, 10 / 5.0)

def test_total_exclude_from_aggregation(self):
before_count = self.stats.num_requests
# First without exclusion
self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
self.assertEqual(self.stats.num_requests, before_count + 1)
# Second with exclusion
self.stats.exclude_from_aggregation = r"CUSTOM"
self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
self.assertEqual(self.stats.num_requests, before_count + 1)

def test_rps_less_than_one_second(self):
s = StatsEntry(self.stats, "percentile_test", "GET")
for i in range(10):
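
Since this commit adds no command-line option, one possible way to use the feature from a locustfile is to set the attribute on the environment's RequestStats in an init listener, much as the test above does directly; the host and endpoints below are hypothetical.

from locust import HttpUser, constant, events, task

@events.init.add_listener
def _exclude_noise(environment, **kwargs):
    # Hypothetical pattern: keep health checks out of the Aggregated row
    environment.stats.exclude_from_aggregation = r"/health"

class ApiUser(HttpUser):
    host = "http://localhost:8080"  # hypothetical target
    wait_time = constant(1)

    @task
    def health(self):
        self.client.get("/health")  # still reported on its own stats row

    @task
    def orders(self):
        self.client.get("/orders")  # included in the Aggregated row
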
