[FEATURE] Changes for capturing metrics (#900)
* Initial changes for capturing metrics

Signed-off-by: Deepak <[email protected]>

* Minor fixes

Signed-off-by: Deepak <[email protected]>

* Minor fix

Signed-off-by: Deepak <[email protected]>

* Minor fixes

Signed-off-by: Deepak <[email protected]>

* SDK and tool version bump

Signed-off-by: Deepak <[email protected]>

* redis version fix

Signed-off-by: Deepak <[email protected]>

* Commit pdm.lock changes

---------

Signed-off-by: Deepak <[email protected]>
Co-authored-by: Deepak-Kesavan <[email protected]>
Deepak-Kesavan authored Dec 18, 2024
1 parent 7fea3e1 commit 2f84d14
Showing 24 changed files with 1,111 additions and 968 deletions.
5 changes: 5 additions & 0 deletions backend/api_v2/api_deployment_views.py
@@ -52,6 +52,7 @@ def post(
serializer.is_valid(raise_exception=True)
timeout = serializer.validated_data.get(ApiExecution.TIMEOUT_FORM_DATA)
include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA)
include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS)
use_file_history = serializer.validated_data.get(ApiExecution.USE_FILE_HISTORY)
if not file_objs or len(file_objs) == 0:
raise InvalidAPIRequest("File shouldn't be empty")
@@ -61,6 +62,7 @@
file_objs=file_objs,
timeout=timeout,
include_metadata=include_metadata,
include_metrics=include_metrics,
use_file_history=use_file_history,
)
if "error" in response and response["error"]:
@@ -79,6 +81,7 @@ def get(

execution_id = serializer.validated_data.get(ApiExecution.EXECUTION_ID)
include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA)
include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS)

# Fetch execution status
response: ExecutionResponse = DeploymentHelper.get_execution_status(
@@ -91,6 +94,8 @@
response_status = status.HTTP_200_OK
if not include_metadata:
response.remove_result_metadata_keys()
if not include_metrics:
response.remove_result_metrics()
if response.result_acknowledged:
response_status = status.HTTP_406_NOT_ACCEPTABLE
response.result = "Result already acknowledged"
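For context, a hedged sketch of how a client might exercise the new flag against an API deployment. The base URL, org and API names, file name, and the bearer-token auth header below are placeholders, not taken from this commit.

import requests

url = "https://unstract.example.com/deployment/api/my_org/my_api/"  # hypothetical endpoint
headers = {"Authorization": "Bearer <api-key>"}  # assumed auth scheme

with open("invoice.pdf", "rb") as pdf:
    resp = requests.post(
        url,
        headers=headers,
        files={"files": pdf},              # ApiExecution.FILES_FORM_DATA
        data={
            "timeout": 300,                # ApiExecution.TIMEOUT_FORM_DATA (max 300 s; -1 = async)
            "include_metadata": "False",   # ApiExecution.INCLUDE_METADATA
            "include_metrics": "True",     # new ApiExecution.INCLUDE_METRICS flag
        },
    )
print(resp.json())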
1 change: 1 addition & 0 deletions backend/api_v2/constants.py
@@ -4,5 +4,6 @@ class ApiExecution:
FILES_FORM_DATA: str = "files"
TIMEOUT_FORM_DATA: str = "timeout"
INCLUDE_METADATA: str = "include_metadata"
INCLUDE_METRICS: str = "include_metrics"
USE_FILE_HISTORY: str = "use_file_history" # Undocumented parameter
EXECUTION_ID: str = "execution_id"
3 changes: 3 additions & 0 deletions backend/api_v2/deployment_helper.py
@@ -136,6 +136,7 @@ def execute_workflow(
file_objs: list[UploadedFile],
timeout: int,
include_metadata: bool = False,
include_metrics: bool = False,
use_file_history: bool = False,
) -> ReturnDict:
"""Execute workflow by api.
@@ -180,6 +181,8 @@
)
if not include_metadata:
result.remove_result_metadata_keys()
if not include_metrics:
result.remove_result_metrics()
except Exception as error:
DestinationConnector.delete_api_storage_dir(
workflow_id=workflow_id, execution_id=execution_id
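Condensed, the shaping above is opt-in in both directions: metadata and metrics are stripped from the result unless the caller asks for them. A minimal sketch of that gating (the helper function name is illustrative, not part of the codebase):

def shape_api_response(result, include_metadata: bool = False, include_metrics: bool = False):
    # Mirrors the conditionals in execute_workflow: both sections of the
    # per-file result are removed unless explicitly requested.
    if not include_metadata:
        result.remove_result_metadata_keys()
    if not include_metrics:
        result.remove_result_metrics()  # added by this commit
    return result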
2 changes: 2 additions & 0 deletions backend/api_v2/postman_collection/dto.py
@@ -118,6 +118,7 @@ def get_form_data_items(self) -> list[FormDataItem]:
value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC,
),
FormDataItem(key=ApiExecution.INCLUDE_METADATA, type="text", value="False"),
FormDataItem(key=ApiExecution.INCLUDE_METRICS, type="text", value="False"),
]

def get_api_key(self) -> str:
@@ -131,6 +132,7 @@ def _get_status_api_request(self) -> RequestItem:
status_query_param = {
"execution_id": CollectionKey.STATUS_EXEC_ID_DEFAULT,
ApiExecution.INCLUDE_METADATA: "False",
ApiExecution.INCLUDE_METRICS: "False",
}
status_query_str = urlencode(status_query_param)
abs_api_endpoint = urljoin(settings.WEB_APP_ORIGIN_URL, self.api_endpoint)
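For reference, the status request generated for the Postman collection now carries the new key as well. A small sketch of the resulting query string; the execution_id value shown is a stand-in for CollectionKey.STATUS_EXEC_ID_DEFAULT, whose actual default is not shown in this diff.

from urllib.parse import urlencode

status_query_param = {
    "execution_id": "REPLACE_WITH_EXECUTION_ID",  # placeholder default
    "include_metadata": "False",
    "include_metrics": "False",
}
print(urlencode(status_query_param))
# execution_id=REPLACE_WITH_EXECUTION_ID&include_metadata=False&include_metrics=False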
3 changes: 3 additions & 0 deletions backend/api_v2/serializers.py
@@ -106,6 +106,7 @@ class ExecutionRequestSerializer(Serializer):
timeout (int): Timeout for the API deployment, maximum value can be 300s.
If -1 it corresponds to async execution. Defaults to -1
include_metadata (bool): Flag to include metadata in API response
include_metrics (bool): Flag to include metrics in API response
use_file_history (bool): Flag to use FileHistory to save and retrieve
responses quickly. This is undocumented to the user and can be
helpful for demos.
@@ -115,12 +116,14 @@
min_value=-1, max_value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC, default=-1
)
include_metadata = BooleanField(default=False)
include_metrics = BooleanField(default=False)
use_file_history = BooleanField(default=False)


class ExecutionQuerySerializer(Serializer):
execution_id = CharField(required=True)
include_metadata = BooleanField(default=False)
include_metrics = BooleanField(default=False)

def validate_execution_id(self, value):
"""Trim spaces, validate UUID format, and check if execution_id exists."""
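To see how these defaults behave, a short sketch of the request serializer as the views use it. File upload handling is omitted, and the serializer may declare more fields than this hunk shows.

serializer = ExecutionRequestSerializer(
    data={"timeout": 120, "include_metrics": True}
)
serializer.is_valid(raise_exception=True)
serializer.validated_data["include_metrics"]   # True
serializer.validated_data["include_metadata"]  # False (default when omitted)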
79 changes: 40 additions & 39 deletions backend/pdm.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion backend/pyproject.toml
@@ -32,7 +32,7 @@ dependencies = [
"python-socketio==5.9.0", # For log_events
"social-auth-app-django==5.3.0", # For OAuth
"social-auth-core==4.4.2", # For OAuth
"unstract-sdk~=0.54.0rc6",
"unstract-sdk~=0.54.0rc8",
# ! IMPORTANT!
# Indirect local dependencies usually need to be added in their own projects
# as: https://pdm-project.org/latest/usage/dependency/#local-dependencies.
4 changes: 2 additions & 2 deletions backend/sample.env
@@ -82,9 +82,9 @@ REMOTE_PROMPT_STUDIO_FILE_PATH=

# Structure Tool Image (Runs prompt studio exported tools)
# https://hub.docker.com/r/unstract/tool-structure
-STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.52"
+STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.53"
STRUCTURE_TOOL_IMAGE_NAME="unstract/tool-structure"
-STRUCTURE_TOOL_IMAGE_TAG="0.0.52"
+STRUCTURE_TOOL_IMAGE_TAG="0.0.53"

# Feature Flags
EVALUATION_SERVER_IP=unstract-flipt
15 changes: 15 additions & 0 deletions backend/workflow_manager/workflow_v2/dto.py
@@ -63,6 +63,21 @@ def remove_result_metadata_keys(self, keys_to_remove: list[str] = []) -> None:

self._remove_specific_keys(result=result, keys_to_remove=keys_to_remove)

def remove_result_metrics(self) -> None:
"""Removes the 'metrics' key from the 'result' dictionary within each
'result' dictionary in the 'result' list attribute of the instance.
"""
if not isinstance(self.result, list):
return

for item in self.result:
if not isinstance(item, dict):
continue

result = item.get("result")
if isinstance(result, dict):
result.pop("metrics", None)

def _remove_specific_keys(self, result: dict, keys_to_remove: list[str]) -> None:
"""Removes specified keys from the 'metadata' dictionary within the
provided 'result' dictionary. If 'keys_to_remove' is empty, the
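The same pruning can be pictured on plain data. A standalone sketch of what the new method does to a response payload; only the outer shape (a list of items, each holding a "result" dict that may carry a "metrics" key) comes from the method above, and the metric values shown are made up.

result = [
    {"file": "a.pdf", "result": {"output": "...", "metrics": {"time_taken(s)": 12.4}}},
    "skipped-entry",  # non-dict items are ignored, as in remove_result_metrics()
]
for item in result:
    if not isinstance(item, dict):
        continue
    inner = item.get("result")
    if isinstance(inner, dict):
        inner.pop("metrics", None)  # idempotent: a missing key is fine

assert "metrics" not in result[0]["result"]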
(Diffs for the remaining changed files are not shown here.)
