From 1dceec3d9124c915c83571de919558f46e82404d Mon Sep 17 00:00:00 2001
From: Brad Dwyer
Date: Wed, 27 Nov 2024 20:26:38 -0600
Subject: [PATCH 1/3] Don't require API Keys for Public Workflows

---
 inference/core/entities/requests/workflows.py | 3 ++-
 inference/core/roboflow_api.py                | 8 +++++---
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/inference/core/entities/requests/workflows.py b/inference/core/entities/requests/workflows.py
index 79587d132..d9605c9e6 100644
--- a/inference/core/entities/requests/workflows.py
+++ b/inference/core/entities/requests/workflows.py
@@ -8,7 +8,8 @@
 
 
 class WorkflowInferenceRequest(BaseModel):
-    api_key: str = Field(
+    api_key: Optional[str] = Field(
+        default=None,
         description="Roboflow API Key that will be passed to the model during initialization for artifact retrieval",
     )
     inputs: Dict[str, Any] = Field(

diff --git a/inference/core/roboflow_api.py b/inference/core/roboflow_api.py
index 63a53aeba..f3dbcb39d 100644
--- a/inference/core/roboflow_api.py
+++ b/inference/core/roboflow_api.py
@@ -397,7 +397,7 @@ def get_workflow_cache_file(
 ) -> str:
     sanitized_workspace_id = sanitize_path_segment(workspace_id)
     sanitized_workflow_id = sanitize_path_segment(workflow_id)
-    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest()
+    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
     prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
     result = os.path.abspath(
         os.path.join(
@@ -570,7 +570,7 @@ def _prepare_workflow_response_cache_key(
     workspace_id: WorkspaceID,
     workflow_id: str,
 ) -> str:
-    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest()
+    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
     return f"workflow_definition:{workspace_id}:{workflow_id}:{api_key_hash}"
 
 
@@ -594,7 +594,9 @@ def _add_params_to_url(url: str, params: List[Tuple[str, str]]) -> str:
     if len(params) == 0:
         return url
     params_chunks = [
-        f"{name}={urllib.parse.quote_plus(value)}" for name, value in params
+        f"{name}={urllib.parse.quote_plus(value)}"
+        for name, value in params
+        if value is not None
     ]
     parameters_string = "&".join(params_chunks)
     return f"{url}?{parameters_string}"

From dd6869b983ae3ccc91346ecec25cea1a60a5c753 Mon Sep 17 00:00:00 2001
From: Brad Dwyer
Date: Wed, 27 Nov 2024 20:42:27 -0600
Subject: [PATCH 2/3] Fix style

---
 inference/core/roboflow_api.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/inference/core/roboflow_api.py b/inference/core/roboflow_api.py
index f3dbcb39d..aff33158d 100644
--- a/inference/core/roboflow_api.py
+++ b/inference/core/roboflow_api.py
@@ -397,7 +397,9 @@ def get_workflow_cache_file(
 ) -> str:
     sanitized_workspace_id = sanitize_path_segment(workspace_id)
     sanitized_workflow_id = sanitize_path_segment(workflow_id)
-    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+    api_key_hash = (
+        hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+    )
     prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
     result = os.path.abspath(
         os.path.join(
@@ -570,7 +572,9 @@ def _prepare_workflow_response_cache_key(
     workspace_id: WorkspaceID,
     workflow_id: str,
 ) -> str:
-    api_key_hash = hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+    api_key_hash = (
+        hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+    )
     return f"workflow_definition:{workspace_id}:{workflow_id}:{api_key_hash}"

From 7471b9cfe4dcbe5f676ef7df62f43ab35423682c Mon Sep 17 00:00:00 2001
From: Paweł Pęczek
Date: Thu, 28 Nov 2024 11:00:33 +0100
Subject: [PATCH 3/3] Apply changes after review of PR

---
 inference/core/roboflow_api.py | 33 +++++++++++++++++++--------------
 1 file changed, 19 insertions(+), 14 deletions(-)

diff --git a/inference/core/roboflow_api.py b/inference/core/roboflow_api.py
index aff33158d..b58b03eaf 100644
--- a/inference/core/roboflow_api.py
+++ b/inference/core/roboflow_api.py
@@ -393,12 +393,14 @@ def get_roboflow_labeling_jobs(
 
 
 def get_workflow_cache_file(
-    workspace_id: WorkspaceID, workflow_id: str, api_key: str
+    workspace_id: WorkspaceID, workflow_id: str, api_key: Optional[str]
 ) -> str:
     sanitized_workspace_id = sanitize_path_segment(workspace_id)
     sanitized_workflow_id = sanitize_path_segment(workflow_id)
     api_key_hash = (
-        hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+        hashlib.md5(api_key.encode("utf-8")).hexdigest()
+        if api_key is not None
+        else "None"
     )
     prefix = os.path.abspath(os.path.join(MODEL_CACHE_DIR, "workflow"))
     result = os.path.abspath(
@@ -416,7 +418,7 @@ def get_workflow_cache_file(
 
 
 def cache_workflow_response(
-    workspace_id: WorkspaceID, workflow_id: str, api_key: str, response: dict
+    workspace_id: WorkspaceID, workflow_id: str, api_key: Optional[str], response: dict
 ):
     workflow_cache_file = get_workflow_cache_file(
         workspace_id=workspace_id,
@@ -433,7 +435,7 @@
 def delete_cached_workflow_response_if_exists(
     workspace_id: WorkspaceID,
     workflow_id: str,
-    api_key: str,
+    api_key: Optional[str],
 ) -> None:
     workflow_cache_file = get_workflow_cache_file(
         workspace_id=workspace_id,
@@ -447,7 +449,7 @@
 def load_cached_workflow_response(
     workspace_id: WorkspaceID,
     workflow_id: str,
-    api_key: str,
+    api_key: Optional[str],
 ) -> Optional[dict]:
     workflow_cache_file = get_workflow_cache_file(
         workspace_id=workspace_id,
@@ -469,7 +471,7 @@
 def get_workflow_specification(
-    api_key: str,
+    api_key: Optional[str],
     workspace_id: WorkspaceID,
     workflow_id: str,
     use_cache: bool = True,
@@ -485,9 +487,12 @@
         )
         if cached_entry:
             return cached_entry
+    params = []
+    if api_key is not None:
+        params.append(("api_key", api_key))
     api_url = _add_params_to_url(
         url=f"{API_BASE_URL}/{workspace_id}/workflows/{workflow_id}",
-        params=[("api_key", api_key)],
+        params=params,
     )
     try:
         response = _get_from_url(url=api_url)
@@ -535,7 +540,7 @@ def get_workflow_specification(
 
 
 def _retrieve_workflow_specification_from_ephemeral_cache(
-    api_key: str,
+    api_key: Optional[str],
     workspace_id: WorkspaceID,
     workflow_id: str,
     ephemeral_cache: BaseCache,
@@ -549,7 +554,7 @@ def _retrieve_workflow_specification_from_ephemeral_cache(
 
 
 def _cache_workflow_specification_in_ephemeral_cache(
-    api_key: str,
+    api_key: Optional[str],
     workspace_id: WorkspaceID,
     workflow_id: str,
     specification: dict,
@@ -568,12 +573,14 @@ def _cache_workflow_specification_in_ephemeral_cache(
 
 
 def _prepare_workflow_response_cache_key(
-    api_key: str,
+    api_key: Optional[str],
     workspace_id: WorkspaceID,
     workflow_id: str,
 ) -> str:
     api_key_hash = (
-        hashlib.md5(api_key.encode("utf-8")).hexdigest() if api_key else "None"
+        hashlib.md5(api_key.encode("utf-8")).hexdigest()
+        if api_key is not None
+        else "None"
     )
     return f"workflow_definition:{workspace_id}:{workflow_id}:{api_key_hash}"
@@ -598,9 +605,7 @@ def _add_params_to_url(url: str, params: List[Tuple[str, str]]) -> str:
     if len(params) == 0:
         return url
     params_chunks = [
-        f"{name}={urllib.parse.quote_plus(value)}"
-        for name, value in params
-        if value is not None
+        f"{name}={urllib.parse.quote_plus(value)}" for name, value in params
     ]
     parameters_string = "&".join(params_chunks)
    return f"{url}?{parameters_string}"
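For reviewers, a standalone sketch of the behaviour the series converges on after PATCH 3/3: the cache key hashes the API key when one is supplied and falls back to the literal string "None" otherwise, and the request URL simply omits the api_key query parameter for public workflows. The snippet below mirrors the patched helpers (_prepare_workflow_response_cache_key and _add_params_to_url) outside the module, so the function names without the leading underscore, the API_BASE_URL value, and the workspace/workflow IDs are illustrative placeholders, not the library's API.

```python
import hashlib
import urllib.parse
from typing import List, Optional, Tuple

API_BASE_URL = "https://api.roboflow.com"  # placeholder value for illustration


def prepare_workflow_response_cache_key(
    api_key: Optional[str], workspace_id: str, workflow_id: str
) -> str:
    # Mirrors _prepare_workflow_response_cache_key after PATCH 3/3:
    # hash the key when present, otherwise use the literal "None".
    api_key_hash = (
        hashlib.md5(api_key.encode("utf-8")).hexdigest()
        if api_key is not None
        else "None"
    )
    return f"workflow_definition:{workspace_id}:{workflow_id}:{api_key_hash}"


def add_params_to_url(url: str, params: List[Tuple[str, str]]) -> str:
    # Mirrors _add_params_to_url: after PATCH 3/3 it again assumes every value
    # is present, because the caller only appends api_key when one is set.
    if len(params) == 0:
        return url
    params_chunks = [
        f"{name}={urllib.parse.quote_plus(value)}" for name, value in params
    ]
    return f"{url}?{'&'.join(params_chunks)}"


if __name__ == "__main__":
    # Public workflow: no API key, so no api_key query parameter is emitted
    # and the cache key ends in ":None".
    api_key = None
    params = []
    if api_key is not None:
        params.append(("api_key", api_key))
    url = add_params_to_url(
        url=f"{API_BASE_URL}/my-workspace/workflows/my-workflow", params=params
    )
    print(url)
    print(prepare_workflow_response_cache_key(api_key, "my-workspace", "my-workflow"))
```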