Skip to content

Commit

Permalink
Merge pull request #263 from parea-ai/fix-openai-tracing-on-evals
Browse files Browse the repository at this point in the history
Fix the legacy (pre-1.0) OpenAI function-calling parser; handle the parent trace id in the logger
  • Loading branch information
jalexanderII committed Dec 26, 2023
2 parents 92dce7d + a3bfb38 commit 227231d
Show file tree
Hide file tree
Showing 7 changed files with 44 additions and 16 deletions.
10 changes: 8 additions & 2 deletions parea/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,27 +35,33 @@ def __attrs_post_init__(self):
_init_parea_wrapper(logger_all_possible, self.cache)

def completion(self, data: Completion) -> CompletionResponse:
    """Send a completion request to the Parea API, attaching it to the
    active trace (if any).

    The inference gets a fresh trace id; if a parent trace is active, the
    inference is registered as its child and the parent's log record is
    refreshed. Returns the parsed ``CompletionResponse``.
    """
    parent_trace_id = get_current_trace_id()
    inference_id = gen_trace_id()
    data.inference_id = inference_id
    # Root-level calls have no parent, so they become their own parent id.
    data.parent_trace_id = parent_trace_id or inference_id

    r = self._client.request(
        "POST",
        COMPLETION_ENDPOINT,
        data=asdict(data),
    )
    if parent_trace_id:
        trace_data.get()[parent_trace_id].children.append(inference_id)
        logger_record_log(parent_trace_id)
    return CompletionResponse(**r.json())

async def acompletion(self, data: Completion) -> CompletionResponse:
    """Async counterpart of :meth:`completion`.

    Same tracing semantics: a fresh inference id is generated, the parent
    trace id (or the inference's own id for root calls) is recorded on the
    payload, and on success the inference is linked into the parent trace.
    """
    parent_trace_id = get_current_trace_id()
    inference_id = gen_trace_id()
    data.inference_id = inference_id
    # Root-level calls have no parent, so they become their own parent id.
    data.parent_trace_id = parent_trace_id or inference_id

    r = await self._client.request_async(
        "POST",
        COMPLETION_ENDPOINT,
        data=asdict(data),
    )
    if parent_trace_id:
        trace_data.get()[parent_trace_id].children.append(inference_id)
        logger_record_log(parent_trace_id)
    return CompletionResponse(**r.json())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@ def format_docs(docs):


def main():
rag_chain.invoke("What is Task Decomposition?", config={"callbacks": [PareaAILangchainTracer()]})
response = rag_chain.invoke("What is Task Decomposition?", config={"callbacks": [PareaAILangchainTracer()]})
print(response)


if __name__ == "__main__":
Expand Down
9 changes: 4 additions & 5 deletions parea/cookbook/tracing_with_openai_with_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,11 +163,10 @@ def provide_user_specific_recommendations(user_input, user_id, functions) -> tup
temperature=0,
functions=functions,
)

if "message" in response.choices[0] and "function_call" in response.choices[0]["message"]:
function_call = response.choices[0]["message"]["function_call"]
if function_call["name"] == "call_google_places_api":
place_type = json.loads(function_call["arguments"])["place_type"]
if response.choices[0].message.function_call:
function_call = response.choices[0].message.function_call
if function_call.name == "call_google_places_api":
place_type = json.loads(function_call.arguments)["place_type"]
places = call_google_places_api(user_id, place_type, food_preference)
if places: # If the list of places is not empty
return f"Here are some places you might be interested in: {' '.join(places)}", trace_id
Expand Down
2 changes: 1 addition & 1 deletion parea/evals/rag/answer_context_faithfulness_binary.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
def answer_context_faithfulness_binary_factory(
question_field: Optional[str] = "question",
context_field: Optional[str] = "context",
model: Optional[str] = "gpt-4",
model: Optional[str] = "gpt-3.5-turbo-16k",
) -> Callable[[Log], float]:
"""Quantifies how much the generated answer can be inferred from the retrieved context."""

Expand Down
1 change: 1 addition & 0 deletions parea/schemas/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
@define
class Completion:
inference_id: Optional[str] = None
parent_trace_id: Optional[str] = None
trace_name: Optional[str] = None
llm_inputs: Optional[dict[str, Any]] = None
llm_configuration: LLMInputs = LLMInputs()
Expand Down
33 changes: 27 additions & 6 deletions parea/wrapper/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,12 +246,33 @@ def _get_output(result: Any) -> str:

@staticmethod
def _format_function_call(response_message) -> str:
    """Serialize an OpenAI function/tool call into pretty-printed JSON text.

    Handles both SDK generations:
    - openai < 1.0 (``openai_version`` starts with "0."): ``response_message``
      is dict-like and arguments may already be an ``OpenAIObject``.
    - openai >= 1.0: attribute access; either a single ``function_call`` or a
      list of ``tool_calls``, each serialized on its own line.
    """

    def clean_json_string(s):
        """If OpenAI responds with improper newlines and multiple quotes, this will clean it up"""
        return json.dumps(s.replace("'", '"').replace("\\n", "\\\\n"))

    if openai_version.startswith("0."):
        # Legacy 0.x SDK path: dict-style access.
        function_name = response_message["function_call"]["name"]
        if isinstance(response_message["function_call"]["arguments"], OpenAIObject):
            function_args = dict(response_message["function_call"]["arguments"])
        else:
            function_args = json.loads(response_message["function_call"]["arguments"])
        return json.dumps({"name": function_name, "arguments": function_args}, indent=4)

    # 1.x SDK path: either one function_call object or a list of tool calls.
    func_obj = response_message.function_call or response_message.tool_calls
    calls = []
    if not isinstance(func_obj, list):
        func_obj = [func_obj]

    for call in func_obj:
        if call:
            # Tool calls wrap the function payload in a ``.function`` attribute.
            body = getattr(call, "function", None) or call
            function_name = body.name
            try:
                function_args = json.loads(body.arguments)
            except json.decoder.JSONDecodeError:
                # Fall back to cleaning malformed quoting/newlines before parsing.
                function_args = json.loads(clean_json_string(body.arguments))
            calls.append(json.dumps({"name": function_name, "arguments": function_args}, indent=4))
    return "\n".join(calls)

@staticmethod
def get_model_cost(model_name: str, is_completion: bool = False) -> float:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "parea-ai"
packages = [{ include = "parea" }]
version = "0.2.26a0"
description = "Parea python sdk"
readme = "README.md"
authors = ["joel-parea-ai <[email protected]>"]
Expand Down

0 comments on commit 227231d

Please sign in to comment.