Update llm_wrapper.llm_response objects to handle tool calls. Clean up deprecated methods.
uogbuji committed Jun 24, 2024
1 parent 0c07111 commit 95fb488
Showing 1 changed file with 9 additions and 41 deletions.
50 changes: 9 additions & 41 deletions pylib/llm_wrapper.py
@@ -12,6 +12,7 @@
 '''
 
 import os
+import json
 import asyncio
 import concurrent.futures
 from functools import partial
@@ -70,8 +71,14 @@ def from_openai_chat(response):
                 if 'message' in c:
                     c['message'] = llm_response(c['message'])
             rc1 = resp['choices'][0]
-            # print(f'from_openai_chat: {rc1 =}')
-            resp['first_choice_text'] = rc1['text'] if 'text' in rc1 else rc1['message']['content']
+            # No response message content if a tool call is invoked
+            if 'tool_calls' in rc1['message']:
+                # Why the hell does OpenAI have these arguments properties as plain text? Seems like a massive layering violation
+                for tc in rc1['message']['tool_calls']:
+                    tc['function']['arguments_obj'] = json.loads(tc['function']['arguments'])
+            else:
+                resp['first_choice_text'] = rc1['text'] if 'text' in rc1 else rc1['message']['content']
+            print('GRIPPO', f'from_openai_chat: {rc1 =}')
         else:
             resp['first_choice_text'] = resp['content']
         return resp
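
For context, here is a minimal sketch of how a caller might consume a response processed by the updated from_openai_chat. The fields it relies on (tool_calls, arguments_obj, first_choice_text) come straight from the hunk above; the sample payload, the ogbujipt.llm_wrapper import path, and the assumption that from_openai_chat is reachable as a static method on llm_response are illustrative rather than guaranteed by this diff.

from ogbujipt.llm_wrapper import llm_response  # assumed packaged path for pylib/llm_wrapper.py

# Illustrative OpenAI-style chat completion payload in which the model invokes a tool,
# so the first choice carries tool_calls rather than message content
raw_response = {
    'choices': [{
        'index': 0,
        'finish_reason': 'tool_calls',
        'message': {
            'role': 'assistant',
            'content': None,
            'tool_calls': [{
                'id': 'call_0',
                'type': 'function',
                'function': {
                    'name': 'get_weather',
                    'arguments': '{"city": "Lagos"}',  # plain-text JSON, as OpenAI ships it
                },
            }],
        },
    }],
}

resp = llm_response.from_openai_chat(raw_response)

first_msg = resp['choices'][0]['message']
if 'tool_calls' in first_msg:
    for tc in first_msg['tool_calls']:
        # arguments_obj is the parsed dict added by this commit
        print(tc['function']['name'], tc['function']['arguments_obj'])
else:
    # Plain chat responses still get first_choice_text, as before
    print(resp['first_choice_text'])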
@@ -244,19 +251,6 @@ def available_models(self) -> List[str]:
             raise RuntimeError(f'Unexpected response from {self.base_url}/models:\n{repr(resp)}')
         return [ i['id'] for i in resp['data'] ]
 
-    @staticmethod
-    def first_choice_text(response):
-        '''
-        Given an OpenAI-compatible API simple completion response, return the first choice text
-        '''
-        warnings.warn('The first_choice_text method is deprecated; use the first_choice_text attribute or key instead', DeprecationWarning, stacklevel=2) # noqa E501
-        try:
-            return response.choices[0].text
-        except AttributeError:
-            raise RuntimeError(
-                f'''Response does not appear to be an OpenAI API completion structure, as expected:
-{repr(response)}''')
-
 
 class openai_chat_api(openai_api):
     '''
@@ -322,19 +316,6 @@ async def __call__(self, prompt, api_func=None, **kwargs):
         # Haven't implemented any OpenAI API calls that are async, so just call the sync version
         return self.call(prompt, api_func, **kwargs)
 
-    @staticmethod
-    def first_choice_message(response):
-        '''
-        Given an OpenAI-compatible API chat completion response, return the first choice message content
-        '''
-        warnings.warn('The first_choice_message method is deprecated; use the first_choice_text attribute or key instead', DeprecationWarning, stacklevel=2) # noqa E501
-        try:
-            return response.choices[0].message.content
-        except AttributeError:
-            raise RuntimeError(
-                f'''Response does not appear to be an OpenAI API chat-style completion structure, as expected:
-{repr(response)}''')
-
 
 class llama_cpp_http(llm_wrapper):
     '''
@@ -465,19 +446,6 @@ async def __call__(self, messages, req='/v1/chat/completions', timeout=30.0, api
         else:
             raise RuntimeError(f'Unexpected response from {self.base_url}{req}:\n{repr(result)}')
 
-    @staticmethod
-    def first_choice_message(response):
-        '''
-        Given an OpenAI-compatible API chat completion response, return the first choice message content
-        '''
-        warnings.warn('The first_choice_message method is deprecated; use the first_choice_text attribute or key instead', DeprecationWarning, stacklevel=2) # noqa E501
-        try:
-            return response['choices'][0]['message']['content']
-        except (IndexError, KeyError):
-            raise RuntimeError(
-                f'''Response does not appear to be a llama.cpp API chat-style completion structure, as expected:
-{repr(response)}''')
-
 
 class ctransformer:
     '''
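
The deleted helpers' own DeprecationWarning text names the replacement: read first_choice_text as an attribute or key on the llm_response object instead of calling a static method. Below is a minimal migration sketch; extract_text is a hypothetical helper for illustration, not part of the library.

def extract_text(response):
    '''
    response: an llm_response as returned by the wrappers in this module
    (openai_api, openai_chat_api, llama_cpp_http)

    Replacement pattern for the removed first_choice_text / first_choice_message
    static methods, e.g. openai_chat_api.first_choice_message(response)
    '''
    # After the tool-call change above, first_choice_text is only set when the
    # first choice carries message content, i.e. when no tool call was invoked
    if 'first_choice_text' in response:
        return response['first_choice_text']  # equivalently: response.first_choice_text
    # Otherwise surface the parsed tool-call arguments added by this commit
    return [tc['function']['arguments_obj']
            for tc in response['choices'][0]['message']['tool_calls']]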
