Skip to content

Commit

Permalink
initial function calling skeleton
Browse files Browse the repository at this point in the history
  • Loading branch information
Yiyun-Liang committed Jun 27, 2024
1 parent 2e6e62e commit 36ac1cf
Show file tree
Hide file tree
Showing 3 changed files with 142 additions and 0 deletions.
119 changes: 119 additions & 0 deletions python/sglang/backend/openai.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import dataclasses
import inspect
import json
import logging
import time
import warnings
Expand Down Expand Up @@ -229,6 +231,123 @@ def spec_pattern_match(self, comp):
return False
return True

def function_calling(
    self,
    s: StreamExecutor,
    tools: List[str],
    tool_choice: str,
):
    """Execute one OpenAI tool-calling round trip for the stream *s*.

    Builds JSON schemas for the registered tools, asks the model to
    (optionally) call them, executes any requested calls locally, and
    appends the resulting completion text to ``s.text_``.

    Args:
        s: Stream executor whose accumulated text is used as the prompt.
        tools: Names of the callables the model may invoke.
        tool_choice: Requested tool-choice mode (overridden by
            ``self.tool_choice`` when that is set, matching the original
            skeleton's behavior).

    Raises:
        RuntimeError: If the configured model does not support tools.
    """
    # TODO: handle chat vs. non-chat models and stream vs. non-stream.
    if self.model_name not in [
        "gpt-4o",
        "gpt-4o-2024-05-13",
        "gpt-4-turbo",
        "gpt-4-turbo-2024-04-09",
        "gpt-4-turbo-preview",
        "gpt-4-0125-preview",
        "gpt-4-1106-preview",
        "gpt-4",
        "gpt-4-0613",
        "gpt-3.5-turbo",
        "gpt-3.5-turbo-0125",
        "gpt-3.5-turbo-1106",
        "gpt-3.5-turbo-0613",
    ]:
        raise RuntimeError(
            "This model currently does not support function calling."
        )

    def convert_param_type(type_name):
        # Map Python annotation names onto JSON-schema primitive names.
        # (Renamed from `type`, which shadowed the builtin.)
        if type_name in ("int", "integer"):
            return "integer"
        if type_name in ("str", "string"):
            return "string"
        return type_name

    def function_to_json_schema(func):
        # Build the OpenAI `tools` entry describing *func*'s signature.
        parameters = inspect.signature(func).parameters
        return {
            "type": "function",
            "function": {
                "name": func.__name__,
                "parameters": {
                    "type": "object",
                    "properties": {
                        param.name: {
                            "type": convert_param_type(
                                str(param.annotation)
                                .replace("<class '", "")
                                .replace("'>", "")
                            )
                        }
                        for param in parameters.values()
                    },
                },
            },
        }

    tools_to_use = []
    if self.tools:
        tools_to_use = [
            function_to_json_schema(tool_to_use) for tool_to_use in self.tools
        ]
    tool_choice = "auto"
    if self.tool_choice:
        tool_choice = (
            self.tool_choice
            if self.tool_choice in ["auto", "required", "none"]
            else {"type": "function", "function": {"name": self.tool_choice}}
        )

    # should we append "Never mention what tools you use." or provide a system prompt input argument
    messages = s.text_
    comp = openai_completion(
        client=self.client,
        token_usage=self.token_usage,
        is_chat=self.is_chat_model,
        model=self.model_name,
        prompt=messages,
        tools=tools_to_use,
        tool_choice=tool_choice,
        **self.spec_kwargs,
    )
    response_message = comp.choices[0].message
    tool_calls = response_message.tool_calls
    # Check if the model wanted to call a function.
    if tool_calls:
        # Fix: was `available_functions = ()` (a tuple) followed by
        # .append(...), which raises AttributeError.  A dict is also what
        # the name-based lookup below requires.
        # NOTE(review): assumes each tool is defined at this module's
        # global scope — confirm how user tools are actually registered.
        available_functions = {
            tool_name: globals()[tool_name] for tool_name in tools
        }
        # Conversation for the follow-up turn: the assistant message plus
        # one "tool" message per executed call.  (Was appended onto the
        # prompt string, which has no .append and would crash.)
        followup_messages = [response_message]
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            # Note: the JSON arguments may not always be valid; be sure
            # to handle errors.
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(**function_args)
            followup_messages.append(
                {
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    # The tool message content must be a string.
                    "content": str(function_response),
                }
            )
        # TODO: wire followup_messages into this request; the skeleton
        # currently re-sends only the accumulated prompt text.
        comp = openai_completion(
            client=self.client,
            token_usage=self.token_usage,
            is_chat=self.is_chat_model,
            model=self.model_name,
            prompt=s.text_,
            **self.spec_kwargs,
        )
    s.text_ += comp

def role_end_generate(
self,
s: StreamExecutor,
Expand Down
10 changes: 10 additions & 0 deletions python/sglang/lang/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
SglFunction,
SglGen,
SglImage,
SglFuncCall,
SglRoleBegin,
SglRoleEnd,
SglSelect,
Expand Down Expand Up @@ -367,6 +368,8 @@ def _execute(self, other):
elif isinstance(other, SglExprList):
for x in other.expr_list:
self._execute(x)
elif isinstance(other, SglFuncCall):
self._execute_func_call(other)
elif isinstance(other, SglRoleBegin):
self._execute_role_begin(other)
elif isinstance(other, SglRoleEnd):
Expand Down Expand Up @@ -488,6 +491,7 @@ def find_stop():
def _execute_gen(self, expr: SglGen):
sampling_params = self._resolve_sampling_params(expr.sampling_params)
name = expr.name
print("0-", self)

if not self.stream:
if self.num_api_spec_tokens is None:
Expand All @@ -510,11 +514,14 @@ def _execute_gen(self, expr: SglGen):
else: # Speculative execution on models with completion interface
comp, meta_info = self._spec_gen(sampling_params)

print("1-", comp)
self.text_ += comp
print("2-", self.text_)

self.variables[name] = comp
self.meta_info[name] = meta_info
self.variable_event[name].set()
print("3-", self.variables[name], name, self.meta_info[name])
else:
assert (
self.num_api_spec_tokens is None
Expand Down Expand Up @@ -554,6 +561,9 @@ def _execute_select(self, expr: SglSelect):
self.variable_event[name].set()
self.text_ += decision

def _execute_func_call(self, expr: SglFuncCall):
    """Hand a function-call expression off to the backend for execution."""
    backend = self.backend
    backend.function_calling(self, expr.tools, expr.tool_choice)

def _execute_variable(self, expr: SglVariable):
src_executor = expr.source_stream_executor
value = src_executor.get_var(expr.name)
Expand Down
13 changes: 13 additions & 0 deletions python/sglang/lang/ir.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,6 +424,19 @@ def __repr__(self):
return f"Select({self.name}, choices={self.choices})"


class SglFuncCall(SglExpr):
    """IR node for a function/tool-calling request.

    Carries the variable name to bind the result to, the tool names the
    model may invoke, and the requested tool-choice mode.
    """

    def __init__(self, name, tools, tool_choice):
        super().__init__()
        self.name = name
        self.tools = tools
        self.tool_choice = tool_choice

    def __repr__(self):
        detail = f"tools={self.tools}, tool_choice={self.tool_choice}"
        return f"FuncCall({self.name}, {detail})"


class SglFork(SglExpr):
def __init__(self, number, position_ids_offset=None):
super().__init__()
Expand Down

0 comments on commit 36ac1cf

Please sign in to comment.