Skip to content

Commit 9d44878

Browse files
committed
Add Temperature
1 parent 9cc9839 commit 9d44878

File tree

2 files changed

+25
-18
lines changed

2 files changed

+25
-18
lines changed

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/ChatCompletionProvider.cs

Lines changed: 21 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,12 @@
11
using BotSharp.Abstraction.Agents;
22
using BotSharp.Abstraction.Agents.Enums;
3-
using BotSharp.Abstraction.Conversations;
43
using BotSharp.Abstraction.Loggers;
54
using BotSharp.Abstraction.Functions.Models;
65
using BotSharp.Abstraction.Routing;
76
using BotSharp.Plugin.GoogleAI.Settings;
87
using LLMSharp.Google.Palm;
98
using Microsoft.Extensions.Logging;
10-
using System.Diagnostics.Metrics;
11-
using static System.Net.Mime.MediaTypeNames;
9+
using LLMSharp.Google.Palm.DiscussService;
1210

1311
namespace BotSharp.Plugin.GoogleAI.Providers;
1412

@@ -39,19 +37,25 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
3937

4038
var client = new GooglePalmClient(apiKey: _settings.PaLM.ApiKey);
4139

42-
var (prompt, messages) = PrepareOptions(agent, conversations);
40+
var (prompt, messages, hasFunctions) = PrepareOptions(agent, conversations);
4341

4442
RoleDialogModel msg;
4543

46-
if (messages == null)
44+
if (hasFunctions)
4745
{
4846
// use text completion
49-
var response = client.GenerateTextAsync(prompt, null).Result;
47+
// var response = client.GenerateTextAsync(prompt, null).Result;
48+
var response = client.ChatAsync(new PalmChatCompletionRequest
49+
{
50+
Context = prompt,
51+
Messages = messages,
52+
Temperature = 0.1f
53+
}).Result;
5054

5155
var message = response.Candidates.First();
5256

5357
// check if returns function calling
54-
var llmResponse = message.Output.JsonContent<FunctionCallingResponse>();
58+
var llmResponse = message.Content.JsonContent<FunctionCallingResponse>();
5559

5660
msg = new RoleDialogModel(llmResponse.Role, llmResponse.Content)
5761
{
@@ -79,13 +83,14 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
7983
Task.WaitAll(hooks.Select(hook =>
8084
hook.AfterGenerated(msg, new TokenStatsModel
8185
{
86+
Prompt = prompt,
8287
Model = _model
8388
})).ToArray());
8489

8590
return msg;
8691
}
8792

88-
private (string, List<PalmChatMessage>) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
93+
private (string, List<PalmChatMessage>, bool) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
8994
{
9095
var prompt = "";
9196

@@ -99,6 +104,9 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
99104
var routing = _services.GetRequiredService<IRoutingService>();
100105
var router = routing.Router;
101106

107+
var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
108+
.ToList();
109+
102110
if (agent.Functions != null && agent.Functions.Count > 0)
103111
{
104112
prompt += "\r\n\r\n[Functions] defined in JSON Schema:\r\n";
@@ -118,13 +126,13 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
118126

119127
prompt += "\r\n\r\n" + router.Templates.FirstOrDefault(x => x.Name == "response_with_function").Content;
120128

121-
return (prompt, null);
129+
return (prompt, new List<PalmChatMessage>
130+
{
131+
new PalmChatMessage("Which function should be used for the next step based on latest user or function response, output your response in JSON:", AgentRole.User),
132+
}, true);
122133
}
123134

124-
var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
125-
.ToList();
126-
127-
return (prompt, messages);
135+
return (prompt, messages, false);
128136
}
129137

130138
public Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived, Func<RoleDialogModel, Task> onFunctionExecuting)
Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,9 @@
1+
[Output Requirements]
12
1. Read the [Functions] definition, you can utilize the function to retrieve data or execute actions.
2-
2. Think step by step, check if specific function will provider data to help complete user request based on the [Conversation].
3+
2. Think step by step, check if a specific function will provide data to help complete the user request based on the conversation.
34
3. If you need to call a function to decide how to respond to the user,
45
response in format: {"role": "function", "reason":"why choose this function", "function_name": "", "args": {}},
56
otherwise respond in the format: {"role": "assistant", "reason":"why response to user", "content":"next step question"}.
6-
4. If the [Conversation] already contains the function execution result, don't need to call it again.
7+
4. If the conversation already contains the function execution result, don't need to call it again.
78
5. If user mentioned some specific requirment, don't ask this question in your response.
8-
6. Don't repeat the same question in your response.
9-
10-
Which function should be used for the next step based on latest user's response, output your response in JSON:
9+
6. Don't repeat the same question in your response.

0 commit comments

Comments
 (0)