Fix prompt bug, tell LLM to start a conversation more often, don't load saved chats from storage (just keep in RAM)
kercre123 committed Jul 29, 2024
1 parent 7bf4c71 commit 63f9d28
Showing 5 changed files with 9 additions and 33 deletions.
21 changes: 0 additions & 21 deletions chipper/pkg/vars/vars.go
@@ -43,7 +43,6 @@ var (
      VoskModelPath string = "../vosk/models/"
      WhisperModelPath string = "../whisper.cpp/models/"
      SessionCertPath string = "./session-certs/"
-     SavedChatsPath string = "./openaiChats.json"
      VersionFile string = "./version"
  )

@@ -180,7 +179,6 @@ func Init() {
      ServerConfigPath = join(podDir, "./certs/server_config.json")
      Certs = join(podDir, "./certs")
      SessionCertPath = join(podDir, SessionCertPath)
-     SavedChatsPath = join(podDir, SavedChatsPath)
      if runtime.GOOS == "android" {
          VersionFile = AndroidPath + "/static/version"
      }
@@ -232,9 +230,6 @@ func Init() {
      // load api config (config.go)
      ReadConfig()

-     // load openai chats
-     LoadChats()
-
      // check models folder, add all models to DownloadedVoskModels
      if APIConfig.STT.Service == "vosk" {
          GetDownloadedVoskModels()
@@ -420,22 +415,6 @@ func AddToRInfo(esn string, id string, ip string) {
      RecurringInfo = append(RecurringInfo, rinfo)
  }

- func SaveChats() {
-     marshalled, err := json.Marshal(RememberedChats)
-     if err != nil {
-         logger.Println(err)
-     }
-     os.WriteFile(SavedChatsPath, marshalled, 0777)
- }
-
- func LoadChats() {
-     file, err := os.ReadFile(SavedChatsPath)
-     if err != nil {
-         return
-     }
-     json.Unmarshal(file, &RememberedChats)
- }
-
  func GetRobot(esn string) (*vector.Vector, error) {
      var guid string
      var target string
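The net effect of these deletions is that remembered chats now live only in memory for the lifetime of the chipper process. A minimal sketch of the shape that remains in the vars package (anything beyond the ESN field is an assumption; the real struct also carries the chat transcript):

package vars

// RememberedChat pairs a robot (by serial number/ESN) with its recent chat.
// Only the ESN field is visible in this diff; transcript fields are omitted.
type RememberedChat struct {
    ESN string
    // ... transcript fields (assumed)
}

// RememberedChats is now a RAM-only store: with SavedChatsPath, SaveChats,
// and LoadChats removed, nothing is read from or written to openaiChats.json,
// so the slice starts empty on every restart.
var RememberedChats []RememberedChat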
1 change: 0 additions & 1 deletion chipper/pkg/wirepod/config-ws/webserver.go
@@ -286,7 +286,6 @@ func handleIsRunning(w http.ResponseWriter) {
  }

  func handleDeleteChats(w http.ResponseWriter) {
-     os.Remove(vars.SavedChatsPath)
      vars.RememberedChats = []vars.RememberedChat{}
      fmt.Fprint(w, "done")
  }
2 changes: 0 additions & 2 deletions chipper/pkg/wirepod/ttr/kgsim.go
@@ -36,12 +36,10 @@ func PlaceChat(chat vars.RememberedChat) {
      for i, achat := range vars.RememberedChats {
          if achat.ESN == chat.ESN {
              vars.RememberedChats[i] = chat
-             vars.SaveChats()
              return
          }
      }
      vars.RememberedChats = append(vars.RememberedChats, chat)
-     vars.SaveChats()
  }

  // remember last 16 lines of chat
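For reference, this is roughly what PlaceChat looks like after the change, reconstructed from the hunk above: it upserts a robot's chat in the in-memory slice by ESN and no longer persists anything to disk.

// PlaceChat stores the latest chat for a robot, keyed by ESN, in RAM only.
func PlaceChat(chat vars.RememberedChat) {
    for i, achat := range vars.RememberedChats {
        if achat.ESN == chat.ESN {
            // A chat for this robot already exists; replace it in place.
            vars.RememberedChats[i] = chat
            return
        }
    }
    // First chat seen for this robot; append it.
    vars.RememberedChats = append(vars.RememberedChats, chat)
}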
16 changes: 8 additions & 8 deletions chipper/pkg/wirepod/ttr/kgsim_cmds.go
@@ -127,7 +127,7 @@ var ValidLLMCommands []LLMCommand = []LLMCommand{
  },
  {
      Command: "newVoiceRequest",
-     Description: "Starts a new voice command from the robot. Use this if you want more input from the user/if you want to carry out a conversation. You are the only one who can end it in this case. This goes at the end of your response, if you use it.",
+     Description: "Starts a new voice command from the robot. Use this if you want more input from the user after your response/if you want to carry out a conversation. This goes at the end of your response, if you use it.",
      ParamChoices: "now",
      Action: ActionNewRequest,
      SupportedModels: []string{"all"},
@@ -158,13 +158,13 @@ func CreatePrompt(origPrompt string, model string, isKG bool) string {
                  promptAppendage := "\n\nCommand Name: " + cmd.Command + "\nDescription: " + cmd.Description + "\nParameter choices: " + cmd.ParamChoices
                  prompt = prompt + promptAppendage
              }
-             if isKG && vars.APIConfig.Knowledge.SaveChat {
-                 promptAppentage := "\n\nNOTE: You are in 'conversation' mode. If you ask the user a question, use newVoiceRequest. If you don't, you should end the conversation by not using it."
-                 prompt = prompt + promptAppentage
-             } else {
-                 promptAppentage := "\n\nNOTE: You are NOT in 'conversation' mode. Refrain from asking the user any questions and from using newVoiceRequest."
-                 prompt = prompt + promptAppentage
-             }
          }
+         if isKG && vars.APIConfig.Knowledge.SaveChat {
+             promptAppentage := "\n\nNOTE: You are in 'conversation' mode. If you ask the user a question near the end of your response, you MUST use newVoiceRequest. If you decide you want to end the conversation, you should not use it."
+             prompt = prompt + promptAppentage
+         } else {
+             promptAppentage := "\n\nNOTE: You are NOT in 'conversation' mode. Refrain from asking the user any questions and from using newVoiceRequest."
+             prompt = prompt + promptAppentage
+         }
      }
      if os.Getenv("DEBUG_PRINT_PROMPT") == "true" {
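The prompt bug fixed here is that the conversation-mode note used to be appended inside the loop over ValidLLMCommands, so it ended up in the prompt once per command; it is now appended exactly once after the loop. A rough sketch of the corrected flow, reconstructed from the hunk above (the "commands enabled" gate and per-model filtering in the real function are elided, so treat this as an illustration rather than the exact source):

func CreatePrompt(origPrompt string, model string, isKG bool) string {
    prompt := origPrompt
    // Each command is described once.
    for _, cmd := range ValidLLMCommands {
        prompt = prompt + "\n\nCommand Name: " + cmd.Command +
            "\nDescription: " + cmd.Description +
            "\nParameter choices: " + cmd.ParamChoices
    }
    // The conversation-mode note is appended exactly once, after the loop,
    // rather than once per command as before this commit.
    if isKG && vars.APIConfig.Knowledge.SaveChat {
        prompt = prompt + "\n\nNOTE: You are in 'conversation' mode. If you ask the user a question near the end of your response, you MUST use newVoiceRequest. If you decide you want to end the conversation, you should not use it."
    } else {
        prompt = prompt + "\n\nNOTE: You are NOT in 'conversation' mode. Refrain from asking the user any questions and from using newVoiceRequest."
    }
    return prompt
}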
2 changes: 1 addition & 1 deletion chipper/webroot/setup.html
@@ -168,7 +168,7 @@ <h3>Knowledge Graph Setup</h3>
  <input type="checkbox" id="saveChatYes" name="saveChatselect" />
  <label class="checkbox-label" for="saveChatYes">
      Enable conversations via "I have a question". This also allows previous chats to be used in the
-     context of future responses.
+     context of future responses. LLM actions (the box above this one) must be enabled for conversations to work.
  </label></br>
  <a href="#" onclick="deleteSavedChats()">Delete Saved Chats</a>
  </span>
