|
| 1 | +use serde::{Deserialize, Serialize}; |
| 2 | + |
| 3 | +use crate::managed_agents::resolve_command; |
| 4 | + |
/// One turn of the persona-creator conversation, deserialized from the
/// frontend (field names arrive in camelCase on the wire).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ChatMessage {
    // Speaker role; prompt formatting labels "assistant" specially and
    // treats every other value as the user.
    pub role: String,
    // The message text.
    pub content: String,
}
| 11 | + |
/// Reply payload serialized back to the frontend (camelCase field names).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ChatResponse {
    // The assistant's reply text, extracted from the goose transcript.
    pub content: String,
}
| 17 | + |
| 18 | +/// Send messages to an LLM for the persona creator chat. |
| 19 | +/// |
| 20 | +/// Uses goose - the app's primary agent runtime - which resolves |
| 21 | +/// provider, model, and credentials from its own config. |
| 22 | +#[tauri::command] |
| 23 | +pub async fn persona_creator_chat( |
| 24 | + system_prompt: String, |
| 25 | + messages: Vec<ChatMessage>, |
| 26 | +) -> Result<ChatResponse, String> { |
| 27 | + let goose_path = resolve_command("goose", None).ok_or_else(|| { |
| 28 | + "No LLM runtime found. Install goose to use the AI persona creator.".to_string() |
| 29 | + })?; |
| 30 | + |
| 31 | + goose_chat(goose_path, system_prompt, messages).await |
| 32 | +} |
| 33 | + |
| 34 | +/// Format the conversation history as a single text prompt for goose. |
| 35 | +/// |
| 36 | +/// For single-turn (one user message), returns the message content directly. |
| 37 | +/// For multi-turn, includes prior exchanges as context so the LLM can continue |
| 38 | +/// the conversation coherently. |
| 39 | +fn format_conversation_prompt(messages: &[ChatMessage]) -> String { |
| 40 | + if messages.len() <= 1 { |
| 41 | + return messages |
| 42 | + .first() |
| 43 | + .map(|m| m.content.clone()) |
| 44 | + .unwrap_or_default(); |
| 45 | + } |
| 46 | + |
| 47 | + let mut parts = Vec::with_capacity(messages.len()); |
| 48 | + for (i, msg) in messages.iter().enumerate() { |
| 49 | + if i < messages.len() - 1 { |
| 50 | + let label = if msg.role == "assistant" { |
| 51 | + "Assistant" |
| 52 | + } else { |
| 53 | + "User" |
| 54 | + }; |
| 55 | + parts.push(format!("{label}: {}", msg.content)); |
| 56 | + } |
| 57 | + } |
| 58 | + |
| 59 | + let history = parts.join("\n\n"); |
| 60 | + let last = &messages[messages.len() - 1].content; |
| 61 | + |
| 62 | + format!( |
| 63 | + "Here is our conversation so far:\n\n{history}\n\n---\n\nNow respond to this message:\n\n{last}" |
| 64 | + ) |
| 65 | +} |
| 66 | + |
| 67 | +/// Run a one-shot LLM completion through goose. |
| 68 | +async fn goose_chat( |
| 69 | + goose_path: std::path::PathBuf, |
| 70 | + system_prompt: String, |
| 71 | + messages: Vec<ChatMessage>, |
| 72 | +) -> Result<ChatResponse, String> { |
| 73 | + let prompt_text = format_conversation_prompt(&messages); |
| 74 | + |
| 75 | + let output = tokio::task::spawn_blocking(move || { |
| 76 | + std::process::Command::new(&goose_path) |
| 77 | + .args([ |
| 78 | + "run", |
| 79 | + "-t", |
| 80 | + &prompt_text, |
| 81 | + "--system", |
| 82 | + &system_prompt, |
| 83 | + "--no-session", |
| 84 | + "--no-profile", |
| 85 | + "--max-turns", |
| 86 | + "1", |
| 87 | + "-q", |
| 88 | + "--output-format", |
| 89 | + "json", |
| 90 | + ]) |
| 91 | + .stdin(std::process::Stdio::null()) |
| 92 | + .stdout(std::process::Stdio::piped()) |
| 93 | + .stderr(std::process::Stdio::piped()) |
| 94 | + .output() |
| 95 | + .map_err(|e| format!("failed to spawn goose: {e}")) |
| 96 | + }) |
| 97 | + .await |
| 98 | + .map_err(|e| format!("goose task failed: {e}"))? |
| 99 | + .map_err(|e: String| e)?; |
| 100 | + |
| 101 | + if !output.status.success() { |
| 102 | + let stderr = String::from_utf8_lossy(&output.stderr); |
| 103 | + return Err(format!( |
| 104 | + "goose exited with {}: {}", |
| 105 | + output.status.code().unwrap_or(-1), |
| 106 | + stderr.chars().take(500).collect::<String>() |
| 107 | + )); |
| 108 | + } |
| 109 | + |
| 110 | + let response: serde_json::Value = serde_json::from_slice(&output.stdout) |
| 111 | + .map_err(|e| format!("failed to parse goose JSON: {e}"))?; |
| 112 | + |
| 113 | + // Extract the last assistant message's text content. |
| 114 | + let content = response["messages"] |
| 115 | + .as_array() |
| 116 | + .and_then(|msgs| { |
| 117 | + msgs.iter() |
| 118 | + .rev() |
| 119 | + .find(|m| m["role"].as_str() == Some("assistant")) |
| 120 | + }) |
| 121 | + .and_then(|msg| msg["content"].as_array()) |
| 122 | + .and_then(|blocks| { |
| 123 | + blocks |
| 124 | + .iter() |
| 125 | + .find(|b| b["type"].as_str() == Some("text")) |
| 126 | + .and_then(|b| b["text"].as_str()) |
| 127 | + }) |
| 128 | + .unwrap_or("") |
| 129 | + .to_string(); |
| 130 | + |
| 131 | + if content.is_empty() { |
| 132 | + return Err("goose returned no assistant response".to_string()); |
| 133 | + } |
| 134 | + |
| 135 | + Ok(ChatResponse { content }) |
| 136 | +} |
0 commit comments