From 60ea5692aae1f2592d7a7f3c80434e850ae341af Mon Sep 17 00:00:00 2001
From: Paul Irish
Date: Sun, 2 Jun 2024 14:06:01 -0700
Subject: [PATCH] gemi tweaks

---
 fish/aliases.fish | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/fish/aliases.fish b/fish/aliases.fish
index d56e09c3a..aa690369d 100644
--- a/fish/aliases.fish
+++ b/fish/aliases.fish
@@ -139,14 +139,26 @@ alias brew_update="brew -v update; brew upgrade --force-bottle --cleanup; brew c
 alias update_brew_npm_gem='brew_update; npm install npm -g; npm update -g; sudo gem update --system; sudo gem update --no-document'
 
-abbr gemini "llm -m gemini-1.5-pro-latest"
-
 function gemi
     # using https://github.com/simonw/llm-gemini and llm
-    if test -n "$argv[1]"
-        llm prompt -m gemini-1.5-pro-latest $argv[1] | deno run --allow-env --allow-read --allow-run bin/render-streaming-markdown.ts
-    else
+    # no args? chat. otherwise use prompt, and allow unquoted stuff to work too
+    #     gemi
+    #     gemi tell me a joke
+    #     gemi "tell me a joke"
+    if test -z "$argv[1]"
+        # no markdown parsing here without some real fancy stuff. because you dont want to send to markdown renderer (glow) inbetween backticks, etc.
         llm chat --continue -m gemini-1.5-pro-latest
+    else
+        llm prompt -m gemini-1.5-pro-latest "$argv" && echo "⬇️… and now rendered…⬇️" && llm logs -r | glow
+    end
+end
+
+function openai
+    # using llm. same dealio as above
+    if test -z "$argv[1]"
+        llm chat --continue -m gpt-4o
+    else
+        llm prompt -m gpt-4o "$argv" && echo "⬇️… and now rendered…⬇️" && llm logs -r | glow
     end
 end
 
 
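
# Usage sketch (not applied by the patch; assumes `llm` with the llm-gemini plugin
# and `glow` are installed, and the functions above are loaded into fish):
#
#   gemi                      # no args: drops into `llm chat --continue -m gemini-1.5-pro-latest`
#   gemi tell me a joke       # unquoted args work: fish expands "$argv" to one space-joined string
#   gemi "tell me a joke"     # quoted works too; the reply is then re-rendered via `llm logs -r | glow`
#   openai tell me a joke     # same flow, but against gpt-4o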