Remixai chat #5241

Merged
36 commits merged on Oct 26, 2024
Changes from 32 commits

Commits (36)
edfb3bd
initial chat
STetsing Sep 27, 2024
1e68ae0
initiall
STetsing Sep 30, 2024
118d66e
nlux chat working local
STetsing Oct 2, 2024
abeac7d
linked explain to remix ai output
STetsing Oct 2, 2024
4c843e9
directed all logs to chat element
STetsing Oct 7, 2024
ad70e5e
styled the chat interface
STetsing Oct 10, 2024
39e89ab
turned remote to live
STetsing Oct 10, 2024
52408f3
minor
STetsing Oct 14, 2024
1348d1f
minor fix docs
STetsing Oct 14, 2024
2695aaf
Merge branch 'master' into remixai__chat
STetsing Oct 14, 2024
1743146
api ai desktop
STetsing Oct 14, 2024
8c11a35
Merge branch 'master' into remixai__chat
LianaHus Oct 15, 2024
2b0f7a2
cleanup and new send button on remixai chat
STetsing Oct 15, 2024
0d0dbd1
Merge branch 'remixai__chat' of https://github.com/ethereum/remix-pro…
STetsing Oct 15, 2024
5b69872
fixed lint
STetsing Oct 15, 2024
b6eeacf
enforced insertion on completion
STetsing Oct 16, 2024
917f562
minor
STetsing Oct 16, 2024
9d782ea
minor
STetsing Oct 17, 2024
d2d68ce
Merge branch 'master' into remixai__chat
STetsing Oct 17, 2024
bfa5eda
enable forward stability
STetsing Oct 17, 2024
6e5ed32
Merge branch 'remixai__chat' of https://github.com/ethereum/remix-pro…
STetsing Oct 17, 2024
707699b
fixed streaming error leaving out some text in chat UI
STetsing Oct 18, 2024
b237be8
matomo back in
STetsing Oct 18, 2024
e89e938
off rexix AI when starting
STetsing Oct 21, 2024
dd061e5
minor
STetsing Oct 22, 2024
2d4fbb5
enable remixai remote on desktop
STetsing Oct 22, 2024
8968ce6
remixai remote inference working on desktop
STetsing Oct 23, 2024
51c4fbd
improving completion process
STetsing Oct 24, 2024
f964671
improving completion process
STetsing Oct 24, 2024
e33392f
improving completion process
STetsing Oct 24, 2024
5f0dc4d
enabled completion at string begin
STetsing Oct 24, 2024
0f0809e
Merge branch 'master' into remixai__chat
STetsing Oct 24, 2024
d52765b
Update remixAIPlugin.tsx rm setting to side panel
STetsing Oct 24, 2024
a18460d
Merge branch 'master' into remixai__chat
STetsing Oct 25, 2024
09db2ad
Merge branch 'master' into remixai__chat
STetsing Oct 25, 2024
f5e116e
Merge branch 'master' into remixai__chat
Aniket-Engg Oct 26, 2024
9 changes: 3 additions & 6 deletions apps/circuit-compiler/src/app/components/container.tsx
@@ -73,16 +73,14 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
} else {
const message = `
error message: ${error}
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
} else {
const error = report.message
@@ -91,8 +89,7 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
}
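Each branch above now routes its message through the new chatPipe entry point instead of calling error_explaining directly. Since the same call shape repeats in all three branches, a tiny wrapper could keep the `as any` cast in one place — a sketch only, not part of this PR; the helper name and the loose plugin type are assumptions:

```ts
// Hypothetical helper, not part of this PR. It only assumes the plugin object
// exposes the standard `call` method and that 'remixAI' provides 'chatPipe'.
async function pipeCircomErrorToRemixAI (
  plugin: { call: (...args: any[]) => Promise<any> },
  error: unknown,
  report: unknown
): Promise<void> {
  const message = `
  error message: ${error}
  full circom error: ${JSON.stringify(report, null, 2)}
  explain why the error occurred and how to fix it.
  `
  // The chat UI buffers this request and runs error_explaining once the piped
  // message is processed (see remixAIPlugin.chatPipe below).
  await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
```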

2 changes: 1 addition & 1 deletion apps/remix-ide/src/app.js
@@ -557,7 +557,7 @@ class AppComponent {
await this.appManager.activatePlugin(['solidity-script', 'remix-templates'])

if (isElectron()) {
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID'])
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat']) // 'remixAID'
}

this.appManager.on(
138 changes: 96 additions & 42 deletions apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -1,49 +1,77 @@
import * as packageJson from '../../../../../package.json'
import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel } from '@remix/remix-ai-core';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
import { CustomRemixApi } from '@remix-api'

type chatRequestBufferT<T> = {
[key in keyof T]: T[key]
}

const profile = {
name: 'remixAI',
displayName: 'Remix AI',
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initialize"],
"initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
kind: '',
// location: 'sidePanel',
location: 'sidePanel',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
version: packageJson.version,
maintainedBy: 'Remix'
}

export class RemixAIPlugin extends Plugin {
// add Plugin<any, CustomRemixApi>
export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
chatRequestBuffer: chatRequestBufferT<any> = null
agent: CodeExplainAgent
useRemoteInferencer:boolean = false

constructor(inDesktop:boolean) {
super(profile)
this.isOnDesktop = inDesktop

this.agent = new CodeExplainAgent(this)
// with remote inferencing, the user machine doesn't spend local resources
}

onActivation(): void {
this.initialize(null, null, null, false)
if (this.isOnDesktop) {
console.log('Activating RemixAIPlugin on desktop')
// this.on(this.remixDesktopPluginName, 'activated', () => {
this.useRemoteInferencer = true
this.initialize(null, null, null, this.useRemoteInferencer);
// })
} else {
console.log('Activating RemixAIPlugin on browser')
this.useRemoteInferencer = true
this.initialize()
}
// this.setRemixAIOnSidePannel(false)
}

setRemixAIOnSidePannel(resize:boolean=false){
if (resize){
this.call('sidePanel', 'pinView', profile)

Contributor: if and else are doing the same thing.

Collaborator (author): Waiting for some answers from David to resize the sidepanel

Collaborator: we will remove it from there anyway, you don't need to do this @STetsing

} else {
this.call('sidePanel', 'pinView', profile)
}
}

async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
// on desktop use remote inferencer -> false
console.log('initialize on desktop')
const res = await this.call(this.remixDesktopPluginName, 'initializeModelBackend', useRemote, model1, model2)
if (res) {
this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
@@ -60,7 +88,6 @@ export class RemixAIPlugin extends Plugin {
}

} else {
// on browser
this.remoteInferencer = new RemoteInferencer(remoteModel?.apiUrl, remoteModel?.completionUrl)
this.remoteInferencer.event.on('onInference', () => {
this.isInferencing = true
@@ -80,90 +107,117 @@ export class RemixAIPlugin extends Plugin {
return
}

if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_generation', prompt)
} else {
return await this.remoteInferencer.code_generation(prompt)
}
}

async code_completion(prompt: string, promptAfter: string): Promise<any> {
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_completion', prompt, promptAfter)
} else {
return await this.remoteInferencer.code_completion(prompt, promptAfter)
}
}

async solidity_answer(prompt: string): Promise<any> {
async solidity_answer(prompt: string, params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}

this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })

const newPrompt = await this.agent.chatCommand(prompt)
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
} else {
result = await this.remoteInferencer.solidity_answer(prompt)
result = await this.remoteInferencer.solidity_answer(newPrompt)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}

async code_explaining(prompt: string): Promise<any> {
async code_explaining(prompt: string, context: string, params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}

this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })

let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)

} else {
result = await this.remoteInferencer.code_explaining(prompt)
result = await this.remoteInferencer.code_explaining(prompt, context, params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}

async error_explaining(prompt: string): Promise<any> {
async error_explaining(prompt: string, context: string="", params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}

this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })

let result
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
result = await this.remoteInferencer.error_explaining(prompt)
result = await this.remoteInferencer.error_explaining(prompt, params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}

async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
} else {
return await this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
}
}

// render() {

Collaborator: need those?

Collaborator (author): This goes out from this PR

// return (
// <RemixAITab plugin={this}></RemixAITab>
// )
// }
chatPipe(fn, prompt: string, context?: string, pipeMessage?: string){
if (this.chatRequestBuffer == null){
this.chatRequestBuffer = {
fn_name: fn,
prompt: prompt,
context: context
}
if (pipeMessage) ChatApi.composer.send(pipeMessage)
else {
if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
else if (fn === "error_explaining") ChatApi.composer.send("Explain the error")
else if (fn === "solidity_answer") ChatApi.composer.send("Answer the following question")
else console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
else {
console.log("chatRequestBuffer is not empty. First process the last request.")
}
}

async ProcessChatRequestBuffer(params:IParams=GenerationParams){
if (this.chatRequestBuffer != null){
const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
this.chatRequestBuffer = null
return result
}
else {
console.log("chatRequestBuffer is empty.")
return ""
}
}
isChatRequestPending(){
return this.chatRequestBuffer != null
}

render() {
return (
<RemixAITab plugin={this}></RemixAITab>
)
}
}
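chatPipe and ProcessChatRequestBuffer together form a simple hand-off: a caller buffers its request and posts a short message into the chat composer via ChatApi; when the chat UI later processes that message, it drains the buffer and invokes the buffered method. A minimal sketch of a chat-side consumer follows — the structural type and the handler name are assumptions; the real wiring lives in @remix-ui/remix-ai and is not part of this diff:

```ts
// Sketch of a chat-side consumer; names and wiring are assumptions, the actual
// implementation in @remix-ui/remix-ai may differ.
type RemixAILike = {
  isChatRequestPending(): boolean
  ProcessChatRequestBuffer(): Promise<any>
  solidity_answer(prompt: string): Promise<any>
}

async function handleComposerMessage (plugin: RemixAILike, userMessage: string): Promise<any> {
  if (plugin.isChatRequestPending()) {
    // A caller (circuit compiler, vyper, editor, ...) queued a request via chatPipe:
    // run the buffered method (code_explaining, error_explaining, solidity_answer)
    // with the default GenerationParams.
    return await plugin.ProcessChatRequestBuffer()
  }
  // No piped request pending: treat the composer text as a regular question.
  return await plugin.solidity_answer(userMessage)
}
```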
5 changes: 3 additions & 2 deletions apps/remix-ide/src/app/tabs/locales/en/editor.json
@@ -25,8 +25,9 @@
"editor.explainFunction": "Explain this function",
"editor.explainFunctionSol": "Explain this code",
"editor.explainFunction2": "Explain the function \"{name}\"",
"editor.explainFunctionByAI": "solidity code: {content}\n Explain the function {currentFunction}",
"editor.explainFunctionByAISol": "solidity code: {content}\n Explain the function {currentFunction}",
"editor.explainFunctionByAI": "```\n{content}\n```\nExplain the function {currentFunction}",
"editor.explainFunctionByAISol": "```\n{content}\n```\nExplain the function {currentFunction}",
"editor.ExplainPipeMessage": "```\n {content}\n```\nExplain the snipped above",
"editor.executeFreeFunction": "Run a free function",
"editor.executeFreeFunction2": "Run the free function \"{name}\"",
"editor.toastText1": "This can only execute free function",
1 change: 0 additions & 1 deletion apps/remix-ide/src/remixAppManager.js
@@ -78,7 +78,6 @@ let requiredModules = [
// 'doc-gen',
'remix-templates',
'remixAID',
'remixAI',
'solhint',
'dgit',
'pinnedPanel',
14 changes: 9 additions & 5 deletions apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -404,8 +404,12 @@ export class InferenceManager implements ICompletions {
}
, responseType: 'stream' });

const userPrompt = payload[Object.keys(payload)[0]]
const userPrompt = payload.prompt
let resultText = ""
if (payload.return_stream_response) {
return response
}

response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
@@ -449,14 +453,14 @@
}
}

async code_completion(context: any, params:IParams=CompletionParams): Promise<any> {
async code_completion(prompt, promptAfter, params:IParams=CompletionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}

// as of now no prompt required
const payload = { context_code: context, ...params }
const payload = { prompt, 'context':promptAfter, ...params }
return this._makeInferenceRequest('code_completion', payload, AIRequestType.COMPLETION)
}

@@ -484,9 +488,9 @@
return
}
if (params.stream_result) {
return this._streamInferenceRequest('code_explaining', { code, context, ...params })
return this._streamInferenceRequest('code_explaining', { prompt: code, context, ...params })
} else {
return this._makeInferenceRequest('code_explaining', { code, context, ...params }, AIRequestType.GENERAL)
return this._makeInferenceRequest('code_explaining', { prompt: code, context, ...params }, AIRequestType.GENERAL)
}
}
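code_completion now sends the text before the cursor as `prompt` and the text after it as `context`, and when `return_stream_response` is set the request helper hands back the raw streaming response instead of accumulated text. A sketch of what a streaming caller could look like — the import paths, the exact params fields, and the chunk schema are assumptions, not shown in this diff:

```ts
// Sketch only, not part of the PR. Assumes GenerationParams is importable here as it
// is in the browser plugin, and that its flags spread into the request payload.
import { GenerationParams } from '@remix/remix-ai-core' // assumed import path
import { InferenceManager } from './InferenceServerManager' // assumed relative path

async function streamExplanation (inferencer: InferenceManager, code: string, context: string) {
  const params = { ...GenerationParams, stream_result: true, return_stream_response: true }
  // With return_stream_response set, _streamInferenceRequest returns the raw axios
  // response, so the caller consumes response.data as a Node stream.
  const response = await inferencer.code_explaining(code, context, params)
  response.data.on('data', (chunk: Buffer) => {
    // The manager's own streaming path parses each chunk as JSON; the exact field
    // names are not shown in this diff, so just print the raw payload here.
    console.log(chunk.toString())
  })
  await new Promise<void>((resolve) => response.data.on('end', resolve))
}
```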

5 changes: 3 additions & 2 deletions apps/remixdesktop/src/plugins/remixAIDektop.ts
@@ -45,6 +45,7 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {

async onActivation(): Promise<void> {
this.onload(() => {
this.emit('activated')
})
}

@@ -81,9 +82,9 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
return true
}

async code_completion(context: any) {
async code_completion(prompt: string, promptAfter: string) {
// use general purpose model
return this.desktopInferencer.code_completion(context)
return this.desktopInferencer.code_completion(prompt, promptAfter)
}

async code_insertion(msg_pfx: string, msg_sfx: string) {
10 changes: 5 additions & 5 deletions apps/vyper/src/app/utils/remix-client.tsx
@@ -7,6 +7,7 @@ import { ExampleContract } from '../components/VyperResult'
import EventEmitter from 'events'
import { CustomRemixApi } from '@remix-api'


export type VyperComplierAddress = 'https://vyper2.remixproject.org/' | 'http://localhost:8000/'
export class RemixClient extends PluginClient<any, CustomRemixApi> {
private client = createClient<Api, Readonly<RemixApi>>(this)
@@ -67,11 +68,10 @@ export class RemixClient extends PluginClient<any, CustomRemixApi> {
}
try {
// TODO: remove! no formatting required since already handled on server
const formattedMessage = `
${message}
can you explain why this error occurred and how to fix it?
`
await this.client.call('remixAI' as any, 'error_explaining', message)
const file = await this.client.call('fileManager', 'getCurrentFile')
const content = await this.client.call('fileManager', 'readFile', file)
const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
} catch (err) {
console.error('unable to askGpt')
console.error(err)