From edfb3bda09d9006a92e3a77804d397c736281289 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Fri, 27 Sep 2024 15:42:52 +0200
Subject: [PATCH 01/27] initial chat
---
.../src/app/plugins/remixAIPlugin.tsx | 14 +-
apps/remix-ide/src/remixAppManager.js | 1 -
.../src/inferencers/remote/remoteInference.ts | 2 +-
.../remix-ai/src/lib/components/Default.tsx | 182 +++++++++++-------
package.json | 3 +
yarn.lock | 17 ++
6 files changed, 138 insertions(+), 81 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 0fd0917d082..52f236f1593 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -16,13 +16,13 @@ const profile = {
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
kind: '',
- // location: 'sidePanel',
+ location: 'sidePanel',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
version: packageJson.version,
maintainedBy: 'Remix'
}
-export class RemixAIPlugin extends Plugin {
+export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
readonly remixDesktopPluginName = 'remixAID'
@@ -169,9 +169,9 @@ export class RemixAIPlugin extends Plugin {
}
}
- // render() {
- // return (
- //
- // )
- // }
+ render() {
+ return (
+
+ )
+ }
}
diff --git a/apps/remix-ide/src/remixAppManager.js b/apps/remix-ide/src/remixAppManager.js
index 9dab012bd55..1f080ad7ff2 100644
--- a/apps/remix-ide/src/remixAppManager.js
+++ b/apps/remix-ide/src/remixAppManager.js
@@ -77,7 +77,6 @@ let requiredModules = [ // services + layout views + system views
'doc-gen',
'remix-templates',
'remixAID',
- 'remixAI',
'solhint',
'dgit',
'pinnedPanel',
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 638b35422b2..0c37cf751ac 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -10,7 +10,7 @@ export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
max_history = 7
- model_op = RemoteBackendOPModel.DEEPSEEK // default model operation change this to llama if necessary
+ model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
constructor(apiUrl?:string, completionUrl?:string) {
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 59239309ed5..b5e9291fb14 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,84 +1,122 @@
import React, { useContext, useEffect, useState } from 'react'
import '../remix-ai.css'
-import { DefaultModels } from '@remix/remix-ai-core';
+import { DefaultModels, GenerationParams } from '@remix/remix-ai-core';
+import { StreamSend, StreamingAdapterObserver } from '@nlux/react';
+import axios from 'axios';
+import { AiChat, useAsStreamAdapter } from '@nlux/react';
+import '@nlux/themes/nova.css';
+
+import { user, assistantAvatar } from './personas';
+const demoProxyServerUrl = 'https://solcoder.remixproject.org';
export const Default = (props) => {
- const [searchText, setSearchText] = useState('');
- const [resultText, setResultText] = useState('');
- const pluginName = 'remixAI'
- const appendText = (newText) => {
- setResultText(resultText => resultText + newText);
- }
- useEffect(() => {
- const handleResultReady = async (e) => {
- appendText(e);
- };
- if (props.plugin.isOnDesktop ) {
- props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
- handleResultReady(value);
- })
- }
- }, [])
+ const send: StreamSend = async (
+ prompt: string,
+ observer: StreamingAdapterObserver,
+ ) => {
+ console.log(prompt);
+ const response = await props.plugin.call('remixAI', 'solidity_answer', prompt);
+ observer.next(response);
+ observer.complete();
+};
+ const adapter = useAsStreamAdapter(send, []);
return (
-
-
-
console.log('searchText not implememted')}
- >
-
+
+ );
+};
-
+// useEffect(() => {
+// const handleResultReady = async (e) => {
+// appendText(e);
+// };
+// if (props.plugin.isOnDesktop ) {
+// props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
+// handleResultReady(value);
+// })
+// }
+// }, [])
-
+// return (
+//
+//
+//
console.log('searchText not implememted')}
+// >
+//
-
-
-
-
- );
-}
\ No newline at end of file
+// onClick={async () => {
+// if (props.plugin.isOnDesktop ) {
+// await props.plugin.call(pluginName, 'downloadModel', DefaultModels()[3]);
+// }
+// }}
+// > Download Model
+
+//
+
+//
+//
+//
+//
+// );
+// }
\ No newline at end of file
diff --git a/package.json b/package.json
index 31668fbca0d..3286dd4f3d1 100644
--- a/package.json
+++ b/package.json
@@ -106,6 +106,9 @@
"@isomorphic-git/lightning-fs": "^4.4.1",
"@metamask/eth-sig-util": "^7.0.2",
"@microlink/react-json-view": "^1.23.0",
+ "@nlux/core": "^2.17.1",
+ "@nlux/react": "^2.17.1",
+ "@nlux/themes": "^2.17.1",
"@openzeppelin/contracts": "^5.0.0",
"@openzeppelin/upgrades-core": "^1.30.0",
"@openzeppelin/wizard": "0.4.0",
diff --git a/yarn.lock b/yarn.lock
index 170ac0f36b1..ce86a032b26 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5311,6 +5311,23 @@
pathval "1.1.1"
type-detect "4.0.8"
+"@nlux/core@2.17.1", "@nlux/core@^2.17.1":
+ version "2.17.1"
+ resolved "https://registry.yarnpkg.com/@nlux/core/-/core-2.17.1.tgz#18a95e21e5aafae83bf6d515651780497f0f39cc"
+ integrity sha512-hIvOnuENVqWaIg5Co4JtFmHph7Sp0Nj+QixOMdOW9Ou7CjU7HK+maB5koLoayNL64B+wHTtgPN7zBrB8NCSPXw==
+
+"@nlux/react@^2.17.1":
+ version "2.17.1"
+ resolved "https://registry.yarnpkg.com/@nlux/react/-/react-2.17.1.tgz#e4668e7cbe42dd195ea86a02350c8b15cb8f48f0"
+ integrity sha512-/t6qDAHIefg1vGIthLOtkQxbI4Sh/aL7/eqVuhcoC1w/8NqnvVxwfxR0mkshcIVrKSwHI8Yjav5edZ2yeRBqMw==
+ dependencies:
+ "@nlux/core" "2.17.1"
+
+"@nlux/themes@^2.17.1":
+ version "2.17.1"
+ resolved "https://registry.yarnpkg.com/@nlux/themes/-/themes-2.17.1.tgz#f991b1b5fcf9595e59d0abeb76f9997876b44784"
+ integrity sha512-spD3QJBSdkF+q45rQFFsUQcR4pTy3OEjQywEP+yc9dHcuPrxIMb0/W/whwiHn1aePGL758lKQH3E/NRHA4aSAw==
+
"@noble/curves@1.0.0", "@noble/curves@~1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.0.0.tgz#e40be8c7daf088aaf291887cbc73f43464a92932"
From 1e68ae058a6cab8fcd6adc85c66531d8fc1a768f Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 30 Sep 2024 18:56:34 +0200
Subject: [PATCH 02/27] initial
---
.../src/app/plugins/remixAIPlugin.tsx | 12 +-
.../src/lib/InferenceServerManager.ts | 4 +-
.../src/inferencers/remote/remoteInference.ts | 106 +++++++++---------
libs/remix-ai-core/src/types/models.ts | 1 +
libs/remix-ai-core/src/types/types.ts | 2 +-
.../remix-ai/src/lib/components/personas.tsx | 8 ++
.../remix-ai/src/lib/components/send.ts | 47 ++++++++
7 files changed, 123 insertions(+), 57 deletions(-)
create mode 100644 libs/remix-ui/remix-ai/src/lib/components/personas.tsx
create mode 100644 libs/remix-ui/remix-ai/src/lib/components/send.ts
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 52f236f1593..667bbd8f769 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -3,7 +3,7 @@ import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel } from '@remix/remix-ai-core';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams } from '@remix/remix-ai-core';
const profile = {
name: 'remixAI',
@@ -127,7 +127,8 @@ export class RemixAIPlugin extends ViewPlugin {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
-
+ const params:IParams = GenerationParams
+ params.stream_result = true
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
@@ -135,7 +136,7 @@ export class RemixAIPlugin extends ViewPlugin {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
} else {
- result = await this.remoteInferencer.code_explaining(prompt)
+ result = await this.remoteInferencer.code_explaining(prompt, "", params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
@@ -148,13 +149,16 @@ export class RemixAIPlugin extends ViewPlugin {
return
}
+ const params:IParams = GenerationParams
+ params.stream_result = true
+
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
- result = await this.remoteInferencer.error_explaining(prompt)
+ result = await this.remoteInferencer.error_explaining(prompt, params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
diff --git a/apps/remixdesktop/src/lib/InferenceServerManager.ts b/apps/remixdesktop/src/lib/InferenceServerManager.ts
index 65ea23696e8..87d1ae77812 100644
--- a/apps/remixdesktop/src/lib/InferenceServerManager.ts
+++ b/apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -484,9 +484,9 @@ export class InferenceManager implements ICompletions {
return
}
if (params.stream_result) {
- return this._streamInferenceRequest('code_explaining', { code, context, ...params })
+ return this._streamInferenceRequest('code_explaining', { prompt: code, context, ...params })
} else {
- return this._makeInferenceRequest('code_explaining', { code, context, ...params }, AIRequestType.GENERAL)
+ return this._makeInferenceRequest('code_explaining', { prompt: code, context, ...params }, AIRequestType.GENERAL)
}
}
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 0c37cf751ac..09c2b1927d1 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -1,8 +1,10 @@
import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel } from "../../types/types";
+import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
import { buildSolgptPromt } from "../../prompts/promptBuilder";
-import axios from "axios";
import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat";
+import axios, { AxiosResponse } from 'axios';
+import { Readable } from 'stream';
const defaultErrorMessage = `Unable to get a response from AI server`
@@ -12,38 +14,34 @@ export class RemoteInferencer implements ICompletions {
max_history = 7
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
+ test_env=true
constructor(apiUrl?:string, completionUrl?:string) {
- this.api_url = apiUrl!==undefined ? apiUrl: "https://solcoder.remixproject.org"
- this.completion_url = completionUrl!==undefined ? completionUrl : "https://completion.remixproject.org"
+ this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? "http://127.0.0.1:7861/" : "https://solcoder.remixproject.org"
+ this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? "http://127.0.0.1:7861/" : "https://completion.remixproject.org"
this.event = new EventEmitter()
}
- private async _makeRequest(data, rType:AIRequestType){
+ private async _makeRequest(endpoint, payload, rType:AIRequestType){
this.event.emit("onInference")
- const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- const userPrompt = data.data[0]
+ let requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
+ const userPrompt = payload.prompt
+ console.log(requesURL)
try {
- const result = await axios(requesURL, {
- method: 'POST',
- headers: {
- Accept: 'application/json',
- 'Content-Type': 'application/json',
- },
- data: JSON.stringify(data),
- })
+ const options = { headers: { 'Content-Type': 'application/json', } }
+ const result = await axios.post(`${requesURL}/${endpoint}`, payload, options)
switch (rType) {
case AIRequestType.COMPLETION:
if (result.statusText === "OK")
- return result.data.data[0]
+ return result.data.generatedText
else {
return defaultErrorMessage
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
- const resultText = result.data.data[0]
+ const resultText = result.data.generatedText
ChatHistory.pushHistory(userPrompt, resultText)
return resultText
} else {
@@ -54,33 +52,35 @@ export class RemoteInferencer implements ICompletions {
} catch (e) {
ChatHistory.clearHistory()
console.error('Error making request to Inference server:', e.message)
- return e
}
finally {
this.event.emit("onInferenceDone")
}
}
- private async _streamInferenceRequest(data, rType:AIRequestType){
+ private async _streamInferenceRequest(endpoint, payload, rType:AIRequestType){
+ let resultText = ""
try {
this.event.emit('onInference')
- const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- const userPrompt = data.data[0]
- const response = await axios({
+ const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
+ const userPrompt = payload.prompt
+ const response:AxiosResponse = await axios({
method: 'post',
- url: requesURL,
- data: data,
- headers: { 'Content-Type': 'application/json', "Accept": "text/event-stream" },
- responseType: 'stream'
- });
+ url: `${requestURL}/${endpoint}`,
+ data: payload,
+ headers: {
+ "Content-Type": "application/json",
+ "Accept": "text/event-stream",
+ }
+ , responseType: 'blob' });
- let resultText = ""
response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
resultText = resultText + parsedData.generatedText
+ console.log("resultText" + resultText)
} else {
// stream generation is complete
resultText = resultText + parsedData.generatedText
@@ -99,43 +99,49 @@ export class RemoteInferencer implements ICompletions {
console.error('Error making stream request to Inference server:', error.message);
}
finally {
+ console.log("end streamin" + resultText)
this.event.emit('onInferenceDone')
}
}
- async code_completion(prompt, options:IParams=null): Promise {
- const payload = !options?
- { "data": [prompt, "code_completion", "", false, 30, 0.9, 0.90, 50]} :
- { "data": [prompt, "code_completion", "", options.stream_result,
- options.max_new_tokens, options.temperature, options.top_p, options.top_k]
- }
-
- return this._makeRequest(payload, AIRequestType.COMPLETION)
+ async code_completion(prompt, options:IParams=CompletionParams): Promise {
+ const payload = { prompt, "endpoint":"code_completion", ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
- async code_insertion(msg_pfx, msg_sfx): Promise {
- const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
- return this._makeRequest(payload, AIRequestType.COMPLETION)
+ async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
+ // const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
+ const payload = { prompt, "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
- async code_generation(prompt): Promise {
- const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
- return this._makeRequest(payload, AIRequestType.COMPLETION)
+ async code_generation(prompt, options:IParams=GenerationParams): Promise {
+ // const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
+ const payload = { prompt, "endpoint":"code_completion", ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
- async solidity_answer(prompt): Promise {
+ async solidity_answer(prompt, options:IParams=GenerationParams): Promise {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
- const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
- return this._makeRequest(payload, AIRequestType.GENERAL)
+ // const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
+ const payload = { prompt, "endpoint":"solidity_answer", ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
}
- async code_explaining(prompt, context:string=""): Promise {
- const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
- return this._makeRequest(payload, AIRequestType.GENERAL)
+ async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise {
+ // const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
+ const payload = { prompt, "endpoint":"code_explaining", context, ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
}
- async error_explaining(prompt): Promise {
- const payload = { "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}
- return this._makeRequest(payload, AIRequestType.GENERAL)
+ async error_explaining(prompt, options:IParams=GenerationParams): Promise {
+ const payload = { prompt, "endpoint":"error_explaining", ...options }
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
+ else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
}
}
diff --git a/libs/remix-ai-core/src/types/models.ts b/libs/remix-ai-core/src/types/models.ts
index e3ed62fe74c..6ddfd4fbbda 100644
--- a/libs/remix-ai-core/src/types/models.ts
+++ b/libs/remix-ai-core/src/types/models.ts
@@ -76,6 +76,7 @@ const GenerationParams:IParams = {
topP: 0.92,
max_new_tokens: 2000,
stream_result: false,
+ repeat_penalty: 1.2,
}
export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index fe7ac5469d1..aefb72d6225 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -58,7 +58,7 @@ export interface IParams {
temperature?: number;
max_new_tokens?: number;
repetition_penalty?: number;
- repeatPenalty?:any
+ repeat_penalty?:any
no_repeat_ngram_size?: number;
num_beams?: number;
num_return_sequences?: number;
diff --git a/libs/remix-ui/remix-ai/src/lib/components/personas.tsx b/libs/remix-ui/remix-ai/src/lib/components/personas.tsx
new file mode 100644
index 00000000000..850fee3ac78
--- /dev/null
+++ b/libs/remix-ui/remix-ai/src/lib/components/personas.tsx
@@ -0,0 +1,8 @@
+import { PersonaOptions, UserPersona } from '@nlux/react';
+
+export const user: UserPersona = {
+ name: 'Pipper',
+ avatar: 'assets/img/remix-logo-blue.png'
+};
+
+export const assistantAvatar = 'assets/img/remi-prof.webp';
diff --git a/libs/remix-ui/remix-ai/src/lib/components/send.ts b/libs/remix-ui/remix-ai/src/lib/components/send.ts
new file mode 100644
index 00000000000..cd40b2c40fa
--- /dev/null
+++ b/libs/remix-ui/remix-ai/src/lib/components/send.ts
@@ -0,0 +1,47 @@
+
+// const demoProxyServerUrl = 'https://solcoder.remixproject.org';
+
+// export const send: StreamSend = async (
+// prompt: string,
+// observer: StreamingAdapterObserver,
+// plugin: any,
+// ) => {
+// const body = {"data": [prompt, 'solidity_answer', false,2000,0.9,0.8,50]};
+// const response = await axios(demoProxyServerUrl, {
+// method: 'POST',
+// headers: {'Content-Type': 'application/json'},
+// data: JSON.stringify(body),
+// });
+
+// console.log(plugin);
+// const result = await plugin.call('remixAI', 'solidity_answer', prompt);
+
+// if (response.status !== 200) {
+// observer.error(new Error('Failed to connect to the server'));
+// return;
+// }
+
+// if (response.statusText !== "OK") {
+// return;
+// }
+
+// // Read a stream of server-sent events
+// // and feed them to the observer as they are being generated
+// // const reader = response.body.getReader();
+// // const textDecoder = new TextDecoder();
+
+// // while (true) {
+// // const {value, done} = await reader.read();
+// // if (done) {
+// // break;
+// // }
+
+// // const content = textDecoder.decode(value);
+// // if (content) {
+// // observer.next(content);
+// // }
+// // }
+
+// observer.next(response.data.data[0]);
+// observer.complete();
+// };
From 118d66e8429ec81c50622e2c29a280b3cd28242c Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 2 Oct 2024 10:56:22 +0200
Subject: [PATCH 03/27] nlux chat working locally
---
.../src/app/plugins/remixAIPlugin.tsx | 14 ++--
libs/remix-ai-core/src/index.ts | 5 +-
.../src/inferencers/remote/remoteInference.ts | 72 +++++++++++--------
.../src/prompts/promptBuilder.ts | 11 ++-
libs/remix-ai-core/src/types/models.ts | 1 +
libs/remix-ai-core/src/types/types.ts | 38 ++++++++++
.../remix-ai/src/lib/components/Default.tsx | 49 ++++++++++---
7 files changed, 139 insertions(+), 51 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 667bbd8f769..d3f86ec153d 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -103,7 +103,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
- async solidity_answer(prompt: string): Promise {
+ async solidity_answer(prompt: string, params: IParams=GenerationParams): Promise {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
@@ -117,28 +117,26 @@ export class RemixAIPlugin extends ViewPlugin {
} else {
result = await this.remoteInferencer.solidity_answer(prompt)
}
- if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
+ if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
- async code_explaining(prompt: string): Promise {
+ async code_explaining(prompt: string, context: string, params: IParams=GenerationParams): Promise {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
- const params:IParams = GenerationParams
- params.stream_result = true
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
- result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
+ result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
} else {
- result = await this.remoteInferencer.code_explaining(prompt, "", params)
+ result = await this.remoteInferencer.code_explaining(prompt, context, params)
}
- if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
+ if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
diff --git a/libs/remix-ai-core/src/index.ts b/libs/remix-ai-core/src/index.ts
index 54130d7827e..e61998f3e0e 100644
--- a/libs/remix-ai-core/src/index.ts
+++ b/libs/remix-ai-core/src/index.ts
@@ -17,4 +17,7 @@ export {
getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPromt,
RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
-}
\ No newline at end of file
+}
+
+export * from './types/types'
+export * from './helpers/streamHandler'
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 09c2b1927d1..f5e1ee7c7ef 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -1,13 +1,15 @@
-import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel } from "../../types/types";
+import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel, JsonStreamParser } from "../../types/types";
import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
import { buildSolgptPromt } from "../../prompts/promptBuilder";
import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat";
import axios, { AxiosResponse } from 'axios';
import { Readable } from 'stream';
+import { StreamingAdapterObserver } from '@nlux/react';
const defaultErrorMessage = `Unable to get a response from AI server`
+
export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
@@ -24,8 +26,7 @@ export class RemoteInferencer implements ICompletions {
private async _makeRequest(endpoint, payload, rType:AIRequestType){
this.event.emit("onInference")
- let requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- const userPrompt = payload.prompt
+ const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
console.log(requesURL)
try {
@@ -42,7 +43,7 @@ export class RemoteInferencer implements ICompletions {
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
const resultText = result.data.generatedText
- ChatHistory.pushHistory(userPrompt, resultText)
+ ChatHistory.pushHistory(payload.prompt, resultText)
return resultText
} else {
return defaultErrorMessage
@@ -63,47 +64,62 @@ export class RemoteInferencer implements ICompletions {
try {
this.event.emit('onInference')
const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- const userPrompt = payload.prompt
- const response:AxiosResponse = await axios({
- method: 'post',
- url: `${requestURL}/${endpoint}`,
- data: payload,
+ const response = await fetch(`${requestURL}/${endpoint}`, {
+ method: 'POST',
headers: {
- "Content-Type": "application/json",
+ 'Content-Type': 'application/json',
"Accept": "text/event-stream",
- }
- , responseType: 'blob' });
+ },
+ body: JSON.stringify(payload),
+ });
+
+ if (payload.return_stream_response) {
+ return response
+ }
+
+ const reader = response.body!.getReader();
+ const decoder = new TextDecoder();
+ const parser = new JsonStreamParser();
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
- response.data.on('data', (chunk: Buffer) => {
try {
- const parsedData = JSON.parse(chunk.toString());
- if (parsedData.isGenerating) {
- this.event.emit('onStreamResult', parsedData.generatedText);
- resultText = resultText + parsedData.generatedText
- console.log("resultText" + resultText)
- } else {
- // stream generation is complete
- resultText = resultText + parsedData.generatedText
- ChatHistory.pushHistory(userPrompt, resultText)
- return parsedData.generatedText
+ console.log("value" + decoder.decode(value))
+ const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+
+ for (const parsedData of chunk) {
+ if (parsedData.isGenerating) {
+ this.event.emit('onStreamResult', parsedData.generatedText);
+ resultText = resultText + parsedData.generatedText
+ } else {
+ // stream generation is complete
+ resultText = resultText + parsedData.generatedText
+ ChatHistory.pushHistory(payload.prompt, resultText)
+ return parsedData.generatedText
+ }
}
} catch (error) {
console.error('Error parsing JSON:', error);
ChatHistory.clearHistory()
}
- });
+ }
- return "" // return empty string for now as handled in event
+ return resultText
} catch (error) {
ChatHistory.clearHistory()
console.error('Error making stream request to Inference server:', error.message);
}
finally {
- console.log("end streamin" + resultText)
+ console.log("end streaming\n" + resultText)
this.event.emit('onInferenceDone')
}
}
+
+
+
async code_completion(prompt, options:IParams=CompletionParams): Promise {
const payload = { prompt, "endpoint":"code_completion", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
@@ -112,7 +128,7 @@ export class RemoteInferencer implements ICompletions {
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
// const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
- const payload = { prompt, "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options }
+ const payload = {"endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
@@ -127,7 +143,7 @@ export class RemoteInferencer implements ICompletions {
async solidity_answer(prompt, options:IParams=GenerationParams): Promise {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
// const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
- const payload = { prompt, "endpoint":"solidity_answer", ...options }
+ const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
}
diff --git a/libs/remix-ai-core/src/prompts/promptBuilder.ts b/libs/remix-ai-core/src/prompts/promptBuilder.ts
index 0fab3837c9c..fb917a6ee6a 100644
--- a/libs/remix-ai-core/src/prompts/promptBuilder.ts
+++ b/libs/remix-ai-core/src/prompts/promptBuilder.ts
@@ -2,7 +2,7 @@ import { RemoteBackendOPModel } from "../types/types"
import { ChatHistory } from "./chat"
export const PromptBuilder = (inst, answr, modelop) => {
- if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|><|start_header_id|>assistant<|end_header_id|> ${answr}`
+ if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|eot_id|>\n<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|> ${answr}\n`
if (modelop === RemoteBackendOPModel.DEEPSEEK) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === RemoteBackendOPModel.MISTRAL) return ""
}
@@ -17,12 +17,17 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
} else {
let newPrompt = ""
for (const [question, answer] of ChatHistory.getHistory()) {
+ console.log("question", question)
if (question.startsWith('sol-gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
else newPrompt += PromptBuilder(question, answer, modelOP)
}
- // finaly
- newPrompt = "sol-gpt " + newPrompt + PromptBuilder(userPrompt.split('gpt')[1], "", modelOP)
+
+ // remove sol-gpt or gpt from the start of the prompt
+ const parsedPrompt = userPrompt.replace(/^sol-gpt|^gpt/gm, '')
+
+ // finally add the new prompt to the end of the history
+ newPrompt = "sol-gpt " + newPrompt + PromptBuilder(parsedPrompt, "", modelOP)
return newPrompt
}
}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/models.ts b/libs/remix-ai-core/src/types/models.ts
index 6ddfd4fbbda..d44f0ac734a 100644
--- a/libs/remix-ai-core/src/types/models.ts
+++ b/libs/remix-ai-core/src/types/models.ts
@@ -77,6 +77,7 @@ const GenerationParams:IParams = {
max_new_tokens: 2000,
stream_result: false,
repeat_penalty: 1.2,
+ terminal_output: false,
}
export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index aefb72d6225..a8bbef244e6 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -71,6 +71,8 @@ export interface IParams {
topK?: number;
topP?: number;
temp?: number;
+ return_stream_response?: boolean;
+ terminal_output?: boolean;
}
export enum AIRequestType {
@@ -85,3 +87,39 @@ export enum RemoteBackendOPModel{
CODELLAMA,
MISTRAL
}
+
+export class JsonStreamParser {
+ buffer: string
+ constructor() {
+ this.buffer = '';
+ }
+
+ safeJsonParse(chunk: string): T[] | null {
+ this.buffer += chunk;
+ const results = [];
+
+ while (true) {
+ try {
+ const result = JSON.parse(this.buffer);
+ results.push(result);
+ this.buffer = '';
+ break;
+ } catch (error) {
+ const match = /^([^\{]*\{[^\}]*\})(.*)/.exec(this.buffer);
+ if (match) {
+ try {
+ const result = JSON.parse(match[1]);
+ results.push(result);
+ this.buffer = match[2];
+ } catch (e) {
+ break;
+ }
+ } else {
+ break;
+ }
+ }
+ }
+
+ return results;
+ }
+}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index b5e9291fb14..bc25206bdd4 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,27 +1,45 @@
import React, { useContext, useEffect, useState } from 'react'
import '../remix-ai.css'
-import { DefaultModels, GenerationParams } from '@remix/remix-ai-core';
+import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse } from '@remix/remix-ai-core';
import { StreamSend, StreamingAdapterObserver } from '@nlux/react';
import axios from 'axios';
-import { AiChat, useAsStreamAdapter } from '@nlux/react';
+import { AiChat, useAsStreamAdapter, ChatItem} from '@nlux/react';
import '@nlux/themes/nova.css';
-
+import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
+
const demoProxyServerUrl = 'https://solcoder.remixproject.org';
+let chatobserver: StreamingAdapterObserver = null
-export const Default = (props) => {
+export const Default = (props) => {
const send: StreamSend = async (
prompt: string,
observer: StreamingAdapterObserver,
) => {
- console.log(prompt);
- const response = await props.plugin.call('remixAI', 'solidity_answer', prompt);
- observer.next(response);
- observer.complete();
-};
+ chatobserver = observer
+ GenerationParams.stream_result = true
+ GenerationParams.return_stream_response = true
+ const response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
+ HandleStreamResponse(response,
+ (text) => {observer.next(text)},
+ (result) => {
+ ChatHistory.pushHistory(prompt, result)
+ observer.complete() }
+ )
+
+ };
+
+ // Define initial messages
+ const initialMessages: ChatItem[] = [
+ {
+ role: 'assistant',
+ message: 'Welcome to Remix AI! How can I assist you today?'
+ }
+ ];
const adapter = useAsStreamAdapter(send, []);
+
return (
{
avatar: assistantAvatar
},
user
+
}}
+ //initialConversation={initialMessages}
conversationOptions={{ layout: 'bubbles' }}
- displayOptions={{ colorScheme: "dark" }}
- composerOptions={{ placeholder: "Type your query" }}
+ displayOptions={{ colorScheme: "auto" }}
+ composerOptions={{ placeholder: "Type your query",
+ submitShortcut: 'Enter',
+ hideStopButton: false,
+ }}
+ messageOptions={{ showCodeBlockCopyButton: true,
+ streamingAnimationSpeed: 2,
+ waitTimeBeforeStreamCompletion: 1000,
+ }}
/>
);
};
From abeac7d4e639217cdcd1157f73c49a8267638883 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 2 Oct 2024 17:41:12 +0200
Subject: [PATCH 04/27] linked explain to remix ai output
---
.../src/app/plugins/remixAIPlugin.tsx | 55 ++++++++++++++--
.../src/helpers/streamHandler.ts | 62 +++++++++++++++++++
.../src/inferencers/remote/remoteInference.ts | 3 +-
.../src/prompts/promptBuilder.ts | 1 -
.../src/lib/plugins/remixaiDesktop-api.ts | 16 +++++
libs/remix-ui/remix-ai/src/index.ts | 2 +-
.../remix-ai/src/lib/components/Default.tsx | 36 ++++++++---
.../remix-ai/src/lib/components/RemixAI.tsx | 5 +-
libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx | 4 +-
package.json | 1 +
yarn.lock | 13 ++++
11 files changed, 176 insertions(+), 22 deletions(-)
create mode 100644 libs/remix-ai-core/src/helpers/streamHandler.ts
create mode 100644 libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index d3f86ec153d..cb1b5453aa3 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -1,9 +1,13 @@
import * as packageJson from '../../../../../package.json'
import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
-import { RemixAITab } from '@remix-ui/remix-ai'
-import React from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams } from '@remix/remix-ai-core';
+import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
+import React, { useCallback } from 'react';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, HandleStreamResponse } from '@remix/remix-ai-core';
+
+type chatRequestBufferT = {
+ [key in keyof T]: T[key]
+}
const profile = {
name: 'remixAI',
@@ -11,7 +15,7 @@ const profile = {
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
- "initialize"],
+ "initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
@@ -28,6 +32,7 @@ export class RemixAIPlugin extends ViewPlugin {
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
+ chatRequestBuffer: chatRequestBufferT = null
constructor(inDesktop:boolean) {
super(profile)
@@ -129,6 +134,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
+
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
@@ -138,6 +144,10 @@ export class RemixAIPlugin extends ViewPlugin {
}
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
+
+ // HandleStreamResponse(result, (text) => {
+ // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: text })
+ // })
return result
}
@@ -171,6 +181,43 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
+ chatPipe(fn, prompt: string, context?: string, params: IParams=GenerationParams){
+ if (this.chatRequestBuffer == null){
+ this.chatRequestBuffer = {
+ fn_name: fn,
+ prompt: prompt,
+ params: params,
+ context: context
+ }
+
+ if (fn === "code_explaining"){
+ ChatApi.composer.send("Explain the current code")
+ }
+ else if (fn === "solidity_answer"){
+ ChatApi.composer.send("Answer the following question")
+ }
+ }
+ else{
+ console.log("chatRequestBuffer is not empty. First process the last request.")
+ }
+ }
+
+ ProcessChatRequestBuffer(params:IParams=GenerationParams){
+ if (this.chatRequestBuffer != null){
+ const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
+ this.chatRequestBuffer = null
+ return result
+ }
+ else{
+ console.log("chatRequestBuffer is empty.")
+ }
+ }
+ isChatRequestPending(){
+ return this.chatRequestBuffer != null
+ }
+
+
+
render() {
return (
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
new file mode 100644
index 00000000000..8f397f3e7f7
--- /dev/null
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -0,0 +1,62 @@
+import { ChatHistory } from '../prompts/chat';
+import { JsonStreamParser} from '../types/types'
+
+export const HandleSimpleResponse = async (response,
+ cb?: (streamText: string) => void) => {
+ let resultText = ''
+ const parser = new JsonStreamParser();
+
+ const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
+ for (const parsedData of chunk) {
+ if (parsedData.isGenerating) {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ } else {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ }
+ }
+}
+
+export const HandleStreamResponse = async (streamResponse,
+ cb?: (streamText: string) => void,
+ done_cb?: (result: string) => void) => {
+ try {
+ console.log("streamResponse handler", streamResponse)
+ let resultText = ''
+ const parser = new JsonStreamParser();
+ const reader = streamResponse.body!.getReader();
+ const decoder = new TextDecoder();
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+
+ try {
+ const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+ for (const parsedData of chunk) {
+ if (parsedData.isGenerating) {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ } else {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ }
+ }
+ }
+ catch (error) {
+ console.error('Error parsing JSON:', error);
+ }
+ }
+ if (done_cb) {
+ done_cb(resultText)
+ }
+ }
+ catch (error) {
+ console.error('Error parsing JSON:', error);
+ }
+}
+
+export const UpdtateChatHistory = (userPrompt: string, AIAnswer: string) => {
+  ChatHistory.pushHistory(userPrompt, AIAnswer)
+}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index f5e1ee7c7ef..bbb78b3686c 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -28,7 +28,6 @@ export class RemoteInferencer implements ICompletions {
this.event.emit("onInference")
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- console.log(requesURL)
try {
const options = { headers: { 'Content-Type': 'application/json', } }
const result = await axios.post(`${requesURL}/${endpoint}`, payload, options)
@@ -76,6 +75,7 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
+
const reader = response.body!.getReader();
const decoder = new TextDecoder();
@@ -112,7 +112,6 @@ export class RemoteInferencer implements ICompletions {
console.error('Error making stream request to Inference server:', error.message);
}
finally {
- console.log("end streaming\n" + resultText)
this.event.emit('onInferenceDone')
}
}
diff --git a/libs/remix-ai-core/src/prompts/promptBuilder.ts b/libs/remix-ai-core/src/prompts/promptBuilder.ts
index fb917a6ee6a..87e01524201 100644
--- a/libs/remix-ai-core/src/prompts/promptBuilder.ts
+++ b/libs/remix-ai-core/src/prompts/promptBuilder.ts
@@ -17,7 +17,6 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
} else {
let newPrompt = ""
for (const [question, answer] of ChatHistory.getHistory()) {
- console.log("question", question)
if (question.startsWith('sol-gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
else newPrompt += PromptBuilder(question, answer, modelOP)
diff --git a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
new file mode 100644
index 00000000000..0c4ca0b872a
--- /dev/null
+++ b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
@@ -0,0 +1,16 @@
+import { StatusEvents } from "@remixproject/plugin-utils";
+
+export interface IRemixAIDesktop {
+ events: {
+ onStreamResult(streamText: string): Promise,
+ } & StatusEvents,
+ methods: {
+ code_completion(context: string): Promise
+ code_insertion(msg_pfx: string, msg_sfx: string): Promise,
+ code_generation(prompt: string): Promise,
+ code_explaining(code: string, context?: string): Promise,
+ error_explaining(prompt: string): Promise,
+ solidity_answer(prompt: string): Promise,
+ initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
+ }
+}
\ No newline at end of file
diff --git a/libs/remix-ui/remix-ai/src/index.ts b/libs/remix-ui/remix-ai/src/index.ts
index 56f0a076a1e..e8f9ee13f2f 100644
--- a/libs/remix-ui/remix-ai/src/index.ts
+++ b/libs/remix-ui/remix-ai/src/index.ts
@@ -1 +1 @@
-export { RemixAITab } from './lib/components/RemixAI'
\ No newline at end of file
+export { RemixAITab, ChatApi } from './lib/components/RemixAI'
\ No newline at end of file
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index bc25206bdd4..3e7a6800636 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,35 +1,49 @@
-import React, { useContext, useEffect, useState } from 'react'
+import React, { useContext, useEffect, useState, useCallback} from 'react'
import '../remix-ai.css'
-import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse } from '@remix/remix-ai-core';
-import { StreamSend, StreamingAdapterObserver } from '@nlux/react';
+import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
+import { StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import axios from 'axios';
-import { AiChat, useAsStreamAdapter, ChatItem} from '@nlux/react';
+import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
import '@nlux/themes/nova.css';
import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
+import {highlighter} from '@nlux/highlighter'
const demoProxyServerUrl = 'https://solcoder.remixproject.org';
-let chatobserver: StreamingAdapterObserver = null
+export let ChatApi = null
export const Default = (props) => {
const send: StreamSend = async (
prompt: string,
observer: StreamingAdapterObserver,
) => {
- chatobserver = observer
GenerationParams.stream_result = true
- GenerationParams.return_stream_response = true
+ GenerationParams.return_stream_response = GenerationParams.stream_result
- const response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
- HandleStreamResponse(response,
+ let response = null
+ if (await props.plugin.call('remixAI', 'isChatRequestPending')){
+ response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
+ }
+ else{
+ response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
+ }
+
+
+ if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
ChatHistory.pushHistory(prompt, result)
- observer.complete() }
+ observer.complete()
+ }
)
+ else{
+ observer.next(response)
+ observer.complete()
+ }
};
+ ChatApi = useAiChatApi();
// Define initial messages
const initialMessages: ChatItem[] = [
@@ -42,6 +56,7 @@ export const Default = (props) => {
return (
{
messageOptions={{ showCodeBlockCopyButton: true,
streamingAnimationSpeed: 2,
waitTimeBeforeStreamCompletion: 1000,
+ syntaxHighlighter: highlighter
}}
/>
);
diff --git a/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx b/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
index ad5c5cb8d5b..544694ecc9f 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
@@ -1,6 +1,6 @@
import React, { useContext } from 'react'
import '../remix-ai.css'
-import { Default } from './Default'
+import { Default, ChatApi } from './Default'
export const RemixAITab = (props) => {
@@ -12,4 +12,5 @@ export const RemixAITab = (props) => {
>
)
-}
\ No newline at end of file
+}
+export { ChatApi }
\ No newline at end of file
diff --git a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
index 68511dcbf4d..79bea215364 100644
--- a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
+++ b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
@@ -106,7 +106,6 @@ export const TabsUI = (props: TabsUIProps) => {
}
const renderTab = (tab: Tab, index) => {
-
const classNameImg = 'my-1 mr-1 text-dark ' + tab.iconClass
const classNameTab = 'nav-item nav-link d-flex justify-content-center align-items-center px-2 py-1 tab' + (index === currentIndexRef.current ? ' active' : '')
const invert = props.themeQuality === 'dark' ? 'invert(1)' : 'invert(0)'
@@ -251,7 +250,8 @@ export const TabsUI = (props: TabsUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', path)
if (tabsState.currentExt === 'sol') {
setExplaining(true)
- await props.plugin.call('remixAI', 'code_explaining', content)
+ await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
+ // await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
}
diff --git a/package.json b/package.json
index 3286dd4f3d1..9c9977f1f12 100644
--- a/package.json
+++ b/package.json
@@ -107,6 +107,7 @@
"@metamask/eth-sig-util": "^7.0.2",
"@microlink/react-json-view": "^1.23.0",
"@nlux/core": "^2.17.1",
+ "@nlux/highlighter": "^2.17.1",
"@nlux/react": "^2.17.1",
"@nlux/themes": "^2.17.1",
"@openzeppelin/contracts": "^5.0.0",
diff --git a/yarn.lock b/yarn.lock
index ce86a032b26..5c8f0c504aa 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5316,6 +5316,14 @@
resolved "https://registry.yarnpkg.com/@nlux/core/-/core-2.17.1.tgz#18a95e21e5aafae83bf6d515651780497f0f39cc"
integrity sha512-hIvOnuENVqWaIg5Co4JtFmHph7Sp0Nj+QixOMdOW9Ou7CjU7HK+maB5koLoayNL64B+wHTtgPN7zBrB8NCSPXw==
+"@nlux/highlighter@^2.17.1":
+ version "2.17.1"
+ resolved "https://registry.yarnpkg.com/@nlux/highlighter/-/highlighter-2.17.1.tgz#e4d0f43b5afeff2631bc118b1cc6db80afb7e99f"
+ integrity sha512-/ETnJPbNJWY8ZQH6XAQ5zooEMPsy44Lk2tIxMfr5Ca7+0ICpkMP0mppOmAoKCQBNVsqmKe0oczFBk8blddNDaA==
+ dependencies:
+ "@nlux/core" "2.17.1"
+ highlight.js "^11"
+
"@nlux/react@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/react/-/react-2.17.1.tgz#e4668e7cbe42dd195ea86a02350c8b15cb8f48f0"
@@ -17650,6 +17658,11 @@ hey-listen@^1.0.8:
resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
+highlight.js@^11:
+ version "11.10.0"
+ resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.10.0.tgz#6e3600dc4b33d6dc23d5bd94fbf72405f5892b92"
+ integrity sha512-SYVnVFswQER+zu1laSya563s+F8VDGt7o35d4utbamowvUNLLMovFqwCLSocpZTz3MgaSRA1IbqRWZv97dtErQ==
+
hmac-drbg@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
From 4c843e9b5f439f719039d9fcca682eb55c8830b2 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 7 Oct 2024 12:29:34 +0200
Subject: [PATCH 05/27] directed all logs to chat element
---
.../src/app/components/container.tsx | 9 ++--
.../src/app/plugins/remixAIPlugin.tsx | 43 ++++++-------------
.../src/app/tabs/locales/en/editor.json | 5 ++-
apps/vyper/src/app/utils/remix-client.tsx | 3 +-
.../src/helpers/streamHandler.ts | 1 -
.../src/inferencers/remote/remoteInference.ts | 14 +++---
.../lib/providers/inlineCompletionProvider.ts | 3 ++
.../editor/src/lib/remix-ui-editor.tsx | 13 ++++--
.../remix-ai/src/lib/components/Default.tsx | 10 +++--
libs/remix-ui/renderer/src/lib/renderer.tsx | 3 +-
libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx | 28 ++++++++++++
.../terminal/src/lib/remix-ui-terminal.tsx | 4 +-
12 files changed, 79 insertions(+), 57 deletions(-)
diff --git a/apps/circuit-compiler/src/app/components/container.tsx b/apps/circuit-compiler/src/app/components/container.tsx
index 2c7cc41e16c..faa12d2c7c2 100644
--- a/apps/circuit-compiler/src/app/components/container.tsx
+++ b/apps/circuit-compiler/src/app/components/container.tsx
@@ -74,7 +74,8 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
- await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+ await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+ // await circuitApp.plugin.call('remixAI', 'error_explaining', message)
} else {
const message = `
error message: ${error}
@@ -82,7 +83,8 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
- await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+ await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+ //await circuitApp.plugin.call('remixAI', 'error_explaining', message)
}
} else {
const error = report.message
@@ -92,7 +94,8 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
- await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+ //await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+ await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
}
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index cb1b5453aa3..51983ea6202 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -73,7 +73,6 @@ export class RemixAIPlugin extends ViewPlugin {
}
} else {
- // on browser
this.remoteInferencer = new RemoteInferencer(remoteModel?.apiUrl, remoteModel?.completionUrl)
this.remoteInferencer.event.on('onInference', () => {
this.isInferencing = true
@@ -114,8 +113,6 @@ export class RemixAIPlugin extends ViewPlugin {
return
}
- this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
@@ -123,7 +120,6 @@ export class RemixAIPlugin extends ViewPlugin {
result = await this.remoteInferencer.solidity_answer(prompt)
}
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
- // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
@@ -132,8 +128,6 @@ export class RemixAIPlugin extends ViewPlugin {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
- this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
let result
if (this.isOnDesktop) {
@@ -143,33 +137,22 @@ export class RemixAIPlugin extends ViewPlugin {
result = await this.remoteInferencer.code_explaining(prompt, context, params)
}
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
- // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
-
- // HandleStreamResponse(result, (text) => {
- // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: text })
- // })
return result
}
- async error_explaining(prompt: string): Promise {
+ async error_explaining(prompt: string, context: string="", params: IParams=GenerationParams): Promise {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
- const params:IParams = GenerationParams
- params.stream_result = true
-
- this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
result = await this.remoteInferencer.error_explaining(prompt, params)
}
- if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
- // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
+ if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}
@@ -181,20 +164,20 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
- chatPipe(fn, prompt: string, context?: string, params: IParams=GenerationParams){
+ chatPipe(fn, prompt: string, context?: string, pipeMessage?: string){
if (this.chatRequestBuffer == null){
this.chatRequestBuffer = {
fn_name: fn,
prompt: prompt,
- params: params,
context: context
}
-
- if (fn === "code_explaining"){
- ChatApi.composer.send("Explain the current code")
- }
- else if (fn === "solidity_answer"){
- ChatApi.composer.send("Answer the following question")
+ console.log('pipe message', pipeMessage)
+ if (pipeMessage) ChatApi.composer.send(pipeMessage)
+ else {
+ if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
+ else if (fn === "error_explaining") ChatApi.composer.send("Explain the error")
+ else if (fn === "solidity_answer") ChatApi.composer.send("Answer the following question")
+ else console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
else{
@@ -202,7 +185,8 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
- ProcessChatRequestBuffer(params:IParams=GenerationParams){
+
+ async ProcessChatRequestBuffer(params:IParams=GenerationParams){
if (this.chatRequestBuffer != null){
const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
this.chatRequestBuffer = null
@@ -210,14 +194,13 @@ export class RemixAIPlugin extends ViewPlugin {
}
else{
console.log("chatRequestBuffer is empty.")
+ return ""
}
}
isChatRequestPending(){
return this.chatRequestBuffer != null
}
-
-
render() {
return (
diff --git a/apps/remix-ide/src/app/tabs/locales/en/editor.json b/apps/remix-ide/src/app/tabs/locales/en/editor.json
index 76bfc87a1f8..6f0f3675554 100644
--- a/apps/remix-ide/src/app/tabs/locales/en/editor.json
+++ b/apps/remix-ide/src/app/tabs/locales/en/editor.json
@@ -25,8 +25,9 @@
"editor.explainFunction": "Explain this function",
"editor.explainFunctionSol": "Explain this code",
"editor.explainFunction2": "Explain the function \"{name}\"",
- "editor.explainFunctionByAI": "solidity code: {content}\n Explain the function {currentFunction}",
- "editor.explainFunctionByAISol": "solidity code: {content}\n Explain the function {currentFunction}",
+ "editor.explainFunctionByAI": "```\n{content}\n```\nExplain the function {currentFunction}",
+ "editor.explainFunctionByAISol": "```\n{content}\n```\nExplain the function {currentFunction}",
+ "editor.ExplainPipeMessage": "```\n {content}\n```\nExplain the snippet above",
"editor.executeFreeFunction": "Run a free function",
"editor.executeFreeFunction2": "Run the free function \"{name}\"",
"editor.toastText1": "This can only execute free function",
diff --git a/apps/vyper/src/app/utils/remix-client.tsx b/apps/vyper/src/app/utils/remix-client.tsx
index 6d39d609d23..3034dcb27d4 100644
--- a/apps/vyper/src/app/utils/remix-client.tsx
+++ b/apps/vyper/src/app/utils/remix-client.tsx
@@ -72,7 +72,8 @@ export class RemixClient extends PluginClient {
${message}
can you explain why this error occurred and how to fix it?
`
- await this.client.call('remixAI' as any, 'error_explaining', message)
+ // await this.client.call('remixAI' as any, 'error_explaining', message)
+ await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
} catch (err) {
console.error('unable to askGpt')
console.error(err)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index 8f397f3e7f7..ae13088c072 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -22,7 +22,6 @@ export const HandleStreamResponse = async (streamResponse,
cb?: (streamText: string) => void,
done_cb?: (result: string) => void) => {
try {
- console.log("streamResponse handler", streamResponse)
let resultText = ''
const parser = new JsonStreamParser();
const reader = streamResponse.body!.getReader();
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index bbb78b3686c..f8e816bc651 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -26,11 +26,11 @@ export class RemoteInferencer implements ICompletions {
private async _makeRequest(endpoint, payload, rType:AIRequestType){
this.event.emit("onInference")
- const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
+ const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
try {
const options = { headers: { 'Content-Type': 'application/json', } }
- const result = await axios.post(`${requesURL}/${endpoint}`, payload, options)
+ const result = await axios.post(`${requestURL}/${endpoint}`, payload, options)
switch (rType) {
case AIRequestType.COMPLETION:
@@ -75,7 +75,6 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
-
const reader = response.body!.getReader();
const decoder = new TextDecoder();
@@ -116,20 +115,16 @@ export class RemoteInferencer implements ICompletions {
}
}
-
-
async code_completion(prompt, options:IParams=CompletionParams): Promise {
const payload = { prompt, "endpoint":"code_completion", ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
// const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
const payload = {"endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
}
async code_generation(prompt, options:IParams=GenerationParams): Promise {
@@ -156,6 +151,7 @@ export class RemoteInferencer implements ICompletions {
async error_explaining(prompt, options:IParams=GenerationParams): Promise {
const payload = { prompt, "endpoint":"error_explaining", ...options }
+ console.log("payload: ", payload)
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
}
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 79f995dd6e8..4a792bad4bb 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -1,5 +1,6 @@
/* eslint-disable no-control-regex */
import { EditorUIProps, monacoTypes } from '@remix-ui/editor';
+import { JsonStreamParser } from '@remix/remix-ai-core';
const _paq = (window._paq = window._paq || [])
export class RemixInLineCompletionProvider implements monacoTypes.languages.InlineCompletionsProvider {
@@ -81,6 +82,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
this.task = 'code_generation'
+ console.log("data: " + this.task, data)
const parsedData = data.trimStart() //JSON.parse(data).trimStart()
const item: monacoTypes.languages.InlineCompletion = {
@@ -131,6 +133,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
}
catch (err){
+ console.log("err: " + err)
return
}
}
diff --git a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
index d206c186b97..37be0a07aba 100644
--- a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
+++ b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
@@ -776,6 +776,8 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.generateDocumentationByAI' }, { content, currentFunction: currentFunction.current })
+
+ // do not stream this response
const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
const natSpecCom = "\n" + extractNatspecComments(cm)
@@ -827,9 +829,10 @@ export const EditorUI = (props: EditorUIProps) => {
],
run: async () => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
- const content = await props.plugin.call('fileManager', 'readFile', file)
- const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content, currentFunction: currentFunction.current })
- await props.plugin.call('remixAI', 'code_explaining', message, content)
+ const context = await props.plugin.call('fileManager', 'readFile', file)
+ const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current })
+ // await props.plugin.call('remixAI', 'code_explaining', message, context)
+ await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}
@@ -848,8 +851,10 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const selectedCode = editor.getModel().getValueInRange(editor.getSelection())
+ const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode })
- await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
+ await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage)
+ // await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 3e7a6800636..3d52b93a568 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,7 +1,7 @@
import React, { useContext, useEffect, useState, useCallback} from 'react'
import '../remix-ai.css'
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
-import { StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
+import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import axios from 'axios';
import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
import '@nlux/themes/nova.css';
@@ -29,7 +29,6 @@ export const Default = (props) => {
response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
}
-
if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
@@ -44,6 +43,9 @@ export const Default = (props) => {
};
ChatApi = useAiChatApi();
+ const conversationStarters: ConversationStarter[] = [
+ {prompt: 'Explain what is a solidity contract!', icon: ⭐️},
+ {prompt: 'Explain the current file in Editor'}]
// Define initial messages
const initialMessages: ChatItem[] = [
@@ -68,8 +70,8 @@ export const Default = (props) => {
}}
//initialConversation={initialMessages}
- conversationOptions={{ layout: 'bubbles' }}
- displayOptions={{ colorScheme: "auto" }}
+ conversationOptions={{ layout: 'bubbles', conversationStarters }}
+ displayOptions={{ colorScheme: "auto", themeId: "nova" }}
composerOptions={{ placeholder: "Type your query",
submitShortcut: 'Enter',
hideStopButton: false,
diff --git a/libs/remix-ui/renderer/src/lib/renderer.tsx b/libs/remix-ui/renderer/src/lib/renderer.tsx
index 984f180e0f6..adff9947748 100644
--- a/libs/remix-ui/renderer/src/lib/renderer.tsx
+++ b/libs/remix-ui/renderer/src/lib/renderer.tsx
@@ -75,7 +75,8 @@ export const Renderer = ({ message, opt = {}, plugin }: RendererProps) => {
try {
const content = await plugin.call('fileManager', 'readFile', editorOptions.errFile)
const message = intl.formatMessage({ id: 'solidity.openaigptMessage' }, { content, messageText })
- await plugin.call('remixAI', 'error_explaining', message)
+ // await plugin.call('remixAI', 'error_explaining', message)
+ await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
_paq.push(['trackEvent', 'ai', 'remixAI', 'error_explaining_SolidityError'])
} catch (err) {
console.error('unable to askGtp')
diff --git a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
index 79bea215364..7acb7251851 100644
--- a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
+++ b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
@@ -250,7 +250,35 @@ export const TabsUI = (props: TabsUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', path)
if (tabsState.currentExt === 'sol') {
setExplaining(true)
+ // if plugin is pinned,
+ if (await props.plugin.call('pinnedPanel', 'currentFocus') === 'remixAI'){
+ console.log("pinned has focus")
+ await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
+ }
+ else{
+ const profile = {
+ name: 'remixAI',
+ displayName: 'Remix AI',
+ methods: ['code_generation', 'code_completion',
+ "solidity_answer", "code_explaining",
+ "code_insertion", "error_explaining",
+ "initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
+ events: [],
+ icon: 'assets/img/remix-logo-blue.png',
+ description: 'RemixAI provides AI services to Remix IDE.',
+ kind: '',
+ location: 'sidePanel',
+ documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
+ maintainedBy: 'Remix'
+ }
+ console.log("pinned does not have focus")
+ // await props.plugin.call('sidePanel', 'focus', 'remixAI')
+ await props.plugin.call('sidePanel', 'pinView', profile)
+ setTimeout(async () => {
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
+ }, 500)
+ }
+
// await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
diff --git a/libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx b/libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx
index a17d6846cb8..e7a4feb0aa5 100644
--- a/libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx
+++ b/libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx
@@ -238,11 +238,11 @@ export const RemixUiTerminal = (props: RemixUiTerminalProps) => {
// TODO: rm gpt or redirect gpt to sol-pgt
} else if (script.trim().startsWith('gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
- await call('remixAI', 'solidity_answer', script)
+ await call('remixAI', 'solidity_answer', script) // No streaming supported in terminal
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else if (script.trim().startsWith('sol-gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
- await call('remixAI', 'solidity_answer', script)
+ await call('remixAI', 'solidity_answer', script) // No streaming supported in terminal
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else {
await call('scriptRunner', 'execute', script)
From ad70e5ed0aa83e79eb6956075ff69afed2b71540 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 10 Oct 2024 18:37:27 +0200
Subject: [PATCH 06/27] styled the chat interface
---
.../src/app/plugins/remixAIPlugin.tsx | 11 ++-
.../src/lib/InferenceServerManager.ts | 6 +-
apps/vyper/src/app/utils/remix-client.tsx | 11 +--
.../src/agents/codeExplainAgent.ts | 30 ++++--
.../src/helpers/streamHandler.ts | 90 +++++++++---------
libs/remix-ai-core/src/index.ts | 3 +-
.../src/inferencers/remote/remoteInference.ts | 25 +++--
libs/remix-ai-core/src/types/types.ts | 3 +
.../remix-ai/src/lib/components/Default.tsx | 94 ++-----------------
.../remix-ai/src/lib/components/RemixAI.tsx | 2 +-
.../remix-ai/src/lib/components/color.css | 94 +++++++++++++++++++
11 files changed, 204 insertions(+), 165 deletions(-)
create mode 100644 libs/remix-ui/remix-ai/src/lib/components/color.css
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 51983ea6202..1ffa118234e 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -3,7 +3,7 @@ import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, HandleStreamResponse } from '@remix/remix-ai-core';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent} from '@remix/remix-ai-core';
type chatRequestBufferT = {
[key in keyof T]: T[key]
@@ -33,11 +33,12 @@ export class RemixAIPlugin extends ViewPlugin {
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
chatRequestBuffer: chatRequestBufferT = null
+ agent: CodeExplainAgent
constructor(inDesktop:boolean) {
super(profile)
this.isOnDesktop = inDesktop
-
+ this.agent = new CodeExplainAgent(this)
// user machine dont use ressource for remote inferencing
}
@@ -113,11 +114,12 @@ export class RemixAIPlugin extends ViewPlugin {
return
}
+ const newPrompt = await this.agent.chatCommand(prompt)
let result
if (this.isOnDesktop) {
- result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
+ result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
} else {
- result = await this.remoteInferencer.solidity_answer(prompt)
+ result = await this.remoteInferencer.solidity_answer(newPrompt)
}
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
@@ -171,7 +173,6 @@ export class RemixAIPlugin extends ViewPlugin {
prompt: prompt,
context: context
}
- console.log('pipe message', pipeMessage)
if (pipeMessage) ChatApi.composer.send(pipeMessage)
else {
if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
diff --git a/apps/remixdesktop/src/lib/InferenceServerManager.ts b/apps/remixdesktop/src/lib/InferenceServerManager.ts
index 87d1ae77812..741295cba85 100644
--- a/apps/remixdesktop/src/lib/InferenceServerManager.ts
+++ b/apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -404,8 +404,12 @@ export class InferenceManager implements ICompletions {
}
, responseType: 'stream' });
- const userPrompt = payload[Object.keys(payload)[0]]
+ const userPrompt = payload.prompt
let resultText = ""
+ if (payload.return_stream_response) {
+ return response
+ }
+
response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
diff --git a/apps/vyper/src/app/utils/remix-client.tsx b/apps/vyper/src/app/utils/remix-client.tsx
index 3034dcb27d4..c15c3775a85 100644
--- a/apps/vyper/src/app/utils/remix-client.tsx
+++ b/apps/vyper/src/app/utils/remix-client.tsx
@@ -8,6 +8,7 @@ import EventEmitter from 'events'
import { Plugin } from "@remixproject/engine";
import { CustomRemixApi } from '@remix-api'
+
export type VyperComplierAddress = 'https://vyper2.remixproject.org/' | 'http://localhost:8000/'
export class RemixClient extends PluginClient {
private client = createClient>(this)
@@ -68,12 +69,10 @@ export class RemixClient extends PluginClient {
}
try {
// TODO: remove! no formatting required since already handled on server
- const formattedMessage = `
- ${message}
- can you explain why this error occurred and how to fix it?
- `
- // await this.client.call('remixAI' as any, 'error_explaining', message)
- await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+ const file = await this.client.call('fileManager', 'getCurrentFile')
+ const content = await this.client.call('fileManager', 'readFile', file)
+ const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
+ await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
} catch (err) {
console.error('unable to askGpt')
console.error(err)
diff --git a/libs/remix-ai-core/src/agents/codeExplainAgent.ts b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
index 8d1d02b89f2..176de4bbba9 100644
--- a/libs/remix-ai-core/src/agents/codeExplainAgent.ts
+++ b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
@@ -1,29 +1,45 @@
// interactive code explaining and highlight security vunerabilities
import * as fs from 'fs';
-class CodeExplainAgent {
+export class CodeExplainAgent {
private codebase: string[]; // list of code base file
public currentFile: string;
+ plugin
- constructor(codebasePath: string) {
+ constructor(props) {
+ this.plugin = props
+
// git or fs
- this.codebase = this.loadCodebase(codebasePath);
+ const codebase = this.loadCodebase("codebasePath");
}
private loadCodebase(path: string): string[] {
- const files = fs.readdirSync(path);
- return files
- .filter(file => file.endsWith('.ts'))
- .flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
+ return []
}
public update(currentFile, lineNumber){
}
+ async chatCommand(prompt:string){
+ // change this function with indexer or related
+ try{
+ if (prompt.includes('Explain briefly the current file')){
+ const file = await this.plugin.call('fileManager', 'getCurrentFile')
+ const content = `Explain this code:\n ${await this.plugin.call('fileManager', 'readFile', file)}`
+ return content
+ } else return prompt
+ } catch {
+ console.log('There is No file selected')
+ return 'There is No file selected'
+ }
+ }
+
public getExplanations(currentLine: string, numSuggestions: number = 3): string[] {
// process the code base explaining the current file and highlight some details
const suggestions: string[] = [];
return suggestions;
}
}
+
+// Handle file changed (significantly)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index ae13088c072..e4e04d8ac24 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -1,61 +1,61 @@
import { ChatHistory } from '../prompts/chat';
-import { JsonStreamParser} from '../types/types'
+import { JsonStreamParser } from '../types/types'
-export const HandleSimpleResponse = async (response,
- cb?: (streamText: string) => void) => {
+export const HandleSimpleResponse = async (response,
+ cb?: (streamText: string) => void) => {
+ let resultText = ''
+ const parser = new JsonStreamParser();
+
+ const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
+ for (const parsedData of chunk) {
+ if (parsedData.isGenerating) {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ } else {
+ resultText += parsedData.generatedText
+ cb(parsedData.generatedText)
+ }
+ }
+}
+
+export const HandleStreamResponse = async (streamResponse,
+ cb?: (streamText: string) => void,
+ done_cb?: (result: string) => void) => {
+ try {
let resultText = ''
const parser = new JsonStreamParser();
+ const reader = streamResponse.body!.getReader();
+ const decoder = new TextDecoder();
- const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
- for (const parsedData of chunk) {
- if (parsedData.isGenerating) {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+
+ try {
+ const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+ for (const parsedData of chunk) {
+ if (parsedData.isGenerating) {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
- } else {
+ } else {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
+ }
}
- }
-}
-
-export const HandleStreamResponse = async (streamResponse,
- cb?: (streamText: string) => void,
- done_cb?: (result: string) => void) => {
- try {
- let resultText = ''
- const parser = new JsonStreamParser();
- const reader = streamResponse.body!.getReader();
- const decoder = new TextDecoder();
-
- while (true) {
- const { done, value } = await reader.read();
- if (done) break;
-
- try {
- const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
- for (const parsedData of chunk) {
- if (parsedData.isGenerating) {
- resultText += parsedData.generatedText
- cb(parsedData.generatedText)
- } else {
- resultText += parsedData.generatedText
- cb(parsedData.generatedText)
- }
- }
- }
- catch (error) {
- console.error('Error parsing JSON:', error);
- }
- }
- if (done_cb) {
- done_cb(resultText)
- }
- }
- catch (error) {
+ }
+ catch (error) {
console.error('Error parsing JSON:', error);
+ }
+ }
+ if (done_cb) {
+ done_cb(resultText)
}
+ }
+ catch (error) {
+ console.error('Error parsing JSON:', error);
+ }
}
export const UpdtateChatHistory = (userPromptprompt: string, AIAnswer: string) => {
- ChatHistory.pushHistory(userPromptprompt, AIAnswer)
+ ChatHistory.pushHistory(userPromptprompt, AIAnswer)
}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/index.ts b/libs/remix-ai-core/src/index.ts
index e61998f3e0e..fe54a57f2f8 100644
--- a/libs/remix-ai-core/src/index.ts
+++ b/libs/remix-ai-core/src/index.ts
@@ -20,4 +20,5 @@ export {
}
export * from './types/types'
-export * from './helpers/streamHandler'
\ No newline at end of file
+export * from './helpers/streamHandler'
+export * from './agents/codeExplainAgent'
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index f8e816bc651..ea07cee5652 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -17,20 +17,21 @@ export class RemoteInferencer implements ICompletions {
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
test_env=true
+ test_url="http://solcodertest.org/"
constructor(apiUrl?:string, completionUrl?:string) {
- this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? "http://127.0.0.1:7861/" : "https://solcoder.remixproject.org"
- this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? "http://127.0.0.1:7861/" : "https://completion.remixproject.org"
+ this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
+ this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? this.test_url : "https://completion.remixproject.org"
this.event = new EventEmitter()
}
- private async _makeRequest(endpoint, payload, rType:AIRequestType){
+ private async _makeRequest(payload, rType:AIRequestType){
this.event.emit("onInference")
const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
try {
const options = { headers: { 'Content-Type': 'application/json', } }
- const result = await axios.post(`${requestURL}/${endpoint}`, payload, options)
+ const result = await axios.post(`${requestURL}`, payload, options)
switch (rType) {
case AIRequestType.COMPLETION:
@@ -63,11 +64,10 @@ export class RemoteInferencer implements ICompletions {
try {
this.event.emit('onInference')
const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
- const response = await fetch(`${requestURL}/${endpoint}`, {
+ const response = await fetch(requestURL, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
- "Accept": "text/event-stream",
},
body: JSON.stringify(payload),
});
@@ -118,20 +118,20 @@ export class RemoteInferencer implements ICompletions {
async code_completion(prompt, options:IParams=CompletionParams): Promise {
const payload = { prompt, "endpoint":"code_completion", ...options }
- return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
// const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
const payload = {"endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
- return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt, options:IParams=GenerationParams): Promise {
// const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
const payload = { prompt, "endpoint":"code_completion", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ else return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async solidity_answer(prompt, options:IParams=GenerationParams): Promise {
@@ -139,20 +139,19 @@ export class RemoteInferencer implements ICompletions {
// const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise {
// const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
const payload = { prompt, "endpoint":"code_explaining", context, ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async error_explaining(prompt, options:IParams=GenerationParams): Promise {
const payload = { prompt, "endpoint":"error_explaining", ...options }
- console.log("payload: ", payload)
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
- else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ else return this._makeRequest(payload, AIRequestType.GENERAL)
}
}
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index a8bbef244e6..14c5af2cc32 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -122,4 +122,7 @@ export class JsonStreamParser {
return results;
}
+ safeJsonParseSingle(chunk: string): T[] | null {
+ return JSON.parse(this.buffer);
+ }
}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 3d52b93a568..0d50023776e 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -4,10 +4,13 @@ import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, Han
import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import axios from 'axios';
import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
-import '@nlux/themes/nova.css';
import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
import {highlighter} from '@nlux/highlighter'
+import './color.css'
+import '@nlux/themes/unstyled.css';
+// import '@nlux/themes'
+import { result } from 'lodash';
const demoProxyServerUrl = 'https://solcoder.remixproject.org';
@@ -44,8 +47,8 @@ export const Default = (props) => {
};
ChatApi = useAiChatApi();
const conversationStarters: ConversationStarter[] = [
- {prompt: 'Explain what is a solidity contract!', icon: ⭐️},
- {prompt: 'Explain the current file in Editor'}]
+ {prompt: 'Explain briefly the current file in Editor', icon: ⭐️},
+ {prompt: 'Explain what is a solidity contract!'}]
// Define initial messages
const initialMessages: ChatItem[] = [
@@ -71,7 +74,7 @@ export const Default = (props) => {
}}
//initialConversation={initialMessages}
conversationOptions={{ layout: 'bubbles', conversationStarters }}
- displayOptions={{ colorScheme: "auto", themeId: "nova" }}
+ displayOptions={{ colorScheme: "auto", themeId: "remix_ai_theme" }}
composerOptions={{ placeholder: "Type your query",
submitShortcut: 'Enter',
hideStopButton: false,
@@ -83,85 +86,4 @@ export const Default = (props) => {
}}
/>
);
-};
-
-// export const Default = (props) => {
-// const [searchText, setSearchText] = useState('');
-// const [resultText, setResultText] = useState('');
-// const pluginName = 'remixAI'
-// const appendText = (newText) => {
-// setResultText(resultText => resultText + newText);
-// }
-
-// useEffect(() => {
-// const handleResultReady = async (e) => {
-// appendText(e);
-// };
-// if (props.plugin.isOnDesktop ) {
-// props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
-// handleResultReady(value);
-// })
-// }
-// }, [])
-
-// return (
-//
-//
-// console.log('searchText not implememted')}
-// >
-//
-
-//
-
-//
-
-//
-//
-//
-//
-// );
-// }
\ No newline at end of file
+};
\ No newline at end of file
diff --git a/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx b/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
index 544694ecc9f..d55a3a9256e 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
@@ -7,7 +7,7 @@ export const RemixAITab = (props) => {
const plugin = props.plugin
return (
<>
-
+
>
diff --git a/libs/remix-ui/remix-ai/src/lib/components/color.css b/libs/remix-ui/remix-ai/src/lib/components/color.css
new file mode 100644
index 00000000000..67f5b6ab81a
--- /dev/null
+++ b/libs/remix-ui/remix-ai/src/lib/components/color.css
@@ -0,0 +1,94 @@
+.nlux-theme-remix_ai_theme[data-color-scheme='light'] {
+ --nlux-ChatRoom--BackgroundColor: var(--text-background);
+}
+
+.nlux-theme-remix_ai_theme[data-color-scheme='dark'] {
+ --nlux-ChatRoom--BackgroundColor: var(--text-background);
+}
+
+.nlux-theme-remix_ai_theme {
+
+ /* Override top-level chat room colors */
+ --nlux-ChatRoom--BorderColor: #24233d;
+ --nlux-ChatRoom-Divider--Color: var(--light);
+ /* --nlux-ChatRoom-Divider--BorderWidth:2px; */
+ --nlux-ChatRoom--TextColor: var(--text);
+
+ /* Override message bubble colors */
+ --nlux-AiMessage--BackgroundColor: var(--light);
+ --nlux-HumanMessage--BackgroundColor: var(--text-background);
+
+ /* Override border width */
+ --nlux-ChatRoom--BorderWidth: 0;
+ --nlux-SubmitButton--BorderWidth: 0;
+ --nlux-ChatItem-Avatar--BorderWidth: 0;
+ --nlux-ChatItem-Message-BubbleLayout--BorderWidth: 0;
+ --nlux-ConversationStarter--BorderWidth: 1;
+
+ /* Override border radius */
+ --nlux-ChatRoom--BorderRadius: 5px;
+ --nlux-SubmitButton--BorderRadius: 0 10px 10px 0;
+ --nlux-SubmitButton--Width: 73px;
+ --nlux-ChatItem-Avatar--BorderRadius: 5px;
+ --nlux-ChatItem-Message-BubbleLayout--BorderRadius: 5px;
+ --nlux-ConversationStarter--BorderRadius: 5px;
+ --nlux-PromptInput-Focus-Outline--Width: 10px;
+ --nlux-PromptInput-Max-Height: 50px;
+ --nlux-PromptInput--BorderWidth: 0;
+ .nlux-comp-composer > textarea {padding: 8px;}
+ --nlux-PromptInput--BorderRadius: 10px 0 0 10px;
+ --nlux-PromptInput-Height: 50px;
+
+
+ /* Override input colors */
+ --nlux-PromptInput--BackgroundColor: var(--light);
+ --nlux-PromptInput-Active--BackgroundColor: var(--light);
+ --nlux-PromptInput-Disabled--BackgroundColor: var(--dark);
+
+ /* Gap between submit button and input */
+ --nlux-Composer--Gap: 0;
+
+ /* Override submit button colors */
+ --nlux-SubmitButton--BackgroundColor: var(--primary);
+ --nlux-SubmitButton-Active--BackgroundColor:var(--primary);
+ --nlux-SubmitButton-Disabled--BackgroundColor: var(--dark);
+ --nlux-SubmitButton-Active--TextColor: var(--text);
+ --nlux-SubmitButton-Disabled--TextColor: var(--text);
+
+ /** Inline code in markdown */
+ --nlux-InlineCode--BorderRadius: 6px;
+ --nlux-InlineCode--BorderWidth: 0.5px;
+ --nlux-InlineCode--Padding: 0 2px;
+ --nlux-InlineCode--FontSize: 14px;
+
+
+ /*code block */
+ --nlux-CodeBlock-CopyButton--BackgroundColor: var(--bg-text);
+ --nlux-CodeBlock-CopyButton--TextColor: var(--text);
+
+ /*codeblock*/
+ /*--nlux-CodeBlock--BackgroundColor: var(--body-bg);*/
+ --nlux-CodeBlock--BackgroundColor: var(--bg-text);
+ --nlux-CodeBlock--BorderColor: var(--secondary);
+ --nlux-CodeBlock--Padding: 20px;
+ --nlux-CodeBlock--TextColor: var(--text);
+ --nlux-CodeBlock--FontSize: 14px;
+
+ /* Conversation starter colors */
+ --nlux-ConversationStarter--BackgroundColor: var(--light);
+ --nlux-copy-icon: url('data:image/svg+xml,\
+
\
+ ');
+
+ /* Override icon for the send button */
+ --nlux-send-icon: url('data:image/svg+xml,\
+
\
+ ');
+
+}
From 39e89ab6ac8252230830a76ade84c36ca82b1685 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 10 Oct 2024 18:40:28 +0200
Subject: [PATCH 07/27] turned remote to live
---
libs/remix-ai-core/src/inferencers/remote/remoteInference.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index ea07cee5652..9463ad8c255 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -16,7 +16,7 @@ export class RemoteInferencer implements ICompletions {
max_history = 7
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
- test_env=true
+ test_env=false
test_url="http://solcodertest.org/"
constructor(apiUrl?:string, completionUrl?:string) {
From 52408f32e5b3e2c78a3add8c480fe17eeaa83719 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 14 Oct 2024 15:00:50 +0200
Subject: [PATCH 08/27] pin RemixAI view to side panel; formatting cleanup
---
apps/remix-ide/src/app/plugins/remixAIPlugin.tsx | 15 +++++++--------
.../src/inferencers/remote/remoteInference.ts | 16 +++++++---------
libs/remix-ai-core/src/types/models.ts | 2 ++
libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx | 9 +++------
4 files changed, 19 insertions(+), 23 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 1ffa118234e..44fd4ad70c6 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -3,7 +3,7 @@ import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent} from '@remix/remix-ai-core';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
type chatRequestBufferT
= {
[key in keyof T]: T[key]
@@ -52,8 +52,8 @@ export class RemixAIPlugin extends ViewPlugin {
console.log('Activating RemixAIPlugin on browser')
this.initialize()
}
+ this.call('sidePanel', 'pinView', profile)
}
-
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
// on desktop use remote inferencer -> false
@@ -130,7 +130,7 @@ export class RemixAIPlugin extends ViewPlugin {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
-
+
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
@@ -175,25 +175,24 @@ export class RemixAIPlugin extends ViewPlugin {
}
if (pipeMessage) ChatApi.composer.send(pipeMessage)
else {
- if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
+ if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
else if (fn === "error_explaining") ChatApi.composer.send("Explain the error")
- else if (fn === "solidity_answer") ChatApi.composer.send("Answer the following question")
+ else if (fn === "solidity_answer") ChatApi.composer.send("Answer the following question")
else console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
- else{
+ else {
console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
-
async ProcessChatRequestBuffer(params:IParams=GenerationParams){
if (this.chatRequestBuffer != null){
const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
this.chatRequestBuffer = null
return result
}
- else{
+ else {
console.log("chatRequestBuffer is empty.")
return ""
}
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 9463ad8c255..36bb409fee7 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -17,7 +17,7 @@ export class RemoteInferencer implements ICompletions {
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
test_env=false
- test_url="http://solcodertest.org/"
+ test_url="http://solcodertest.org"
constructor(apiUrl?:string, completionUrl?:string) {
this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
@@ -42,6 +42,7 @@ export class RemoteInferencer implements ICompletions {
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
+ if (result.data?.error) return result.data?.error
const resultText = result.data.generatedText
ChatHistory.pushHistory(payload.prompt, resultText)
return resultText
@@ -75,11 +76,9 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
-
const reader = response.body!.getReader();
const decoder = new TextDecoder();
const parser = new JsonStreamParser();
-
while (true) {
const { done, value } = await reader.read();
if (done) break;
@@ -87,7 +86,6 @@ export class RemoteInferencer implements ICompletions {
try {
console.log("value" + decoder.decode(value))
const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
-
for (const parsedData of chunk) {
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
@@ -123,14 +121,14 @@ export class RemoteInferencer implements ICompletions {
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
// const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
- const payload = {"endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
+ const payload = { "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt, options:IParams=GenerationParams): Promise {
// const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
const payload = { prompt, "endpoint":"code_completion", ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
else return this._makeRequest(payload, AIRequestType.COMPLETION)
}
@@ -138,20 +136,20 @@ export class RemoteInferencer implements ICompletions {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
// const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise {
// const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
const payload = { prompt, "endpoint":"code_explaining", context, ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async error_explaining(prompt, options:IParams=GenerationParams): Promise {
const payload = { prompt, "endpoint":"error_explaining", ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
}
diff --git a/libs/remix-ai-core/src/types/models.ts b/libs/remix-ai-core/src/types/models.ts
index d44f0ac734a..0f46dbd75bc 100644
--- a/libs/remix-ai-core/src/types/models.ts
+++ b/libs/remix-ai-core/src/types/models.ts
@@ -61,6 +61,7 @@ const CompletionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 15,
+ stream_result: false,
}
const InsertionParams:IParams = {
@@ -68,6 +69,7 @@ const InsertionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 150,
+ stream_result: false,
}
const GenerationParams:IParams = {
diff --git a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
index 7acb7251851..ed5c779f296 100644
--- a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
+++ b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
@@ -252,10 +252,9 @@ export const TabsUI = (props: TabsUIProps) => {
setExplaining(true)
// if plugin is pinned,
if (await props.plugin.call('pinnedPanel', 'currentFocus') === 'remixAI'){
- console.log("pinned has focus")
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}
- else{
+ else {
const profile = {
name: 'remixAI',
displayName: 'Remix AI',
@@ -271,14 +270,12 @@ export const TabsUI = (props: TabsUIProps) => {
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
maintainedBy: 'Remix'
}
- console.log("pinned does not have focus")
// await props.plugin.call('sidePanel', 'focus', 'remixAI')
await props.plugin.call('sidePanel', 'pinView', profile)
setTimeout(async () => {
- await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
- }, 500)
+ await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
+ }, 500)
}
-
// await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
From 1348d1f76b540aa41c147051e52aeb7adf3c44d2 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 14 Oct 2024 16:01:34 +0200
Subject: [PATCH 09/27] route AI documentation generation through chatPipe
---
apps/remix-ide/src/app/plugins/remixAIPlugin.tsx | 12 +++++++++++-
libs/remix-ui/editor/src/lib/remix-ui-editor.tsx | 6 ++++--
2 files changed, 15 insertions(+), 3 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 44fd4ad70c6..af3b617a200 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -52,8 +52,18 @@ export class RemixAIPlugin extends ViewPlugin {
console.log('Activating RemixAIPlugin on browser')
this.initialize()
}
- this.call('sidePanel', 'pinView', profile)
+ this.setRemixAIOnSidePannel(false)
}
+
+ setRemixAIOnSidePannel(resize:boolean=false){
+ if (resize){
+ this.call('sidePanel', 'pinView', profile)
+
+ } else {
+ this.call('sidePanel', 'pinView', profile)
+ }
+ }
+
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
// on desktop use remote inferencer -> false
diff --git a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
index 37be0a07aba..eee67fbdcce 100644
--- a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
+++ b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
@@ -776,9 +776,11 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.generateDocumentationByAI' }, { content, currentFunction: currentFunction.current })
-
+
// do not stream this response
- const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
+ const pipeMessage = `Generate the documentation for the function **${currentFunction.current}**`
+ // const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
+ const cm = await props.plugin.call('remixAI' as any, 'chatPipe', 'solidity_answer', message, '', pipeMessage)
const natSpecCom = "\n" + extractNatspecComments(cm)
const cln = await props.plugin.call('codeParser', "getLineColumnOfNode", currenFunctionNode)
From 1743146a1cf72803e3530aea1d670c13b8bcf0b7 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 14 Oct 2024 16:48:10 +0200
Subject: [PATCH 10/27] extend RemixAI desktop plugin API (chatPipe, buffer processing)
---
.../src/lib/plugins/remixaiDesktop-api.ts | 21 ++++++++++++-------
1 file changed, 14 insertions(+), 7 deletions(-)
diff --git a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
index 0c4ca0b872a..dc9deb7e82d 100644
--- a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
@@ -1,16 +1,23 @@
+import { IParams } from "@remix/remix-ai-core";
import { StatusEvents } from "@remixproject/plugin-utils";
-export interface IRemixAIDesktop {
+export interface IRemixAID {
events: {
- onStreamResult(streamText: string): Promise,
+ activated():void,
+ onInference():void,
+ onInferenceDone():void,
+ onStreamResult(streamText: string):void,
+
} & StatusEvents,
methods: {
code_completion(context: string): Promise
code_insertion(msg_pfx: string, msg_sfx: string): Promise,
- code_generation(prompt: string): Promise,
- code_explaining(code: string, context?: string): Promise,
- error_explaining(prompt: string): Promise,
- solidity_answer(prompt: string): Promise,
- initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
+ code_generation(prompt: string): Promise,
+ code_explaining(code: string, context?: string): Promise,
+ error_explaining(prompt: string): Promise,
+ solidity_answer(prompt: string): Promise,
+ initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
+ chatPipe(pipeMessage: string): Promise,
+ ProcessChatRequestBuffer(params:IParams): Promise,
}
}
\ No newline at end of file
From 2b0f7a2cceab096d648f405bcd03bf946b1f8d36 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Tue, 15 Oct 2024 12:29:22 +0200
Subject: [PATCH 11/27] cleanup and new send button on remixai chat
---
.../src/app/components/container.tsx | 6 ----
.../src/app/plugins/remixAIPlugin.tsx | 2 ++
.../src/helpers/streamHandler.ts | 3 +-
.../src/inferencers/remote/remoteInference.ts | 12 +++-----
libs/remix-api/src/lib/plugins/remixai-api.ts | 4 ++-
.../src/lib/plugins/remixaiDesktop-api.ts | 2 +-
libs/remix-api/src/lib/remix-api.ts | 1 -
.../editor/src/lib/remix-ui-editor.tsx | 2 --
.../remix-ai/src/lib/components/Default.tsx | 29 +++++++------------
.../remix-ai/src/lib/components/color.css | 9 ++----
libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx | 1 -
11 files changed, 25 insertions(+), 46 deletions(-)
diff --git a/apps/circuit-compiler/src/app/components/container.tsx b/apps/circuit-compiler/src/app/components/container.tsx
index faa12d2c7c2..80663789cd3 100644
--- a/apps/circuit-compiler/src/app/components/container.tsx
+++ b/apps/circuit-compiler/src/app/components/container.tsx
@@ -73,18 +73,14 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
- // @ts-ignore
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
- // await circuitApp.plugin.call('remixAI', 'error_explaining', message)
} else {
const message = `
error message: ${error}
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
- // @ts-ignore
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
- //await circuitApp.plugin.call('remixAI', 'error_explaining', message)
}
} else {
const error = report.message
@@ -93,8 +89,6 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
- // @ts-ignore
- //await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
}
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index af3b617a200..f16ffacebde 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -4,6 +4,7 @@ import { Plugin } from '@remixproject/engine';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
+import { CustomRemixApi } from '@remix-api'
type chatRequestBufferT = {
[key in keyof T]: T[key]
@@ -26,6 +27,7 @@ const profile = {
maintainedBy: 'Remix'
}
+// add Plugin
export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index e4e04d8ac24..af094fe564a 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -24,9 +24,10 @@ export const HandleStreamResponse = async (streamResponse,
try {
let resultText = ''
const parser = new JsonStreamParser();
- const reader = streamResponse.body!.getReader();
+ const reader = streamResponse.body?.getReader();
const decoder = new TextDecoder();
+ // eslint-disable-next-line no-constant-condition
while (true) {
const { done, value } = await reader.read();
if (done) break;
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 36bb409fee7..3bbfc173af6 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -3,13 +3,9 @@ import { GenerationParams, CompletionParams, InsertionParams } from "../../types
import { buildSolgptPromt } from "../../prompts/promptBuilder";
import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat";
-import axios, { AxiosResponse } from 'axios';
-import { Readable } from 'stream';
-import { StreamingAdapterObserver } from '@nlux/react';
+import axios from 'axios';
const defaultErrorMessage = `Unable to get a response from AI server`
-
-
export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
@@ -76,9 +72,10 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
- const reader = response.body!.getReader();
+ const reader = response.body?.getReader();
const decoder = new TextDecoder();
const parser = new JsonStreamParser();
+ // eslint-disable-next-line no-constant-condition
while (true) {
const { done, value } = await reader.read();
if (done) break;
@@ -113,7 +110,6 @@ export class RemoteInferencer implements ICompletions {
}
}
-
async code_completion(prompt, options:IParams=CompletionParams): Promise {
const payload = { prompt, "endpoint":"code_completion", ...options }
return this._makeRequest(payload, AIRequestType.COMPLETION)
@@ -149,7 +145,7 @@ export class RemoteInferencer implements ICompletions {
async error_explaining(prompt, options:IParams=GenerationParams): Promise {
const payload = { prompt, "endpoint":"error_explaining", ...options }
- if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
+ if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
}
diff --git a/libs/remix-api/src/lib/plugins/remixai-api.ts b/libs/remix-api/src/lib/plugins/remixai-api.ts
index cb32e2a61c3..0ea1498151a 100644
--- a/libs/remix-api/src/lib/plugins/remixai-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixai-api.ts
@@ -5,9 +5,11 @@ export interface IRemixAI {
events: {
onStreamResult(streamText: string): Promise,
activated(): Promise,
+ onInference():void,
+ onInferenceDone():void,
} & StatusEvents,
methods: {
- code_completion(context: string): Promise
+ code_completion(context: string): Promise
code_insertion(msg_pfx: string, msg_sfx: string): Promise,
code_generation(prompt: string): Promise,
code_explaining(code: string, context?: string): Promise,
diff --git a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
index dc9deb7e82d..38127699ce1 100644
--- a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
@@ -10,7 +10,7 @@ export interface IRemixAID {
} & StatusEvents,
methods: {
- code_completion(context: string): Promise
+ code_completion(context: string): Promise
code_insertion(msg_pfx: string, msg_sfx: string): Promise,
code_generation(prompt: string): Promise,
code_explaining(code: string, context?: string): Promise,
diff --git a/libs/remix-api/src/lib/remix-api.ts b/libs/remix-api/src/lib/remix-api.ts
index fa052bdd034..12e29f62f5e 100644
--- a/libs/remix-api/src/lib/remix-api.ts
+++ b/libs/remix-api/src/lib/remix-api.ts
@@ -16,7 +16,6 @@ import { IMatomoApi } from "./plugins/matomo-api"
import { IRemixAI } from "./plugins/remixai-api"
import { IRemixAID } from "./plugins/remixAIDesktop-api"
-
export interface ICustomRemixApi extends IRemixApi {
dgitApi: IGitApi
config: IConfigApi
diff --git a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
index f7c0fe97784..ab8de7ec347 100644
--- a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
+++ b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
@@ -833,7 +833,6 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const context = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current })
- // await props.plugin.call('remixAI', 'code_explaining', message, context)
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
@@ -856,7 +855,6 @@ export const EditorUI = (props: EditorUIProps) => {
const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode })
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage)
- // await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 0d50023776e..784af476abe 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,18 +1,13 @@
-import React, { useContext, useEffect, useState, useCallback} from 'react'
+import React from 'react'
import '../remix-ai.css'
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
-import axios from 'axios';
-import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
+import { AiChat, useAsStreamAdapter, ChatItem } from '@nlux/react';
import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
-import {highlighter} from '@nlux/highlighter'
+import { highlighter } from '@nlux/highlighter'
import './color.css'
import '@nlux/themes/unstyled.css';
-// import '@nlux/themes'
-import { result } from 'lodash';
-
-const demoProxyServerUrl = 'https://solcoder.remixproject.org';
export let ChatApi = null
@@ -27,28 +22,27 @@ export const Default = (props) => {
let response = null
if (await props.plugin.call('remixAI', 'isChatRequestPending')){
response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
- }
- else{
+ } else {
response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
}
- if (GenerationParams.return_stream_response) HandleStreamResponse(response,
+ if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
- (result) => {
+ (result) => {
ChatHistory.pushHistory(prompt, result)
observer.complete()
}
)
- else{
- observer.next(response)
- observer.complete()
+ else {
+ observer.next(response)
+ observer.complete()
}
};
ChatApi = useAiChatApi();
const conversationStarters: ConversationStarter[] = [
- {prompt: 'Explain briefly the current file in Editor', icon: ⭐️},
- {prompt: 'Explain what is a solidity contract!'}]
+ { prompt: 'Explain briefly the current file in Editor', icon: ⭐️ },
+ { prompt: 'Explain what is a solidity contract!' }]
// Define initial messages
const initialMessages: ChatItem[] = [
@@ -70,7 +64,6 @@ export const Default = (props) => {
avatar: assistantAvatar
},
user
-
}}
//initialConversation={initialMessages}
conversationOptions={{ layout: 'bubbles', conversationStarters }}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/color.css b/libs/remix-ui/remix-ai/src/lib/components/color.css
index 67f5b6ab81a..e8f83422bbf 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/color.css
+++ b/libs/remix-ui/remix-ai/src/lib/components/color.css
@@ -73,6 +73,7 @@
--nlux-CodeBlock--Padding: 20px;
--nlux-CodeBlock--TextColor: var(--text);
--nlux-CodeBlock--FontSize: 14px;
+ --nlux-cvStrt--wd: 180px
/* Conversation starter colors */
--nlux-ConversationStarter--BackgroundColor: var(--light);
@@ -83,12 +84,6 @@
');
/* Override icon for the send button */
- --nlux-send-icon: url('data:image/svg+xml,\
- \
- ');
+ --nlux-send-icon: url('data:image/svg+xml, ');
}
diff --git a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
index ed5c779f296..bfa136f185f 100644
--- a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
+++ b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
@@ -276,7 +276,6 @@ export const TabsUI = (props: TabsUIProps) => {
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}, 500)
}
- // await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
}
From 5b69872e8ed7735a56742952c45fe8d7114963a1 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Tue, 15 Oct 2024 14:27:50 +0200
Subject: [PATCH 12/27] fixed lint
---
libs/remix-ai-core/src/agents/codeExplainAgent.ts | 3 +--
libs/remix-ai-core/src/prompts/promptBuilder.ts | 2 --
libs/remix-ai-core/src/types/types.ts | 2 ++
libs/remix-ui/remix-ai/src/lib/components/color.css | 2 +-
4 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/libs/remix-ai-core/src/agents/codeExplainAgent.ts b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
index 176de4bbba9..83e6adf9142 100644
--- a/libs/remix-ai-core/src/agents/codeExplainAgent.ts
+++ b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
@@ -8,7 +8,6 @@ export class CodeExplainAgent {
constructor(props) {
this.plugin = props
-
// git or fs
const codebase = this.loadCodebase("codebasePath");
}
@@ -23,7 +22,7 @@ export class CodeExplainAgent {
async chatCommand(prompt:string){
// change this function with indexer or related
- try{
+ try {
if (prompt.includes('Explain briefly the current file')){
const file = await this.plugin.call('fileManager', 'getCurrentFile')
const content = `Explain this code:\n ${await this.plugin.call('fileManager', 'readFile', file)}`
diff --git a/libs/remix-ai-core/src/prompts/promptBuilder.ts b/libs/remix-ai-core/src/prompts/promptBuilder.ts
index 87e01524201..27c1c3705d2 100644
--- a/libs/remix-ai-core/src/prompts/promptBuilder.ts
+++ b/libs/remix-ai-core/src/prompts/promptBuilder.ts
@@ -24,8 +24,6 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
// remove sol-gpt or gpt from the start of the prompt
const parsedPrompt = userPrompt.replace(/^sol-gpt|^gpt/gm, '')
-
- // finally add the new prompt to the end of the history
newPrompt = "sol-gpt " + newPrompt + PromptBuilder(parsedPrompt, "", modelOP)
return newPrompt
}
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index 14c5af2cc32..0454798ce54 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -98,6 +98,7 @@ export class JsonStreamParser {
this.buffer += chunk;
const results = [];
+ // eslint-disable-next-line no-constant-condition
while (true) {
try {
const result = JSON.parse(this.buffer);
@@ -105,6 +106,7 @@ export class JsonStreamParser {
this.buffer = '';
break;
} catch (error) {
+ // eslint-disable-next-line no-useless-escape
const match = /^([^\{]*\{[^\}]*\})(.*)/.exec(this.buffer);
if (match) {
try {
diff --git a/libs/remix-ui/remix-ai/src/lib/components/color.css b/libs/remix-ui/remix-ai/src/lib/components/color.css
index e8f83422bbf..836a05f1034 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/color.css
+++ b/libs/remix-ui/remix-ai/src/lib/components/color.css
@@ -73,7 +73,7 @@
--nlux-CodeBlock--Padding: 20px;
--nlux-CodeBlock--TextColor: var(--text);
--nlux-CodeBlock--FontSize: 14px;
- --nlux-cvStrt--wd: 180px
+ --nlux-cvStrt--wd: var(--nlux-ConversationStarter--Width, 100px);
/* Conversation starter colors */
--nlux-ConversationStarter--BackgroundColor: var(--light);
From b6eeacfb708972fcee880f5f85468ba289948f59 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 16 Oct 2024 14:16:32 +0200
Subject: [PATCH 13/27] enforced insertion on completion
---
.../src/app/plugins/remixAIPlugin.tsx | 4 +--
.../src/inferencers/remote/remoteInference.ts | 8 ++---
.../src/types/remix-project.code-workspace | 7 +++++
.../lib/providers/inlineCompletionProvider.ts | 31 +++++++------------
4 files changed, 22 insertions(+), 28 deletions(-)
create mode 100644 libs/remix-ai-core/src/types/remix-project.code-workspace
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index f16ffacebde..aad61e2a0f5 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -112,11 +112,11 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
- async code_completion(prompt: string): Promise {
+ async code_completion(prompt: string, promptAfter: string): Promise {
if (this.isOnDesktop) {
return await this.call(this.remixDesktopPluginName, 'code_completion', prompt)
} else {
- return await this.remoteInferencer.code_completion(prompt)
+ return await this.remoteInferencer.code_completion(prompt, promptAfter)
}
}
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 3bbfc173af6..f618133a0f1 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -110,19 +110,17 @@ export class RemoteInferencer implements ICompletions {
}
}
- async code_completion(prompt, options:IParams=CompletionParams): Promise {
- const payload = { prompt, "endpoint":"code_completion", ...options }
+ async code_completion(prompt, promptAfter, options:IParams=CompletionParams): Promise {
+ const payload = { prompt, 'context':promptAfter, "endpoint":"code_completion", ...options }
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
- // const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
const payload = { "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt, options:IParams=GenerationParams): Promise {
- // const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
const payload = { prompt, "endpoint":"code_completion", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
else return this._makeRequest(payload, AIRequestType.COMPLETION)
@@ -130,14 +128,12 @@ export class RemoteInferencer implements ICompletions {
async solidity_answer(prompt, options:IParams=GenerationParams): Promise {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
- // const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise {
- // const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
const payload = { prompt, "endpoint":"code_explaining", context, ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
diff --git a/libs/remix-ai-core/src/types/remix-project.code-workspace b/libs/remix-ai-core/src/types/remix-project.code-workspace
new file mode 100644
index 00000000000..596ef7a482b
--- /dev/null
+++ b/libs/remix-ai-core/src/types/remix-project.code-workspace
@@ -0,0 +1,7 @@
+{
+ "folders": [
+ {
+ "path": "../../../.."
+ }
+ ]
+}
\ No newline at end of file
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 4a792bad4bb..51790a8b029 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -1,6 +1,8 @@
/* eslint-disable no-control-regex */
import { EditorUIProps, monacoTypes } from '@remix-ui/editor';
import { JsonStreamParser } from '@remix/remix-ai-core';
+import * as monaco from 'monaco-editor';
+
const _paq = (window._paq = window._paq || [])
export class RemixInLineCompletionProvider implements monacoTypes.languages.InlineCompletionsProvider {
@@ -26,9 +28,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
async provideInlineCompletions(model: monacoTypes.editor.ITextModel, position: monacoTypes.Position, context: monacoTypes.languages.InlineCompletionContext, token: monacoTypes.CancellationToken): Promise> {
- if (context.selectedSuggestionInfo) {
- return { items: []};
- }
+ const isActivate = await await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
+ if (!isActivate) return
const currentTime = Date.now();
const timeSinceLastRequest = currentTime - this.lastRequestTime;
@@ -65,13 +66,6 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return;
}
- try {
- const isActivate = await await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
- if (!isActivate) return
- } catch (err) {
- return;
- }
-
try {
const split = word.split('\n')
if (split.length < 2) return
@@ -109,11 +103,6 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return { items: []}; // do not do completion on single and multiline comment
}
- // abort if there is a signal
- if (token.isCancellationRequested) {
- return
- }
-
if (word.replace(/ +$/, '').endsWith('\n')){
// Code insertion
try {
@@ -122,7 +111,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
this.task = 'code_insertion'
const item: monacoTypes.languages.InlineCompletion = {
- insertText: generatedText
+ insertText: generatedText,
+ range: new monaco.Range(position.lineNumber, position.column, position.lineNumber, position.column)
};
this.currentCompletion.text = generatedText
this.currentCompletion.item = item
@@ -141,25 +131,26 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
try {
// Code completion
this.task = 'code_completion'
- const output = await this.props.plugin.call('remixAI', 'code_completion', word)
+ const output = await this.props.plugin.call('remixAI', 'code_completion', word, word_after)
const generatedText = output
let clean = generatedText
if (generatedText.indexOf('@custom:dev-run-script./') !== -1) {
clean = generatedText.replace('@custom:dev-run-script', '@custom:dev-run-script ')
}
- clean = clean.replace(word, '').trimStart()
+ clean = clean.replace(word, '')
clean = this.process_completion(clean)
const item: monacoTypes.languages.InlineCompletion = {
insertText: clean,
+ range: new monaco.Range(position.lineNumber, position.column, position.lineNumber, position.column)
};
this.currentCompletion.text = clean
this.currentCompletion.item = item
return {
items: [item],
- enableForwardStability: true
+ enableForwardStability: false
}
} catch (err) {
return
@@ -174,7 +165,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return ""
}
// remove comment inline
- clean = clean.split('//')[0].trimEnd()
+ clean = clean.split('//')[0]
return clean
}
From 917f5629531ed28f649c726db1ab5519419ae49a Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 16 Oct 2024 14:59:52 +0200
Subject: [PATCH 14/27] minor
---
.../editor/src/lib/providers/inlineCompletionProvider.ts | 2 ++
1 file changed, 2 insertions(+)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 51790a8b029..d806249575f 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -110,6 +110,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
const generatedText = output // no need to clean it. should already be
this.task = 'code_insertion'
+ _paq.push(['trackEvent', 'ai', 'remixAI', this.task])
const item: monacoTypes.languages.InlineCompletion = {
insertText: generatedText,
range: new monaco.Range(position.lineNumber, position.column, position.lineNumber, position.column)
@@ -132,6 +133,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
// Code completion
this.task = 'code_completion'
const output = await this.props.plugin.call('remixAI', 'code_completion', word, word_after)
+ _paq.push(['trackEvent', 'ai', 'remixAI', this.task])
const generatedText = output
let clean = generatedText
From 9d782ea085eeb46008fb11dec320ecbf0015e835 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 17 Oct 2024 09:38:46 +0200
Subject: [PATCH 15/27] minor
---
apps/remix-ide/src/app/plugins/remixAIPlugin.tsx | 2 +-
apps/remixdesktop/src/lib/InferenceServerManager.ts | 4 ++--
apps/remixdesktop/src/plugins/remixAIDektop.ts | 4 ++--
3 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index aad61e2a0f5..5461e6d93f9 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -114,7 +114,7 @@ export class RemixAIPlugin extends ViewPlugin {
async code_completion(prompt: string, promptAfter: string): Promise {
if (this.isOnDesktop) {
- return await this.call(this.remixDesktopPluginName, 'code_completion', prompt)
+ return await this.call(this.remixDesktopPluginName, 'code_completion', prompt, promptAfter)
} else {
return await this.remoteInferencer.code_completion(prompt, promptAfter)
}
diff --git a/apps/remixdesktop/src/lib/InferenceServerManager.ts b/apps/remixdesktop/src/lib/InferenceServerManager.ts
index 741295cba85..fb180a26d68 100644
--- a/apps/remixdesktop/src/lib/InferenceServerManager.ts
+++ b/apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -453,14 +453,14 @@ export class InferenceManager implements ICompletions {
}
}
- async code_completion(context: any, params:IParams=CompletionParams): Promise {
+ async code_completion(prompt, promptAfter, params:IParams=CompletionParams): Promise {
if (!this.isReady) {
console.log('model not ready yet')
return
}
// as of now no prompt required
- const payload = { context_code: context, ...params }
+ const payload = { prompt, 'context':promptAfter, ...params }
return this._makeInferenceRequest('code_completion', payload, AIRequestType.COMPLETION)
}
diff --git a/apps/remixdesktop/src/plugins/remixAIDektop.ts b/apps/remixdesktop/src/plugins/remixAIDektop.ts
index 30e58a8e4e9..8497874cfe6 100644
--- a/apps/remixdesktop/src/plugins/remixAIDektop.ts
+++ b/apps/remixdesktop/src/plugins/remixAIDektop.ts
@@ -81,9 +81,9 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
return true
}
- async code_completion(context: any) {
+ async code_completion(prompt: string, promptAfter: string) {
// use general purpose model
- return this.desktopInferencer.code_completion(context)
+ return this.desktopInferencer.code_completion(prompt, promptAfter)
}
async code_insertion(msg_pfx: string, msg_sfx: string) {
From bfa5eda3a33ab36bc3dab112f0ba134a480f8629 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 17 Oct 2024 12:54:39 +0200
Subject: [PATCH 16/27] enable forward stability
---
.../editor/src/lib/providers/inlineCompletionProvider.ts | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index d806249575f..fe1fc15dc48 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -86,7 +86,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
this.currentCompletion.item = item
return {
items: [item],
- enableForwardStability: false
+ enableForwardStability: true
}
}
} catch (e) {
@@ -120,7 +120,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return {
items: [item],
- enableForwardStability: false,
+ enableForwardStability: true,
}
}
catch (err){
@@ -152,7 +152,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return {
items: [item],
- enableForwardStability: false
+ enableForwardStability: true,
}
} catch (err) {
return
From 707699bc231d7c9abdba8c6659ba1ee9d3f9d7c1 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Fri, 18 Oct 2024 12:45:14 +0200
Subject: [PATCH 17/27] fixed streaming error leaving out some text in chat UI
---
.../src/helpers/streamHandler.ts | 2 +-
.../src/types/remix-project.code-workspace | 3 ++
libs/remix-ai-core/src/types/types.ts | 45 ++++++++++---------
.../remix-ai/src/lib/components/Default.tsx | 1 +
4 files changed, 30 insertions(+), 21 deletions(-)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index af094fe564a..a42db7b6458 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -19,7 +19,7 @@ export const HandleSimpleResponse = async (response,
}
export const HandleStreamResponse = async (streamResponse,
- cb?: (streamText: string) => void,
+ cb: (streamText: string) => void,
done_cb?: (result: string) => void) => {
try {
let resultText = ''
diff --git a/libs/remix-ai-core/src/types/remix-project.code-workspace b/libs/remix-ai-core/src/types/remix-project.code-workspace
index 596ef7a482b..01fe49386a3 100644
--- a/libs/remix-ai-core/src/types/remix-project.code-workspace
+++ b/libs/remix-ai-core/src/types/remix-project.code-workspace
@@ -2,6 +2,9 @@
"folders": [
{
"path": "../../../.."
+ },
+ {
+ "path": "../../../../../remix-wildcard"
}
]
}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index 0454798ce54..b170fc62598 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -88,6 +88,10 @@ export enum RemoteBackendOPModel{
MISTRAL
}
+interface GeneratedTextObject {
+ generatedText: string;
+ isGenerating: boolean;
+}
export class JsonStreamParser {
buffer: string
constructor() {
@@ -97,33 +101,34 @@ export class JsonStreamParser {
safeJsonParse(chunk: string): T[] | null {
this.buffer += chunk;
const results = [];
+ let startIndex = 0;
+ let endIndex: number;
+ while ((endIndex = this.buffer.indexOf('}', startIndex)) !== -1) {
+ // check if next character is a opening curly bracket
+ let modifiedEndIndex = endIndex;
+ if ((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) !== -1 ) {
+ endIndex = modifiedEndIndex - 1;
+ }
- // eslint-disable-next-line no-constant-condition
- while (true) {
+ if (((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) === -1) &&
+ (this.buffer.indexOf('}', endIndex) < this.buffer.length)) {
+ endIndex = this.buffer.indexOf('}', endIndex+1) <0 ? this.buffer.length - 1 : this.buffer.indexOf('}', endIndex+1);
+ }
+
+ const jsonStr = this.buffer.slice(startIndex, endIndex + 1);
try {
- const result = JSON.parse(this.buffer);
- results.push(result);
- this.buffer = '';
- break;
+ const obj: GeneratedTextObject = JSON.parse(jsonStr);
+ console.log('parsed:', obj);
+ results.push(obj);
} catch (error) {
- // eslint-disable-next-line no-useless-escape
- const match = /^([^\{]*\{[^\}]*\})(.*)/.exec(this.buffer);
- if (match) {
- try {
- const result = JSON.parse(match[1]);
- results.push(result);
- this.buffer = match[2];
- } catch (e) {
- break;
- }
- } else {
- break;
- }
+ console.error('Error parsing JSON:', error);
}
+ startIndex = endIndex + 1;
}
-
+ this.buffer = this.buffer.slice(startIndex);
return results;
}
+
safeJsonParseSingle(chunk: string): T[] | null {
return JSON.parse(this.buffer);
}
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 784af476abe..b2555f18021 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -29,6 +29,7 @@ export const Default = (props) => {
if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
+ observer.next(' ') // Add a space to flush
ChatHistory.pushHistory(prompt, result)
observer.complete()
}
From b237be846066032bf2f4561cd26300629a08b7df Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Fri, 18 Oct 2024 12:53:13 +0200
Subject: [PATCH 18/27] matomo back in
---
.../editor/src/lib/providers/inlineCompletionProvider.ts | 5 ++++-
libs/remix-ui/remix-ai/src/lib/components/Default.tsx | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index fe1fc15dc48..401ed47f790 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -75,6 +75,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'RemixAI - generating code for following comment: ' + ask.replace('///', '') })
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
+ _paq.push(['trackEvent', 'ai', 'remixAI', 'code_generation'])
this.task = 'code_generation'
console.log("data: " + this.task, data)
@@ -107,6 +108,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
// Code insertion
try {
const output = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
+ _paq.push(['trackEvent', 'ai', 'remixAI', 'code_insertion'])
+
const generatedText = output // no need to clean it. should already be
this.task = 'code_insertion'
@@ -133,7 +136,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
// Code completion
this.task = 'code_completion'
const output = await this.props.plugin.call('remixAI', 'code_completion', word, word_after)
- _paq.push(['trackEvent', 'ai', 'remixAI', this.task])
+ _paq.push(['trackEvent', 'ai', 'remixAI', 'code_completion'])
const generatedText = output
let clean = generatedText
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index b2555f18021..459797294c9 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -29,7 +29,7 @@ export const Default = (props) => {
if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
- observer.next(' ') // Add a space to flush
+ observer.next(' ') // Add a space to flush the last message
ChatHistory.pushHistory(prompt, result)
observer.complete()
}
From e89e938d6385e2fbbedce55fcf533f6e47090218 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Mon, 21 Oct 2024 10:42:46 +0200
Subject: [PATCH 19/27] off remix AI when starting
---
apps/remix-ide/src/app/plugins/remixAIPlugin.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 5461e6d93f9..493f053b564 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -54,7 +54,7 @@ export class RemixAIPlugin extends ViewPlugin {
console.log('Activating RemixAIPlugin on browser')
this.initialize()
}
- this.setRemixAIOnSidePannel(false)
+ // this.setRemixAIOnSidePannel(false)
}
setRemixAIOnSidePannel(resize:boolean=false){
From dd061e587fd457fd5bf62ba423b27c8097d0c91f Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Tue, 22 Oct 2024 09:50:44 +0200
Subject: [PATCH 20/27] minor
---
libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
index 38127699ce1..dc9deb7e82d 100644
--- a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
@@ -10,7 +10,7 @@ export interface IRemixAID {
} & StatusEvents,
methods: {
- code_completion(context: string): Promise
+ code_completion(context: string): Promise
code_insertion(msg_pfx: string, msg_sfx: string): Promise,
code_generation(prompt: string): Promise,
code_explaining(code: string, context?: string): Promise,
From 2d4fbb5330256ace41b4155fefa47a383da1dc25 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Tue, 22 Oct 2024 15:18:09 +0200
Subject: [PATCH 21/27] enable remixai remote on desktop
---
apps/remixdesktop/src/plugins/remixAIDektop.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/apps/remixdesktop/src/plugins/remixAIDektop.ts b/apps/remixdesktop/src/plugins/remixAIDektop.ts
index 8497874cfe6..6382e28bba9 100644
--- a/apps/remixdesktop/src/plugins/remixAIDektop.ts
+++ b/apps/remixdesktop/src/plugins/remixAIDektop.ts
@@ -45,6 +45,7 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
async onActivation(): Promise {
this.onload(() => {
+ this.emit('activated')
})
}
From 8968ce6b78efa055a70fce33ce6de458c07d537a Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 23 Oct 2024 10:56:23 +0200
Subject: [PATCH 22/27] remixai remote inference working on desktop
---
apps/remix-ide/src/app.js | 2 +-
.../src/app/plugins/remixAIPlugin.tsx | 24 ++++++++++---------
libs/remix-ai-core/src/types/types.ts | 1 -
3 files changed, 14 insertions(+), 13 deletions(-)
diff --git a/apps/remix-ide/src/app.js b/apps/remix-ide/src/app.js
index 91b2a42f337..796d354c5c6 100644
--- a/apps/remix-ide/src/app.js
+++ b/apps/remix-ide/src/app.js
@@ -557,7 +557,7 @@ class AppComponent {
await this.appManager.activatePlugin(['solidity-script', 'remix-templates'])
if (isElectron()) {
- await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID'])
+ await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat']) // 'remixAID'
}
this.appManager.on(
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 493f053b564..f6005ee3b3d 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -36,6 +36,7 @@ export class RemixAIPlugin extends ViewPlugin {
isInferencing: boolean = false
chatRequestBuffer: chatRequestBufferT = null
agent: CodeExplainAgent
+ useRemoteInferencer:boolean = false
constructor(inDesktop:boolean) {
super(profile)
@@ -47,11 +48,13 @@ export class RemixAIPlugin extends ViewPlugin {
onActivation(): void {
if (this.isOnDesktop) {
console.log('Activating RemixAIPlugin on desktop')
- this.on(this.remixDesktopPluginName, 'activated', () => {
- this.call("remixAI", 'initialize', null, null, null, false);
- })
+ // this.on(this.remixDesktopPluginName, 'activated', () => {
+ this.useRemoteInferencer = true
+ this.initialize(null, null, null, this.useRemoteInferencer);
+ // })
} else {
console.log('Activating RemixAIPlugin on browser')
+ this.useRemoteInferencer = true
this.initialize()
}
// this.setRemixAIOnSidePannel(false)
@@ -67,9 +70,8 @@ export class RemixAIPlugin extends ViewPlugin {
}
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
// on desktop use remote inferencer -> false
- console.log('initialize on desktop')
const res = await this.call(this.remixDesktopPluginName, 'initializeModelBackend', useRemote, model1, model2)
if (res) {
this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
@@ -105,7 +107,7 @@ export class RemixAIPlugin extends ViewPlugin {
return
}
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_generation', prompt)
} else {
return await this.remoteInferencer.code_generation(prompt)
@@ -113,7 +115,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
async code_completion(prompt: string, promptAfter: string): Promise {
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_completion', prompt, promptAfter)
} else {
return await this.remoteInferencer.code_completion(prompt, promptAfter)
@@ -128,7 +130,7 @@ export class RemixAIPlugin extends ViewPlugin {
const newPrompt = await this.agent.chatCommand(prompt)
let result
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
} else {
result = await this.remoteInferencer.solidity_answer(newPrompt)
@@ -144,7 +146,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
let result
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
} else {
@@ -161,7 +163,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
let result
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
result = await this.remoteInferencer.error_explaining(prompt, params)
@@ -171,7 +173,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
async code_insertion(msg_pfx: string, msg_sfx: string): Promise {
- if (this.isOnDesktop) {
+ if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
} else {
return await this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index b170fc62598..a6d2c9eb885 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -118,7 +118,6 @@ export class JsonStreamParser {
const jsonStr = this.buffer.slice(startIndex, endIndex + 1);
try {
const obj: GeneratedTextObject = JSON.parse(jsonStr);
- console.log('parsed:', obj);
results.push(obj);
} catch (error) {
console.error('Error parsing JSON:', error);
From 51c4fbd2e83620629028e438f51b13985a297346 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 24 Oct 2024 10:30:06 +0200
Subject: [PATCH 23/27] improving completion process
---
.../lib/providers/inlineCompletionProvider.ts | 18 +++++++++++-------
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 401ed47f790..37a396d05fd 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -135,7 +135,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
try {
// Code completion
this.task = 'code_completion'
- const output = await this.props.plugin.call('remixAI', 'code_completion', word, word_after)
+ const output = await this.props.plugin.call('remixAI', 'code_completion', word, word_after.trimStart())
_paq.push(['trackEvent', 'ai', 'remixAI', 'code_completion'])
const generatedText = output
let clean = generatedText
@@ -144,7 +144,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
clean = generatedText.replace('@custom:dev-run-script', '@custom:dev-run-script ')
}
clean = clean.replace(word, '')
- clean = this.process_completion(clean)
+ clean = this.process_completion(clean, word_after)
const item: monacoTypes.languages.InlineCompletion = {
insertText: clean,
@@ -162,15 +162,19 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
}
- process_completion(data: any) {
- let clean = data.split('\n')[0].startsWith('\n') ? [data.split('\n')[0], data.split('\n')[1]].join('\n'): data.split('\n')[0]
-
+ process_completion(data: any, word_after: any) {
+ let clean = data
// if clean starts with a comment, remove it
if (clean.startsWith('//') || clean.startsWith('/*') || clean.startsWith('*') || clean.startsWith('*/')){
+ console.log("clean starts with comment")
return ""
}
- // remove comment inline
- clean = clean.split('//')[0]
+
+ const text_after = word_after.split('\n')[0].trim()
+ // if clean contains the content of text_after, remove it
+ if (clean.includes(text_after)){
+ clean = clean.replace(text_after, '')
+ }
return clean
}
From f9646717769380c305066dd69cc3cdccc9cedc18 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 24 Oct 2024 11:41:52 +0200
Subject: [PATCH 24/27] improving completion process
---
apps/remix-ide/src/assets/list.json | 16 ++++++++++++++--
.../lib/providers/inlineCompletionProvider.ts | 6 +++---
2 files changed, 17 insertions(+), 5 deletions(-)
diff --git a/apps/remix-ide/src/assets/list.json b/apps/remix-ide/src/assets/list.json
index 82f8fe17bbf..6a47e646127 100644
--- a/apps/remix-ide/src/assets/list.json
+++ b/apps/remix-ide/src/assets/list.json
@@ -1033,9 +1033,21 @@
"urls": [
"dweb:/ipfs/QmVTALD1WUQwRvEL19jgwrEFyBJMQmy9z32zvT6TAtYPY1"
]
+ },
+ {
+ "path": "soljson-v0.8.28+commit.7893614a.js",
+ "version": "0.8.28",
+ "build": "commit.7893614a",
+ "longVersion": "0.8.28+commit.7893614a",
+ "keccak256": "0x8e01bd0cafb8a8bab060453637101a88e4ab6d41c32645a26eaca541fb169c8e",
+ "sha256": "0x72ef580a6ec5943130028e5294313f24e9435520acc89f8c9dbfd0139d9ae146",
+ "urls": [
+ "dweb:/ipfs/QmVtdNYdUC4aX6Uk5LrxDT55B7NgGLnLcA2wTecF5xUbSS"
+ ]
}
],
"releases": {
+ "0.8.28": "soljson-v0.8.28+commit.7893614a.js",
"0.8.27": "soljson-v0.8.27+commit.40a35a09.js",
"0.8.26": "soljson-v0.8.26+commit.8a97fa7a.js",
"0.8.25": "soljson-v0.8.25+commit.b61c2a91.js",
@@ -1131,5 +1143,5 @@
"0.4.0": "soljson-v0.4.0+commit.acd334c9.js",
"0.3.6": "soljson-v0.3.6+commit.3fc68da5.js"
},
- "latestRelease": "0.8.27"
-}
\ No newline at end of file
+ "latestRelease": "0.8.28"
+}
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 37a396d05fd..354665b6204 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -171,10 +171,10 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
const text_after = word_after.split('\n')[0].trim()
- // if clean contains the content of text_after, remove it
- if (clean.includes(text_after)){
- clean = clean.replace(text_after, '')
+ if (clean.toLowerCase().includes(text_after.toLowerCase())){
+ clean = clean.replace(text_after, '') // apply regex to conserve the case
}
+
return clean
}
From e33392f88738967bd4878a9c0fdd77e0286b7646 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 24 Oct 2024 11:44:05 +0200
Subject: [PATCH 25/27] improving completion process
---
.../editor/src/lib/providers/inlineCompletionProvider.ts | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 354665b6204..6764a73d18d 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -158,7 +158,11 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
enableForwardStability: true,
}
} catch (err) {
- return
+ const item: monacoTypes.languages.InlineCompletion = { insertText: " " }
+ return {
+ items: [item],
+ enableForwardStability: true,
+ }
}
}
From 5f0dc4d6806aaccfd236c0dd59b0b071067cb216 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 24 Oct 2024 11:52:46 +0200
Subject: [PATCH 26/27] enabled completion at string beginning
---
.../editor/src/lib/providers/inlineCompletionProvider.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 6764a73d18d..d1260f84b79 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -62,6 +62,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
if (!word.endsWith(' ') &&
!word.endsWith('.') &&
+ !word.endsWith('"') &&
!word.endsWith('(')) {
return;
}
From d52765b6d162985ce7034c953809ea63cb6bc659 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 24 Oct 2024 14:47:51 +0200
Subject: [PATCH 27/27] Update remixAIPlugin.tsx: remove setting to side panel
---
apps/remix-ide/src/app/plugins/remixAIPlugin.tsx | 10 ----------
1 file changed, 10 deletions(-)
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index f6005ee3b3d..d6d3c78a480 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -57,16 +57,6 @@ export class RemixAIPlugin extends ViewPlugin {
this.useRemoteInferencer = true
this.initialize()
}
- // this.setRemixAIOnSidePannel(false)
- }
-
- setRemixAIOnSidePannel(resize:boolean=false){
- if (resize){
- this.call('sidePanel', 'pinView', profile)
-
- } else {
- this.call('sidePanel', 'pinView', profile)
- }
}
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){