
Commit 4bcc35a

committed: moved to vlite for filedimegpt and all other errors also fixed
1 parent 649db94

File tree: 8 files changed (+52, −35 lines)

package.json
Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 {
   "name": "filedime",
-  "version": "0.9.62",
+  "version": "0.9.65",
   "private": true,
   "engines": {
     "node": "20.x"

src-tauri/Cargo.lock
Lines changed: 1 addition & 1 deletion
Generated file; diff not rendered.

src-tauri/Cargo.toml
Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 [package]
 name = "filedime"
-version = "0.9.62"
+version = "0.9.65"
 description = "rust based file explorer."
 authors = ["visnk"]
 license = ""

src/app/filegpt/page.tsx
Lines changed: 3 additions & 1 deletion

@@ -1,8 +1,10 @@
 "use client"
 import React from "react";
 import GPTchatinterface from "../../components/gptchatinterface";
+import {Metadata} from 'next'
+
 export default function RootLayout(){
   let url=typeof window !== 'undefined' ? window.location.hostname : '/'
   console.log(url)
-  return <GPTchatinterface fgptendpoint={url}/>
+  return <GPTchatinterface fgptendpoint={url} setasollama={true}/>
 }
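As a side note, a minimal sketch of the endpoint-resolution idiom used above: `window` only exists in the browser, so the hostname lookup is guarded for server-side rendering and falls back to "/" as the page does. The helper name below is mine, not part of the codebase; in the page this value is passed as `fgptendpoint`, alongside the new `setasollama={true}` default.

// Sketch: read the FileGPT host only when running in the browser.
// During server-side rendering `window` is undefined, so fall back to "/" like the page above.
function resolveFgptHost(): string {
  return typeof window !== "undefined" ? window.location.hostname : "/";
}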

src/app/layout.tsx
Lines changed: 4 additions & 2 deletions

@@ -10,11 +10,13 @@ import '../styles/globals.css'
 import DarkButton from "../components/but"
 import {Metadata} from 'next'

+
 export const metadata:Metadata = {
-  title: 'Filedime',
-  description: 'Rust logic based, NextJS frontend file explorer',
+  title: 'FileGPT-Filedime',
+  description: 'Query your files.',
 }

+
 export default function RootLayout({
   children,
 }: {

src/components/FIleuploadfromremote.tsx
Lines changed: 3 additions & 1 deletion

@@ -3,8 +3,9 @@ import axios from 'axios';
 interface fuargs{
   fge:string;
   setcmsg:Dispatch<SetStateAction<string>>
+  setasollama:Dispatch<SetStateAction<boolean>>
 }
-const FileUploadComponent = ({fge,setcmsg}:fuargs) => {
+const FileUploadComponent = ({fge,setcmsg,setasollama}:fuargs) => {
   const [files, setFiles] = useState([]);
   const [collectionName, setCollectionName] = useState('');

@@ -35,6 +36,7 @@ const FileUploadComponent = ({fge,setcmsg}:fuargs) => {
       'Content-Type': 'multipart/form-data',
     },
   });
+  setasollama(false)
   console.log(response.data['message']);
   setcmsg(response.data['message'])
   // Handle the response as needed
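The rest of the upload handler is not part of this hunk, so the following is only a sketch of how the new `setasollama` setter plausibly fits into it: files are posted as multipart form data, and on success the component switches the chat away from plain-Ollama mode. The "/upload" path and the form field names are assumptions, not taken from the diff.

// Sketch (assumed shape of the upload handler around the changed lines).
import axios from "axios";
import type { Dispatch, SetStateAction } from "react";

async function uploadFiles(
  fge: string,                                      // FileGPT endpoint, as in the component props
  files: File[],
  collectionName: string,
  setcmsg: Dispatch<SetStateAction<string>>,
  setasollama: Dispatch<SetStateAction<boolean>>,
) {
  const formData = new FormData();
  files.forEach((f) => formData.append("files", f));  // field name is an assumption
  formData.append("collection", collectionName);      // field name is an assumption

  const response = await axios.post(`${fge}/upload`, formData, {  // "/upload" path is an assumption
    headers: { "Content-Type": "multipart/form-data" },
  });

  setasollama(false);                  // new in this commit: answer from the uploaded files, not plain Ollama
  setcmsg(response.data["message"]);   // surface the server message in the chat, as the diff does
}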

src/components/gptchatinterface.tsx
Lines changed: 38 additions & 26 deletions

@@ -14,7 +14,8 @@ import { Checkbox } from "./ui/checkbox";
 // import MyComponent from "./route";
 interface gptargs{
   message?:FileItem,
-  fgptendpoint?:string
+  fgptendpoint?:string,
+  setasollama:boolean
   // localorremote:boolean
 }
 interface mitem{
@@ -24,12 +25,16 @@ interface mitem{
   timestamp:number
 }
 function getchattime(){
-  return `${new Date().getHours()}:${new Date().getMinutes() < 10 ? '0' : ''}${new Date().getMinutes()}`
+  return `${new Date().getHours()}:${new Date().getMinutes() < 10 ? '0' : ''}${new Date().getMinutes()}:${new Date().getSeconds() < 10 ? '0' : ''}${new Date().getSeconds()}`
 }
 function getchattimestamp(){
   return new Date().getTime()
 }
-export default function GPTchatinterface({message,fgptendpoint="localhost"}:gptargs){
+export default function GPTchatinterface({message,fgptendpoint="localhost",setasollama=false}:gptargs){
+  // let sao=(value:boolean)=>{setasollama=value};
+  const [isollama,sao]=useState(setasollama)
+
+  // const [useollama,seto]=useState(setasollama)
   console.log("endpoint-->"+fgptendpoint)
   // const [time, setTime] = useState(new Date());
   // useEffect(() => {
6671
// const [querystring, setqs] = useState([message.path]);
6772

6873
const embed = async () => {
69-
// if(localorremote){
70-
try {
71-
const response = await axios.post(`${filegptendpoint}/embed`, { files: filePaths });
72-
setchathistory((old)=>[...old,{
73-
from:"bot",
74-
message:`${message?message.path:"The file(s)"} is ready for your questions`,
75-
time:getchattime(),
76-
timestamp:getchattimestamp()
77-
}])
78-
setcbs(false)
79-
console.log(response.data);
80-
} catch (error) {
81-
setchathistory((old)=>[...old,{
82-
from:"bot",
83-
message:`Issue finding Filegpt endpoint, maybe its not be running.`,
84-
time:getchattime(),
85-
timestamp:getchattimestamp()
86-
}])
87-
console.error('Error:', error);
88-
}
89-
// }
74+
if(message.path){
75+
console.log("embed")
76+
// if(localorremote){
77+
try {
78+
const response = await axios.post(`${filegptendpoint}/embed`, { files: filePaths });
79+
sao(false)
80+
setchathistory((old)=>[...old,{
81+
from:"bot",
82+
message:`${message?message.path:"The file(s)"} is ready for your questions`,
83+
time:getchattime(),
84+
timestamp:getchattimestamp()
85+
}])
86+
setcbs(false)
87+
console.log(response.data);
88+
} catch (error) {
89+
setchathistory((old)=>[...old,{
90+
from:"bot",
91+
message:`Issue finding Filegpt endpoint, maybe its not be running.`,
92+
time:getchattime(),
93+
timestamp:getchattimestamp()
94+
}])
95+
console.error('Error:', error);
96+
}
97+
// }
98+
}
9099
};
91100
//scroll to bottom in chatview
92101
useEffect(()=> {
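Boiled down, the reworked embed step now does nothing unless a file path is present; a sketch of that shape (the /embed route and payload come straight from the diff, the standalone function and its parameters are mine):

// Sketch: the embed call is skipped entirely when there is no file path to embed.
import axios from "axios";

async function embedFiles(filegptendpoint: string, filePaths: string[], messagePath?: string) {
  if (!messagePath) return;  // new guard from this commit: if(message.path){ ... }
  // POST the selected paths to FileGPT's /embed route, as in the diff.
  const response = await axios.post(`${filegptendpoint}/embed`, { files: filePaths });
  return response.data;
}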
@@ -158,7 +167,7 @@ else{
   method: "POST",
   body: JSON.stringify({
     query:question,
-    where:question.toLocaleLowerCase().startsWith("generally")?"ollama":""
+    where:question.toLocaleLowerCase().startsWith("generally")||isollama?"ollama":""
   }),
   headers: { 'Content-Type': 'application/json', Accept: "text/event-stream" },
   onopen: async (res)=> {
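The only change in this hunk is the routing condition for the "where" field sent with each query; written as a pure function it reads as below (a sketch; the "generally" prefix and the new isollama flag are taken from the diff, the function name is mine).

// Sketch: decide whether a question goes to plain Ollama or to the embedded files.
function routeQuery(question: string, isollama: boolean): "ollama" | "" {
  // "generally ..." questions, or a chat still in plain-Ollama mode, bypass the embedded files.
  return question.toLocaleLowerCase().startsWith("generally") || isollama ? "ollama" : "";
}

// routeQuery("Generally, what is Rust?", false) === "ollama"
// routeQuery("Summarise this document", false)  === ""
// routeQuery("Summarise this document", true)   === "ollama"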
@@ -321,6 +330,9 @@ else{
   }
 },[cmsg])
 const [autoscroll,setas]=useState(false)
+useEffect(()=>{
+  console.log(setasollama)
+},[setasollama])
 return (<>
   {/* <MyComponent/> */}
   {/* {time.toLocaleString()} */}

@@ -329,7 +341,7 @@ else{
   <div className="flex flex-row p-2 border-2 place-items-center">{filedimegptisrunning?<CheckIcon className="w-4 h-4"/>:<XIcon className="w-4 h-4"/>} FiledimeGPT</div>
 </div>
 {localorremote?(<h1 className="flex flex-row gap-2"><BotIcon className="h-4 w-4"/>FileGPT : {message?message.path:null}</h1>):(<>
-  <FileUploadComponent fge={filegptendpoint} setcmsg={setcmsg}/>
+  <FileUploadComponent fge={filegptendpoint} setcmsg={setcmsg} setasollama={sao}/>
 </>)}

 <div className="overflow-auto grid gap-4 p-4 h-[70%] mb-5" >

src/components/greet.tsx
Lines changed: 1 addition & 2 deletions

@@ -91,7 +91,6 @@ export let supportedfiles = [
   "xlsx",
   "xls",
   "odt",
-  "py",
   "doc",
   "docx",
   "enex",

@@ -831,7 +830,7 @@ export default function Greet() {
   <ResizablePanel className={"bg-white dark:bg-gray-800"}> */}


-  <GPTchatinterface message={row.original}/>
+  <GPTchatinterface message={row.original} setasollama={true}/>
   {/* </ResizablePanel>
   </ResizablePanelGroup> */}

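Here the first hunk removes "py" from the supported-extensions list and the second forwards setasollama={true} into the chat panel; the diff does not show how the list is consulted, so the check below is a hypothetical usage sketch built on the exported supportedfiles array.

// Hypothetical sketch: decide whether a file can be sent to FileGPT based on its extension.
import { supportedfiles } from "./greet";  // "export let supportedfiles" as shown in the hunk above

function isSupportedForGPT(path: string): boolean {
  const ext = path.split(".").pop()?.toLowerCase() ?? "";
  return supportedfiles.includes(ext);
}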
