diff --git a/tools/cortensor-openai-provider/.gitignore b/tools/cortensor-openai-provider/.gitignore new file mode 100644 index 0000000..2e83896 --- /dev/null +++ b/tools/cortensor-openai-provider/.gitignore @@ -0,0 +1,113 @@ +# Dependencies +node_modules/ +*.pnp +.pnp.js + +# Build outputs +dist/ +build/ +*.tsbuildinfo + +# Environment variables +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# Logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Coverage directory used by tools like istanbul +coverage/ +*.lcov + +# nyc test coverage +.nyc_output + +# Dependency directories +jspm_packages/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt + +# Gatsby files +.cache/ +public + +# Storybook build outputs +.out +.storybook-out + +# Temporary folders +tmp/ +temp/ + +# Editor directories and files +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS generated files +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +sync-to-monorepo.ps1 +TODO.MD +log1.log +*.log + + +test-bitcoin-search.js \ No newline at end of file diff --git a/tools/cortensor-openai-provider/.sync-metadata.json b/tools/cortensor-openai-provider/.sync-metadata.json new file mode 100644 index 0000000..240ec99 --- /dev/null +++ b/tools/cortensor-openai-provider/.sync-metadata.json @@ -0,0 +1,11 @@ +{ + "sourceRepository": "https://github.com/Ezejaemmanuel/cortensor-openai-provider.git", + "lastSync": "2025-08-30T22:16:34.946Z", + "syncedBy": "HP", + "projectName": "cortensor-openai-provider", + "targetFolder": "tools", + "version": "1.0.0", + "syncTool": "Node.js", + "nodeVersion": "v22.14.0", + "communityProjectsPath": "C:\\Users\\HP\\development\\web-development\\javascript-node\\hackathon-dev\\cortensor-hackathon\\cortensor-community-sync\\community-projects" +} diff --git a/tools/cortensor-openai-provider/README.md b/tools/cortensor-openai-provider/README.md new file mode 100644 index 0000000..0a2ce3b --- /dev/null +++ b/tools/cortensor-openai-provider/README.md @@ -0,0 +1,530 @@ +# Cortensor OpenAI Provider + + 🚧 **EXPERIMENTAL - ACTIVELY IN DEVELOPMENT** 🚧 + +OpenAI-compatible provider for Cortensor AI models, designed to work seamlessly with Vercel AI SDK and popular agent frameworks. 
+
+## Features
+
+- 🔄 **OpenAI Compatibility**: Drop-in replacement for OpenAI provider
+- 🎯 **Session Management**: Built-in session handling for conversation continuity
+- 🔀 **Request/Response Transformation**: Seamless format conversion between OpenAI and Cortensor APIs
+- 🔍 **Web Search Integration**: Built-in web search capabilities with Tavily provider support
+- 🔧 **Custom Search Providers**: Flexible web search provider interface for custom implementations
+- 📘 **TypeScript Support**: Full type safety with comprehensive TypeScript definitions
+- 🤖 **Agent Framework Ready**: Compatible with Mastra, Convex, and other AI agent frameworks
+- ⚡ **Lightweight**: Minimal dependencies for optimal performance
+
+> **Note**: Streaming responses are currently disabled and will be available in future releases.
+
+## Installation
+
+```bash
+pnpm add cortensor-openai-provider
+# or
+npm install cortensor-openai-provider
+# or
+yarn add cortensor-openai-provider
+```
+
+### Dependencies
+
+The package includes the following key dependencies:
+- `@ai-sdk/openai-compatible`: OpenAI compatibility layer
+- `@tavily/core`: Built-in web search provider (Tavily integration)
+- `ai`: Peer dependency for Vercel AI SDK integration
+
+> **Note**: The `@tavily/core` dependency is included for the built-in web search functionality, but you can use custom search providers without requiring a Tavily API key.
+
+## Environment Setup
+
+```bash
+# .env.local or .env
+CORTENSOR_API_KEY=your_cortensor_api_key_here
+CORTENSOR_BASE_URL=https://your-cortensor-api-url.com
+
+# Optional: For web search functionality
+TAVILY_API_KEY=your_tavily_api_key_here
+```
+
+> **Important**: Both `CORTENSOR_API_KEY` and `CORTENSOR_BASE_URL` are required environment variables. `TAVILY_API_KEY` is optional and only needed if you want to use the built-in Tavily web search provider.
+
+## 🔍 Web Search Integration
+
+The Cortensor OpenAI Provider includes web search capabilities that give your AI models access to real-time information from the internet. This feature supports multiple search providers and flexible configuration options.
+
+### Search Modes
+
+The web search functionality supports three modes:
+
+- **`prompt`** (default): Search is triggered by `[**search**]` markers in user messages (see Search Directives below)
+- **`force`**: Always perform web search for every request
+- **`disable`**: Completely disable web search functionality
+
+### Search Directives
+
+You can control search behavior using special markers in your messages:
+
+- **`[**search**]`**: Forces a web search for this message (removed from final prompt)
+- **`[**no-search**]`**: Prevents web search for this message (removed from final prompt)
+
+### Built-in Tavily Provider
+
+```typescript
+import { cortensorModel, createTavilySearch } from 'cortensor-openai-provider';
+import { generateText } from 'ai';
+
+// Create Tavily search provider
+const tavilySearch = createTavilySearch({
+  apiKey: process.env.TAVILY_API_KEY, // Optional if set in environment
+  maxResults: 1,
+  searchDepth: 'advanced'
+});
+
+// Use with cortensorModel
+const result = await generateText({
+  model: cortensorModel({
+    sessionId: 12345,
+    webSearch: {
+      mode: 'prompt', // 'prompt' | 'force' | 'disable'
+      provider: tavilySearch,
+      maxResults: 1
+    }
+  }),
+  messages: [{
+    role: 'user',
+    content: '[**search**] What are the latest developments in AI?'
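+    // The [**search**] marker forces a web search for this message; it is removed from the prompt before it reaches the model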
+  }],
+});
+```
+
+### Custom Search Providers
+
+You can implement your own search provider by following the `WebSearchProvider` interface:
+
+```typescript
+import type { WebSearchProvider, WebSearchResult } from 'cortensor-openai-provider';
+
+// Option 1: Implement WebSearchProvider interface
+class CustomSearchProvider implements WebSearchProvider {
+  async search(query: string, maxResults?: number): Promise<WebSearchResult[]> {
+    // Your custom search implementation
+    const results = await yourSearchAPI(query, maxResults);
+
+    return results.map(result => ({
+      title: result.title,
+      url: result.url,
+      snippet: result.description,
+      publishedDate: result.date // optional
+    }));
+  }
+}
+
+// Option 2: Use a simple function
+const customSearchFunction = async (query: string, maxResults?: number) => {
+  // Your search logic here
+  return [
+    {
+      title: "Example Result",
+      url: "https://example.com",
+      snippet: "This is an example search result"
+    }
+  ];
+};
+
+// Use either approach
+const model = cortensorModel({
+  sessionId: 12345,
+  webSearch: {
+    mode: 'prompt',
+    provider: new CustomSearchProvider(), // or customSearchFunction
+    maxResults: 3
+  }
+});
+```
+
+### Web Search Configuration
+
+```typescript
+interface WebSearchConfig {
+  mode: 'prompt' | 'force' | 'disable';
+  provider?: WebSearchProvider | ((query: string, maxResults?: number) => Promise<WebSearchResult[]>);
+  maxResults?: number; // Default: 5
+}
+```
+
+### Search Result Format
+
+Search results are automatically formatted with numbered citations and included in the model's response:
+
+```markdown
+**Sources:**
+[1] [First Result Title](https://example1.com)
+[2] [Second Result Title](https://example2.com)
+[3] [Third Result Title](https://example3.com)
+```
+
+This format follows modern AI chatbot best practices, similar to platforms like Perplexity AI, providing clean numbered citations that make it easy to reference specific sources.
+
+## Quick Start
+
+### Basic Usage with Vercel AI SDK
+
+```typescript
+import { cortensorModel } from 'cortensor-openai-provider';
+import { generateText } from 'ai';
+
+const result = await generateText({
+  model: cortensorModel({
+    sessionId: 12345,
+    modelName: 'cortensor-chat',
+    temperature: 0.7,
+    maxTokens: 3000,
+  }),
+  messages: [{ role: 'user', content: 'Hello!' }],
+});
+
+console.log(result.text);
+```
+
+### Environment Variables Required
+
+```bash
+# .env.local or .env
+CORTENSOR_API_KEY=your_cortensor_api_key_here
+CORTENSOR_BASE_URL=https://your-cortensor-api-url.com
+```
+
+## Agent Framework Integration
+
+### 🤖 Mastra Agents
+
+```typescript
+import { cortensorModel } from 'cortensor-openai-provider';
+import { Agent, createMastra } from '@mastra/core';
+
+const mastra = createMastra({
+  agents: {
+    cortensorAgent: new Agent({
+      name: 'cortensor-agent',
+      instructions: 'You are a helpful AI assistant.',
+      model: cortensorModel({
+        sessionId: 11111,
+        modelName: 'cortensor-chat',
+        temperature: 0.7,
+        maxTokens: 256,
+      }),
+    }),
+  },
+});
+
+// Use the agent
+const response = await mastra.agents.cortensorAgent.generate({
+  messages: [{ role: 'user', content: 'Hello!'
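+  // This call runs against the Cortensor session (sessionId 11111) configured on the agent's model above, so follow-up calls share context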
}], +}); +``` + +### 🔄 Convex Agents + +```typescript +// convex/agents.ts +import { cortensorModel } from 'cortensor-openai-provider'; +import { generateText } from 'ai'; +import { mutation } from './_generated/server'; +import { v } from 'convex/values'; + +export const sendMessage = mutation({ + args: { + conversationId: v.id('conversations'), + message: v.string(), + sessionId: v.number(), + }, + handler: async (ctx, { conversationId, message, sessionId }) => { + const conversation = await ctx.db.get(conversationId); + if (!conversation) throw new Error('Conversation not found'); + + const messages = [...conversation.messages, { role: 'user' as const, content: message }]; + + const result = await generateText({ + model: cortensorModel({ + sessionId, + modelName: 'cortensor-chat', + temperature: 0.7, + maxTokens: 512, + }), + messages, + }); + + const aiMessage = { role: 'assistant' as const, content: result.text }; + const updatedMessages = [...messages, aiMessage]; + + await ctx.db.patch(conversationId, { + messages: updatedMessages, + updatedAt: Date.now(), + }); + + return { message: result.text }; + }, +}); +``` + +## Framework Examples + +### Next.js API Route + +```typescript +// app/api/chat/route.ts +import { cortensorModel, createTavilySearch } from 'cortensor-openai-provider'; +import { generateText } from 'ai'; +import { NextRequest } from 'next/server'; + +// Create search provider (can be reused across requests) +const searchProvider = createTavilySearch({ + maxResults: 3, + searchDepth: 'basic' +}); + +export async function POST(req: NextRequest) { + const { messages, sessionId, enableSearch = false } = await req.json(); + + const result = await generateText({ + model: cortensorModel({ + sessionId, + modelName: 'cortensor-chat', + temperature: 0.7, + maxTokens: 256, + // Enable web search if requested + ...(enableSearch && { + webSearch: { + mode: 'prompt', + provider: searchProvider, + maxResults: 3 + } + }) + }), + messages, + }); + + return Response.json({ response: result.text }); +} +``` + +#### Usage with Web Search + +```typescript +// Client-side usage +const response = await fetch('/api/chat', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + sessionId: 12345, + enableSearch: true, + messages: [{ + role: 'user', + content: '[**search**] What are the latest AI developments in 2024?' + }] + }) +}); +``` + +### Express.js Server + +```typescript +import express from 'express'; +import { cortensorModel } from 'cortensor-openai-provider'; +import { generateText } from 'ai'; + +const app = express(); +app.use(express.json()); + +app.post('/api/chat', async (req, res) => { + const { messages, sessionId } = req.body; + + const result = await generateText({ + model: cortensorModel({ + sessionId, + modelName: 'cortensor-chat', + temperature: 0.7, + maxTokens: 256, + }), + messages, + }); + + res.json({ response: result.text }); +}); +``` + +## API Reference + +### `cortensorModel(config)` + +Creates a Cortensor model instance with session management. 
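+
+As a quick sketch (the session ID here is illustrative), the smallest valid configuration passes just `sessionId` and relies on the defaults listed under **Parameters** below:
+
+```typescript
+import { cortensorModel } from 'cortensor-openai-provider';
+
+// Only sessionId is required; modelName, temperature, maxTokens, etc. fall back to their documented defaults
+const model = cortensorModel({ sessionId: 12345 });
+```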
+ +**Parameters:** +- `config` (object, required): + - `sessionId` (number, required): Session ID for conversation continuity + - `modelName` (string, optional): Model name (default: 'cortensor-chat') + - `temperature` (number, optional): Sampling temperature 0.0-2.0 (default: 0.7) + - `maxTokens` (number, optional): Maximum tokens to generate (default: 3000) + - `topP` (number, optional): Top-p sampling parameter (default: 0.95) + - `topK` (number, optional): Top-k sampling parameter (default: 40) + - `presencePenalty` (number, optional): Presence penalty -2.0 to 2.0 (default: 0) + - `frequencyPenalty` (number, optional): Frequency penalty -2.0 to 2.0 (default: 0) + - `stream` (boolean, optional): Enable streaming (default: false, currently disabled) + - `timeout` (number, optional): Request timeout in seconds (default: 60) + - `promptType` (number, optional): Prompt type identifier (default: 1) + - `promptTemplate` (string, optional): Custom prompt template (default: '') + - `webSearch` (object, optional): Web search configuration + - `mode` ('prompt' | 'force' | 'disable'): Search mode (default: 'prompt') + - `provider` (WebSearchProvider | function): Search provider instance or function + - `maxResults` (number, optional): Maximum search results (default: 3) + +### `createCortensorProvider(config?)` + +Creates a custom Cortensor provider with specific configuration. + +**Parameters:** +- `config` (object, optional): + - `apiKey` (string, optional): Override API key + - `baseURL` (string, optional): Override base URL + - `timeout` (number, optional): Request timeout + - `sessionTimeout` (number, optional): Session timeout + +### `clearModelConfigurations(sessionId?)` + +Clears stored model configurations. + +**Parameters:** +- `sessionId` (number, optional): Clear configs for specific session, or all if omitted + +## Session Management + +Sessions maintain conversation context across multiple requests: + +```typescript +// Use consistent sessionId for conversation continuity +const sessionId = 98765; + +const model = cortensorModel({ + sessionId, + modelName: 'cortensor-chat', + temperature: 0.7, + maxTokens: 256, +}); + +// All requests with this model will share the same session +const response1 = await generateText({ model, messages: [...] }); +const response2 = await generateText({ model, messages: [...] 
}); + +// Clear session when done +import { clearModelConfigurations } from 'cortensor-openai-provider'; +clearModelConfigurations(sessionId); +``` + +## Error Handling + +```typescript +try { + const result = await generateText({ + model: cortensorModel({ + sessionId: 12345, + modelName: 'cortensor-chat', + temperature: 0.7, + maxTokens: 3000, + }), + messages, + }); +} catch (error) { + console.error('Cortensor API error:', error); + // Handle error appropriately +} +``` + +## Development Status + +### Current Status +- ✅ Basic OpenAI compatibility +- ✅ Session management with automatic cleanup +- ✅ **Web search integration with Tavily provider** +- ✅ **Custom web search provider support** +- ✅ **Search directives and flexible search modes** +- ✅ Full TypeScript support with comprehensive types +- ✅ Agent framework integration (Mastra, Convex) +- ✅ Request/response transformation +- ✅ Error handling and validation +- ❌ Streaming responses (coming in future releases) +- ❌ Image handling (planned) +- ❌ Advanced prompt template handling (experimental) + +### Known Limitations +- Streaming is currently disabled +- Image processing not yet supported +- Prompt template functionality may not work reliably +- Web search requires external API keys (Tavily or custom provider) + +## Roadmap + +### 🚀 Upcoming Features + +#### Support for LanguageModelV2 +- **Enhanced model capabilities**: Leverage Cortensor's advanced language models with LanguageModelV2 interface +- **Batch processing**: Support for processing multiple requests in parallel +- **Advanced model features**: Full compatibility with AI SDK's LanguageModelV2 specification +- **Improved type safety**: Enhanced TypeScript support for LanguageModelV2 methods +- **Better error handling**: Comprehensive error management for LanguageModelV2 operations + +### Streaming Support +- **Real-time streaming responses**: Enable streaming for real-time AI responses +- **Stream cancellation**: Support for cancelling ongoing streams +- **Backpressure handling**: Proper stream flow control +- **Error recovery**: Graceful handling of stream interruptions + +#### Multimodal Support +- **Image input handling**: Support for image uploads and processing +- **Vision model integration**: Connect with Cortensor's vision capabilities +- **File attachment support**: Handle various file formats +- **Base64 image encoding**: Automatic image format conversion + +#### Advanced Prompt Engineering +- **Custom prompt templates**: Robust template system with variable substitution +- **Template validation**: Ensure prompt templates are properly formatted +- **Template library**: Pre-built templates for common use cases +- **Dynamic prompt generation**: Context-aware prompt modification + +#### Tool Calling & Enhanced Features +- **Tool calling**: Proper tool/function calling capabilities for agent interactions +- **Function calling**: Support for external function execution +- **Persistent sessions**: Database-backed session storage +- **Rate limiting**: Built-in request throttling +- **Caching layer**: Response caching for improved performance +- **Metrics and monitoring**: Usage analytics and performance tracking + +> **Note**: Some features depend on capabilities that are not yet available in the Cortensor network infrastructure. This provider is designed to work seamlessly with the Cortensor network as new features become available. + +### 🔬 Experimental Features + +> **Note**: These features are experimental and may not work reliably in the current version. 
+ +- **Prompt Templates**: Basic template support is available but may have limitations +- **Custom Model Parameters**: Advanced model configuration options +- **Session Persistence**: Experimental session storage mechanisms + +### 🌐 Cortensor Network Integration + +This provider is specifically built to work with the Cortensor network infrastructure. For comprehensive documentation on building with Cortensor network, including API reference and integration guides, visit: + +**📚 [Cortensor Web2 API Reference](https://docs.cortensor.network/getting-started/web2-api-reference)** + +The provider abstracts the complexity of direct API calls while maintaining full compatibility with Cortensor's RESTful endpoints for sessions, tasks, miners, and completions. + +## Contributing + +This is an experimental package. Contributions, feedback, and bug reports are welcome! + +## License + +MIT License + +## Support + +For issues and questions, please open an issue on the repository. \ No newline at end of file diff --git a/tools/cortensor-openai-provider/package.json b/tools/cortensor-openai-provider/package.json new file mode 100644 index 0000000..1a13d67 --- /dev/null +++ b/tools/cortensor-openai-provider/package.json @@ -0,0 +1,61 @@ +{ + "name": "cortensor-openai-provider", + "version": "0.4.0", + "description": "🚧 EXPERIMENTAL: OpenAI-compatible Cortensor provider for Vercel AI SDK - Actively in development", + "main": "./dist/index.js", + "module": "./dist/index.mjs", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.mjs", + "require": "./dist/index.js" + } + }, + "files": [ + "dist", + "README.md" + ], + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "keywords": [ + "ai", + "cortensor", + "openai", + "vercel-ai", + "llm", + "provider", + "experimental" + ], + "author": "Jatique", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/Ezejaemmanuel/cortensor-openai-provider.git" + }, + "bugs": { + "url": "https://github.com/Ezejaemmanuel/cortensor-openai-provider/issues" + }, + "homepage": "https://github.com/Ezejaemmanuel/cortensor-openai-provider#readme", + "dependencies": { + "@ai-sdk/openai-compatible": "0.2.16", + "@tavily/core": "0.5.11", + "ai": "4.3.19" + }, + "devDependencies": { + "@types/node": "^22.10.5", + "rimraf": "^6.0.1", + "tsup": "^8.0.0", + "typescript": "^5.7.3" + }, + "engines": { + "node": ">=18.0.0" + }, + "publishConfig": { + "access": "public" + } +} \ No newline at end of file diff --git a/tools/cortensor-openai-provider/pnpm-lock.yaml b/tools/cortensor-openai-provider/pnpm-lock.yaml new file mode 100644 index 0000000..8618d73 --- /dev/null +++ b/tools/cortensor-openai-provider/pnpm-lock.yaml @@ -0,0 +1,1664 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@ai-sdk/openai-compatible': + specifier: 0.2.16 + version: 0.2.16(zod@4.1.1) + '@tavily/core': + specifier: 0.5.11 + version: 0.5.11 + ai: + specifier: 4.3.19 + version: 4.3.19(react@19.1.1)(zod@4.1.1) + devDependencies: + '@types/node': + specifier: ^22.10.5 + version: 22.17.2 + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + tsup: + specifier: ^8.0.0 + version: 8.5.0(typescript@5.9.2) + typescript: + specifier: ^5.7.3 + version: 5.9.2 + +packages: + + '@ai-sdk/openai-compatible@0.2.16': + resolution: {integrity: 
sha512-LkvfcM8slJedRyJa/MiMiaOzcMjV1zNDwzTHEGz7aAsgsQV0maLfmJRi/nuSwf5jmp0EouC+JXXDUj2l94HgQw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/provider-utils@2.2.8': + resolution: {integrity: sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.23.8 + + '@ai-sdk/provider@1.1.3': + resolution: {integrity: sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg==} + engines: {node: '>=18'} + + '@ai-sdk/react@1.2.12': + resolution: {integrity: sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true + + '@ai-sdk/ui-utils@1.2.11': + resolution: {integrity: sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.23.8 + + '@esbuild/aix-ppc64@0.25.9': + resolution: {integrity: sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.25.9': + resolution: {integrity: sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.25.9': + resolution: {integrity: sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.25.9': + resolution: {integrity: sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.25.9': + resolution: {integrity: sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.25.9': + resolution: {integrity: sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.25.9': + resolution: {integrity: sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.25.9': + resolution: {integrity: sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.25.9': + resolution: {integrity: sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.25.9': + resolution: {integrity: sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.25.9': + resolution: {integrity: sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.25.9': + resolution: {integrity: 
sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.25.9': + resolution: {integrity: sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.25.9': + resolution: {integrity: sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.25.9': + resolution: {integrity: sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.25.9': + resolution: {integrity: sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.25.9': + resolution: {integrity: sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.25.9': + resolution: {integrity: sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.25.9': + resolution: {integrity: sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.25.9': + resolution: {integrity: sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.25.9': + resolution: {integrity: sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.25.9': + resolution: {integrity: sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.25.9': + resolution: {integrity: sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.25.9': + resolution: {integrity: sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.25.9': + resolution: {integrity: sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.25.9': + resolution: {integrity: sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@isaacs/balanced-match@4.0.1': + resolution: {integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==} + engines: {node: 20 || >=22} + + '@isaacs/brace-expansion@5.0.0': + resolution: {integrity: sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==} + engines: {node: 20 || >=22} + + '@isaacs/cliui@8.0.2': 
+ resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.30': + resolution: {integrity: sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@rollup/rollup-android-arm-eabi@4.46.4': + resolution: {integrity: sha512-B2wfzCJ+ps/OBzRjeds7DlJumCU3rXMxJJS1vzURyj7+KBHGONm7c9q1TfdBl4vCuNMkDvARn3PBl2wZzuR5mw==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.46.4': + resolution: {integrity: sha512-FGJYXvYdn8Bs6lAlBZYT5n+4x0ciEp4cmttsvKAZc/c8/JiPaQK8u0c/86vKX8lA7OY/+37lIQSe0YoAImvBAA==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.46.4': + resolution: {integrity: sha512-/9qwE/BM7ATw/W/OFEMTm3dmywbJyLQb4f4v5nmOjgYxPIGpw7HaxRi6LnD4Pjn/q7k55FGeHe1/OD02w63apA==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.46.4': + resolution: {integrity: sha512-QkWfNbeRuzFnv2d0aPlrzcA3Ebq2mE8kX/5Pl7VdRShbPBjSnom7dbT8E3Jmhxo2RL784hyqGvR5KHavCJQciw==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.46.4': + resolution: {integrity: sha512-+ToyOMYnSfV8D+ckxO6NthPln/PDNp1P6INcNypfZ7muLmEvPKXqduUiD8DlJpMMT8LxHcE5W0dK9kXfJke9Zw==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.46.4': + resolution: {integrity: sha512-cGT6ey/W+sje6zywbLiqmkfkO210FgRz7tepWAzzEVgQU8Hn91JJmQWNqs55IuglG8sJdzk7XfNgmGRtcYlo1w==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.46.4': + resolution: {integrity: sha512-9fhTJyOb275w5RofPSl8lpr4jFowd+H4oQKJ9XTYzD1JWgxdZKE8bA6d4npuiMemkecQOcigX01FNZNCYnQBdA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.46.4': + resolution: {integrity: sha512-+6kCIM5Zjvz2HwPl/udgVs07tPMIp1VU2Y0c72ezjOvSvEfAIWsUgpcSDvnC7g9NrjYR6X9bZT92mZZ90TfvXw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.46.4': + resolution: {integrity: sha512-SWuXdnsayCZL4lXoo6jn0yyAj7TTjWE4NwDVt9s7cmu6poMhtiras5c8h6Ih6Y0Zk6Z+8t/mLumvpdSPTWub2Q==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.46.4': + resolution: {integrity: sha512-vDknMDqtMhrrroa5kyX6tuC0aRZZlQ+ipDfbXd2YGz5HeV2t8HOl/FDAd2ynhs7Ki5VooWiiZcCtxiZ4IjqZwQ==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loongarch64-gnu@4.46.4': + resolution: {integrity: sha512-mCBkjRZWhvjtl/x+Bd4fQkWZT8canStKDxGrHlBiTnZmJnWygGcvBylzLVCZXka4dco5ymkWhZlLwKCGFF4ivw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.46.4': + resolution: {integrity: sha512-YMdz2phOTFF+Z66dQfGf0gmeDSi5DJzY5bpZyeg9CPBkV9QDzJ1yFRlmi/j7WWRf3hYIWrOaJj5jsfwgc8GTHQ==} + cpu: [ppc64] + os: 
[linux] + + '@rollup/rollup-linux-riscv64-gnu@4.46.4': + resolution: {integrity: sha512-r0WKLSfFAK8ucG024v2yiLSJMedoWvk8yWqfNICX28NHDGeu3F/wBf8KG6mclghx4FsLePxJr/9N8rIj1PtCnw==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.46.4': + resolution: {integrity: sha512-IaizpPP2UQU3MNyPH1u0Xxbm73D+4OupL0bjo4Hm0496e2wg3zuvoAIhubkD1NGy9fXILEExPQy87mweujEatA==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.46.4': + resolution: {integrity: sha512-aCM29orANR0a8wk896p6UEgIfupReupnmISz6SUwMIwTGaTI8MuKdE0OD2LvEg8ondDyZdMvnaN3bW4nFbATPA==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.46.4': + resolution: {integrity: sha512-0Xj1vZE3cbr/wda8d/m+UeuSL+TDpuozzdD4QaSzu/xSOMK0Su5RhIkF7KVHFQsobemUNHPLEcYllL7ZTCP/Cg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.46.4': + resolution: {integrity: sha512-kM/orjpolfA5yxsx84kI6bnK47AAZuWxglGKcNmokw2yy9i5eHY5UAjcX45jemTJnfHAWo3/hOoRqEeeTdL5hw==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.46.4': + resolution: {integrity: sha512-cNLH4psMEsWKILW0isbpQA2OvjXLbKvnkcJFmqAptPQbtLrobiapBJVj6RoIvg6UXVp5w0wnIfd/Q56cNpF+Ew==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.46.4': + resolution: {integrity: sha512-OiEa5lRhiANpv4SfwYVgQ3opYWi/QmPDC5ve21m8G9pf6ZO+aX1g2EEF1/IFaM1xPSP7mK0msTRXlPs6mIagkg==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.46.4': + resolution: {integrity: sha512-IKL9mewGZ5UuuX4NQlwOmxPyqielvkAPUS2s1cl6yWjjQvyN3h5JTdVFGD5Jr5xMjRC8setOfGQDVgX8V+dkjg==} + cpu: [x64] + os: [win32] + + '@tavily/core@0.5.11': + resolution: {integrity: sha512-SHYjpWgeyHpJNZh8RZCUcRtb228ymLzNlVOqrfCql6vOlSiz8q3wy5VEVtfdN881pg6wHPLa2Rra3xVyPhuYuw==} + + '@types/diff-match-patch@1.0.36': + resolution: {integrity: sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/node@22.17.2': + resolution: {integrity: sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w==} + + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + ai@4.3.19: + resolution: {integrity: sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.23.8 + peerDependenciesMeta: + react: + optional: true + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.0: + resolution: {integrity: sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + any-promise@1.3.0: + resolution: 
{integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.11.0: + resolution: {integrity: sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + bundle-require@5.1.0: + resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.18' + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + chalk@5.6.0: + resolution: {integrity: sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + + confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + 
engines: {node: '>=6'} + + diff-match-patch@1.0.5: + resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + esbuild@0.25.9: + resolution: {integrity: sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==} + engines: {node: '>=18'} + hasBin: true + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + fix-dts-default-cjs-exports@1.0.1: + resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} + engines: {node: '>= 6'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + glob@10.4.5: + resolution: {integrity: 
sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + + glob@11.0.3: + resolution: {integrity: sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==} + engines: {node: 20 || >=22} + hasBin: true + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jackspeak@4.1.1: + resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} + engines: {node: 20 || >=22} + + joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + + js-tiktoken@1.0.21: + resolution: {integrity: sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==} + + json-schema@0.4.0: + resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} + + jsondiffpatch@0.6.0: + resolution: {integrity: sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + + lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} + engines: {node: '>=14'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lodash.sortby@4.7.0: + resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + + lru-cache@11.1.0: + resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==} + engines: {node: 20 || >=22} + + magic-string@0.30.17: + resolution: {integrity: 
sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + minimatch@10.0.3: + resolution: {integrity: sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==} + engines: {node: 20 || >=22} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + mlly@1.7.4: + resolution: {integrity: sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-scurry@2.0.0: + resolution: {integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==} + engines: {node: 20 || >=22} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + + postcss-load-config@6.0.1: + resolution: {integrity: 
sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} + engines: {node: '>= 18'} + peerDependencies: + jiti: '>=1.21.0' + postcss: '>=8.0.9' + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + jiti: + optional: true + postcss: + optional: true + tsx: + optional: true + yaml: + optional: true + + proxy-from-env@1.1.0: + resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + react@19.1.1: + resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==} + engines: {node: '>=0.10.0'} + + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + rimraf@6.0.1: + resolution: {integrity: sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==} + engines: {node: 20 || >=22} + hasBin: true + + rollup@4.46.4: + resolution: {integrity: sha512-YbxoxvoqNg9zAmw4+vzh1FkGAiZRK+LhnSrbSrSXMdZYsRPDWoshcSd/pldKRO6lWzv/e9TiJAVQyirYIeSIPQ==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + secure-json-parse@2.7.0: + resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + source-map@0.8.0-beta.0: + resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} + engines: {node: '>= 8'} + deprecated: The work that was done in this beta branch won't be included in future versions + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + sucrase@3.35.0: + resolution: {integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + + swr@2.3.6: + resolution: {integrity: sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==} + peerDependencies: + 
react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + + thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + + throttleit@2.1.0: + resolution: {integrity: sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==} + engines: {node: '>=18'} + + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyglobby@0.2.14: + resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} + engines: {node: '>=12.0.0'} + + tr46@1.0.1: + resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + + tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + + ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + + tsup@8.5.0: + resolution: {integrity: sha512-VmBp77lWNQq6PfuMqCHD3xWl22vEoWsKajkF8t+yMBawlUS8JzEI+vOVMeuNZIuMML8qXRizFKi9oD5glKQVcQ==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} + engines: {node: '>=14.17'} + hasBin: true + + ufo@1.6.1: + resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + use-sync-external-store@1.5.0: + resolution: {integrity: sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + webidl-conversions@4.0.2: + resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} + + whatwg-url@7.1.0: + resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + zod-to-json-schema@3.24.6: + resolution: {integrity: sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==} + peerDependencies: + zod: ^3.24.1 + + zod@4.1.1: + resolution: {integrity: 
sha512-SgMZK/h8Tigt9nnKkfJMvB/mKjiJXaX26xegP4sa+0wHIFVFWVlsQGdhklDmuargBD3Hsi3rsQRIzwJIhTPJHA==} + +snapshots: + + '@ai-sdk/openai-compatible@0.2.16(zod@4.1.1)': + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@4.1.1) + zod: 4.1.1 + + '@ai-sdk/provider-utils@2.2.8(zod@4.1.1)': + dependencies: + '@ai-sdk/provider': 1.1.3 + nanoid: 3.3.11 + secure-json-parse: 2.7.0 + zod: 4.1.1 + + '@ai-sdk/provider@1.1.3': + dependencies: + json-schema: 0.4.0 + + '@ai-sdk/react@1.2.12(react@19.1.1)(zod@4.1.1)': + dependencies: + '@ai-sdk/provider-utils': 2.2.8(zod@4.1.1) + '@ai-sdk/ui-utils': 1.2.11(zod@4.1.1) + react: 19.1.1 + swr: 2.3.6(react@19.1.1) + throttleit: 2.1.0 + optionalDependencies: + zod: 4.1.1 + + '@ai-sdk/ui-utils@1.2.11(zod@4.1.1)': + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@4.1.1) + zod: 4.1.1 + zod-to-json-schema: 3.24.6(zod@4.1.1) + + '@esbuild/aix-ppc64@0.25.9': + optional: true + + '@esbuild/android-arm64@0.25.9': + optional: true + + '@esbuild/android-arm@0.25.9': + optional: true + + '@esbuild/android-x64@0.25.9': + optional: true + + '@esbuild/darwin-arm64@0.25.9': + optional: true + + '@esbuild/darwin-x64@0.25.9': + optional: true + + '@esbuild/freebsd-arm64@0.25.9': + optional: true + + '@esbuild/freebsd-x64@0.25.9': + optional: true + + '@esbuild/linux-arm64@0.25.9': + optional: true + + '@esbuild/linux-arm@0.25.9': + optional: true + + '@esbuild/linux-ia32@0.25.9': + optional: true + + '@esbuild/linux-loong64@0.25.9': + optional: true + + '@esbuild/linux-mips64el@0.25.9': + optional: true + + '@esbuild/linux-ppc64@0.25.9': + optional: true + + '@esbuild/linux-riscv64@0.25.9': + optional: true + + '@esbuild/linux-s390x@0.25.9': + optional: true + + '@esbuild/linux-x64@0.25.9': + optional: true + + '@esbuild/netbsd-arm64@0.25.9': + optional: true + + '@esbuild/netbsd-x64@0.25.9': + optional: true + + '@esbuild/openbsd-arm64@0.25.9': + optional: true + + '@esbuild/openbsd-x64@0.25.9': + optional: true + + '@esbuild/openharmony-arm64@0.25.9': + optional: true + + '@esbuild/sunos-x64@0.25.9': + optional: true + + '@esbuild/win32-arm64@0.25.9': + optional: true + + '@esbuild/win32-ia32@0.25.9': + optional: true + + '@esbuild/win32-x64@0.25.9': + optional: true + + '@isaacs/balanced-match@4.0.1': {} + + '@isaacs/brace-expansion@5.0.0': + dependencies: + '@isaacs/balanced-match': 4.0.1 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.30 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.30': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@opentelemetry/api@1.9.0': {} + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@rollup/rollup-android-arm-eabi@4.46.4': + optional: true + + '@rollup/rollup-android-arm64@4.46.4': + optional: true + + '@rollup/rollup-darwin-arm64@4.46.4': + optional: true + + '@rollup/rollup-darwin-x64@4.46.4': + optional: true + + '@rollup/rollup-freebsd-arm64@4.46.4': + optional: true + + '@rollup/rollup-freebsd-x64@4.46.4': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.46.4': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.46.4': + optional: true + + 
'@rollup/rollup-linux-arm64-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.46.4': + optional: true + + '@rollup/rollup-linux-loongarch64-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.46.4': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.46.4': + optional: true + + '@rollup/rollup-linux-x64-musl@4.46.4': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.46.4': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.46.4': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.46.4': + optional: true + + '@tavily/core@0.5.11': + dependencies: + axios: 1.11.0 + https-proxy-agent: 7.0.6 + js-tiktoken: 1.0.21 + transitivePeerDependencies: + - debug + - supports-color + + '@types/diff-match-patch@1.0.36': {} + + '@types/estree@1.0.8': {} + + '@types/node@22.17.2': + dependencies: + undici-types: 6.21.0 + + acorn@8.15.0: {} + + agent-base@7.1.4: {} + + ai@4.3.19(react@19.1.1)(zod@4.1.1): + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@4.1.1) + '@ai-sdk/react': 1.2.12(react@19.1.1)(zod@4.1.1) + '@ai-sdk/ui-utils': 1.2.11(zod@4.1.1) + '@opentelemetry/api': 1.9.0 + jsondiffpatch: 0.6.0 + zod: 4.1.1 + optionalDependencies: + react: 19.1.1 + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.0: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.1: {} + + any-promise@1.3.0: {} + + asynckit@0.4.0: {} + + axios@1.11.0: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.4 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + + balanced-match@1.0.2: {} + + base64-js@1.5.1: {} + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + bundle-require@5.1.0(esbuild@0.25.9): + dependencies: + esbuild: 0.25.9 + load-tsconfig: 0.2.5 + + cac@6.7.14: {} + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + chalk@5.6.0: {} + + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + commander@4.1.1: {} + + confbox@0.1.8: {} + + consola@3.4.2: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + debug@4.4.1: + dependencies: + ms: 2.1.3 + + delayed-stream@1.0.0: {} + + dequal@2.0.3: {} + + diff-match-patch@1.0.5: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + esbuild@0.25.9: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.9 + '@esbuild/android-arm': 0.25.9 + '@esbuild/android-arm64': 0.25.9 + '@esbuild/android-x64': 0.25.9 + '@esbuild/darwin-arm64': 0.25.9 + '@esbuild/darwin-x64': 0.25.9 + '@esbuild/freebsd-arm64': 0.25.9 + '@esbuild/freebsd-x64': 0.25.9 + '@esbuild/linux-arm': 0.25.9 + '@esbuild/linux-arm64': 0.25.9 + '@esbuild/linux-ia32': 0.25.9 + '@esbuild/linux-loong64': 0.25.9 + '@esbuild/linux-mips64el': 0.25.9 + '@esbuild/linux-ppc64': 0.25.9 + 
'@esbuild/linux-riscv64': 0.25.9 + '@esbuild/linux-s390x': 0.25.9 + '@esbuild/linux-x64': 0.25.9 + '@esbuild/netbsd-arm64': 0.25.9 + '@esbuild/netbsd-x64': 0.25.9 + '@esbuild/openbsd-arm64': 0.25.9 + '@esbuild/openbsd-x64': 0.25.9 + '@esbuild/openharmony-arm64': 0.25.9 + '@esbuild/sunos-x64': 0.25.9 + '@esbuild/win32-arm64': 0.25.9 + '@esbuild/win32-ia32': 0.25.9 + '@esbuild/win32-x64': 0.25.9 + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + fix-dts-default-cjs-exports@1.0.1: + dependencies: + magic-string: 0.30.17 + mlly: 1.7.4 + rollup: 4.46.4 + + follow-redirects@1.15.11: {} + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + form-data@4.0.4: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob@10.4.5: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + glob@11.0.3: + dependencies: + foreground-child: 3.3.1 + jackspeak: 4.1.1 + minimatch: 10.0.3 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 2.0.0 + + gopd@1.2.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.1 + transitivePeerDependencies: + - supports-color + + is-fullwidth-code-point@3.0.0: {} + + isexe@2.0.0: {} + + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jackspeak@4.1.1: + dependencies: + '@isaacs/cliui': 8.0.2 + + joycon@3.1.1: {} + + js-tiktoken@1.0.21: + dependencies: + base64-js: 1.5.1 + + json-schema@0.4.0: {} + + jsondiffpatch@0.6.0: + dependencies: + '@types/diff-match-patch': 1.0.36 + chalk: 5.6.0 + diff-match-patch: 1.0.5 + + lilconfig@3.1.3: {} + + lines-and-columns@1.2.4: {} + + load-tsconfig@0.2.5: {} + + lodash.sortby@4.7.0: {} + + lru-cache@10.4.3: {} + + lru-cache@11.1.0: {} + + magic-string@0.30.17: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + math-intrinsics@1.1.0: {} + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + minimatch@10.0.3: + dependencies: + '@isaacs/brace-expansion': 5.0.0 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + minipass@7.1.2: {} + + mlly@1.7.4: + dependencies: + acorn: 8.15.0 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.1 + + ms@2.1.3: {} + + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + + nanoid@3.3.11: {} + + object-assign@4.1.1: {} + + package-json-from-dist@1.0.1: {} + + path-key@3.1.1: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + path-scurry@2.0.0: + dependencies: + lru-cache: 11.1.0 + minipass: 7.1.2 + + pathe@2.0.3: {} + + picocolors@1.1.1: {} + + picomatch@4.0.3: {} + + pirates@4.0.7: {} + + pkg-types@1.3.1: + dependencies: + confbox: 0.1.8 + mlly: 1.7.4 + pathe: 2.0.3 + + postcss-load-config@6.0.1: + 
dependencies: + lilconfig: 3.1.3 + + proxy-from-env@1.1.0: {} + + punycode@2.3.1: {} + + react@19.1.1: {} + + readdirp@4.1.2: {} + + resolve-from@5.0.0: {} + + rimraf@6.0.1: + dependencies: + glob: 11.0.3 + package-json-from-dist: 1.0.1 + + rollup@4.46.4: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.46.4 + '@rollup/rollup-android-arm64': 4.46.4 + '@rollup/rollup-darwin-arm64': 4.46.4 + '@rollup/rollup-darwin-x64': 4.46.4 + '@rollup/rollup-freebsd-arm64': 4.46.4 + '@rollup/rollup-freebsd-x64': 4.46.4 + '@rollup/rollup-linux-arm-gnueabihf': 4.46.4 + '@rollup/rollup-linux-arm-musleabihf': 4.46.4 + '@rollup/rollup-linux-arm64-gnu': 4.46.4 + '@rollup/rollup-linux-arm64-musl': 4.46.4 + '@rollup/rollup-linux-loongarch64-gnu': 4.46.4 + '@rollup/rollup-linux-ppc64-gnu': 4.46.4 + '@rollup/rollup-linux-riscv64-gnu': 4.46.4 + '@rollup/rollup-linux-riscv64-musl': 4.46.4 + '@rollup/rollup-linux-s390x-gnu': 4.46.4 + '@rollup/rollup-linux-x64-gnu': 4.46.4 + '@rollup/rollup-linux-x64-musl': 4.46.4 + '@rollup/rollup-win32-arm64-msvc': 4.46.4 + '@rollup/rollup-win32-ia32-msvc': 4.46.4 + '@rollup/rollup-win32-x64-msvc': 4.46.4 + fsevents: 2.3.3 + + secure-json-parse@2.7.0: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + signal-exit@4.1.0: {} + + source-map@0.8.0-beta.0: + dependencies: + whatwg-url: 7.1.0 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.2.0 + + sucrase@3.35.0: + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + commander: 4.1.1 + glob: 10.4.5 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.7 + ts-interface-checker: 0.1.13 + + swr@2.3.6(react@19.1.1): + dependencies: + dequal: 2.0.3 + react: 19.1.1 + use-sync-external-store: 1.5.0(react@19.1.1) + + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + + throttleit@2.1.0: {} + + tinyexec@0.3.2: {} + + tinyglobby@0.2.14: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + tr46@1.0.1: + dependencies: + punycode: 2.3.1 + + tree-kill@1.2.2: {} + + ts-interface-checker@0.1.13: {} + + tsup@8.5.0(typescript@5.9.2): + dependencies: + bundle-require: 5.1.0(esbuild@0.25.9) + cac: 6.7.14 + chokidar: 4.0.3 + consola: 3.4.2 + debug: 4.4.1 + esbuild: 0.25.9 + fix-dts-default-cjs-exports: 1.0.1 + joycon: 3.1.1 + picocolors: 1.1.1 + postcss-load-config: 6.0.1 + resolve-from: 5.0.0 + rollup: 4.46.4 + source-map: 0.8.0-beta.0 + sucrase: 3.35.0 + tinyexec: 0.3.2 + tinyglobby: 0.2.14 + tree-kill: 1.2.2 + optionalDependencies: + typescript: 5.9.2 + transitivePeerDependencies: + - jiti + - supports-color + - tsx + - yaml + + typescript@5.9.2: {} + + ufo@1.6.1: {} + + undici-types@6.21.0: {} + + use-sync-external-store@1.5.0(react@19.1.1): + dependencies: + react: 19.1.1 + + webidl-conversions@4.0.2: {} + + whatwg-url@7.1.0: + dependencies: + lodash.sortby: 4.7.0 + tr46: 1.0.1 + webidl-conversions: 4.0.2 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + zod-to-json-schema@3.24.6(zod@4.1.1): + 
dependencies: + zod: 4.1.1 + + zod@4.1.1: {} diff --git a/tools/cortensor-openai-provider/src/constants.ts b/tools/cortensor-openai-provider/src/constants.ts new file mode 100644 index 0000000..c8cf120 --- /dev/null +++ b/tools/cortensor-openai-provider/src/constants.ts @@ -0,0 +1,31 @@ +/** + * Default configuration constants for Cortensor Provider + * + * This file centralizes all default values used throughout the provider + * to ensure consistency and easy maintenance. + */ + +/** + * Default model configuration values + */ +export const DEFAULT_MODEL_CONFIG = { + modelName: 'cortensor-chat', + temperature: 0.5, + maxTokens: 64000, + topP: 0.95, + topK: 40, + presencePenalty: 0, + frequencyPenalty: 0, + stream: false, + timeout: 60 * 15, + promptType: 1, + promptTemplate: '' +} as const; +export const MAX_INPUT_TOKEN = 20000; + +/** + * Maximum number of words to include from search result snippets + * This helps keep the context focused and prevents overwhelming the AI with too much information + */ +export const SEARCH_SNIPPET_WORD_LIMIT = 200; + diff --git a/tools/cortensor-openai-provider/src/index.ts b/tools/cortensor-openai-provider/src/index.ts new file mode 100644 index 0000000..b992853 --- /dev/null +++ b/tools/cortensor-openai-provider/src/index.ts @@ -0,0 +1,73 @@ +/** + * Cortensor AI Provider + * + * A drop-in OpenAI-compatible provider for the Cortensor API that works seamlessly + * with the Vercel AI SDK and any framework that supports OpenAI providers. + * + * @example + * ```typescript + * import { cortensorProvider, cortensorModel } from 'cortensor-ai-provider'; + * import { generateText } from 'ai'; + * + * // Using the provider directly + * const result = await generateText({ + * model: cortensorProvider('cortensor-chat'), + * prompt: 'Hello, world!', + * }); + * + * // Using the model with configuration + * const model = cortensorModel({ + * sessionId: 123, + * temperature: 0.7, + * maxTokens: 1000, + * }); + * + * const result = await generateText({ + * model, + * prompt: 'Tell me a story', + * }); + * ``` + */ + + + +// Main provider exports +export { + cortensorProvider, + cortensorModel, + createCortensorProvider, + extractModelConfiguration, +} from './provider'; + +// Constants exports +export { + DEFAULT_MODEL_CONFIG, +} from './constants'; + + +// All other type exports from types +export type * from './types'; + +// Tavily provider exports +export { + createTavilySearch, +} from './providers/tavily'; + + +// Transformer function exports +export { + transformToCortensor, + transformToOpenAI, +} from './transformers'; + +// Web search function exports +export { + extractSearchDirectives, + generateSearchQuery, + buildPromptWithSearchResults, +} from './websearch'; + +// Utility function exports +export { + formatSearchResults, +} from './utils'; \ No newline at end of file diff --git a/tools/cortensor-openai-provider/src/provider.ts b/tools/cortensor-openai-provider/src/provider.ts new file mode 100644 index 0000000..9e1d91f --- /dev/null +++ b/tools/cortensor-openai-provider/src/provider.ts @@ -0,0 +1,451 @@ +/** + * Cortensor Provider for Mastra AI + * + * This module provides integration between the Cortensor API and the Vercel AI SDK. + * It creates an OpenAI-compatible interface that handles session management, + * request/response transformations, and error handling automatically. 
+ */ + +import { createOpenAICompatible } from '@ai-sdk/openai-compatible'; +import { transformToCortensor, transformToOpenAI } from './transformers'; +import type { CortensorConfig, CortensorModelConfig, WebSearchResult, WebSearchCallback } from './types'; +import { DEFAULT_MODEL_CONFIG } from './constants'; + +// Global registry for web search providers to handle function serialization +const webSearchProviderRegistry = new Map(); +let providerIdCounter = 0; + +// ============================================================================ +// ENVIRONMENT CONFIGURATION +// ============================================================================ + +// Load environment variables (validation happens at runtime) +const CORTENSOR_API_KEY = process.env.CORTENSOR_API_KEY; +const CORTENSOR_BASE_URL = process.env.CORTENSOR_BASE_URL; + +// ============================================================================ +// CUSTOM ERROR CLASSES +// ============================================================================ + +/** + * Base error class for Cortensor-related errors + */ +export class CortensorError extends Error { + constructor(message: string, public code: string) { + super(message); + this.name = 'CortensorError'; + } +} + +/** + * Error thrown when web search operations fail + */ +export class WebSearchError extends CortensorError { + constructor(message: string) { + super(message, 'WEB_SEARCH_ERROR'); + } +} + +/** + * Error thrown when configuration is invalid + */ +export class ConfigurationError extends CortensorError { + constructor(message: string) { + super(message, 'CONFIGURATION_ERROR'); + } +} + +/** + * Validates Cortensor configuration at runtime + * @param apiKey - API key to validate + * @param baseUrl - Base URL to validate + * @throws ConfigurationError if validation fails + */ +function validateCortensorConfig(apiKey?: string, baseUrl?: string): void { + if (!apiKey) { + throw new ConfigurationError( + 'CORTENSOR_API_KEY is required. Set it as environment variable or pass it explicitly.' + ); + } + if (!baseUrl) { + throw new ConfigurationError( + 'CORTENSOR_BASE_URL is required. Set it as environment variable or pass it explicitly.' + ); + } +} + + +/** + * Extracts model configuration and session ID from request body + * @param requestBody - The request body as string + * @returns Object containing sessionId and modelConfig with defaults applied + * @throws Error if configuration cannot be extracted + */ +export function extractModelConfiguration(requestBody: string): { + sessionId: number; + modelConfig?: CortensorModelConfig; +} { + try { + const parsedBody = JSON.parse(requestBody); + + const modelName = parsedBody.model; + + if (typeof modelName !== 'string') { + throw new Error('Model name must be a string'); + } + + + + // Extract configuration from model name (format: modelname-config-base64encodedconfig) + const configMatch = modelName.match(/-config-([A-Za-z0-9+/=]+)$/); + if (!configMatch || !configMatch[1]) { + throw new Error('Configuration not found in model name. 
Model name should end with "-config-{base64EncodedConfig}"');
+    }
+
+    // Decode the base64 encoded configuration
+    const configBase64 = configMatch[1];
+    const configJson = Buffer.from(configBase64, 'base64').toString('utf-8');
+    const decodedConfig = JSON.parse(configJson) as Partial<CortensorModelConfig>;
+
+    if (!decodedConfig.sessionId) {
+      throw new Error('Session ID not found in model configuration');
+    }
+
+    // Merge decoded configuration with defaults
+    const modelConfig: CortensorModelConfig = {
+      sessionId: decodedConfig.sessionId,
+      modelName: decodedConfig.modelName ?? DEFAULT_MODEL_CONFIG.modelName,
+      temperature: decodedConfig.temperature ?? DEFAULT_MODEL_CONFIG.temperature,
+      maxTokens: decodedConfig.maxTokens ?? DEFAULT_MODEL_CONFIG.maxTokens,
+      topP: decodedConfig.topP ?? DEFAULT_MODEL_CONFIG.topP,
+      topK: decodedConfig.topK ?? DEFAULT_MODEL_CONFIG.topK,
+      presencePenalty: decodedConfig.presencePenalty ?? DEFAULT_MODEL_CONFIG.presencePenalty,
+      frequencyPenalty: decodedConfig.frequencyPenalty ?? DEFAULT_MODEL_CONFIG.frequencyPenalty,
+      stream: decodedConfig.stream ?? DEFAULT_MODEL_CONFIG.stream,
+      timeout: decodedConfig.timeout ?? DEFAULT_MODEL_CONFIG.timeout,
+      promptType: decodedConfig.promptType ?? DEFAULT_MODEL_CONFIG.promptType,
+      promptTemplate: decodedConfig.promptTemplate ?? DEFAULT_MODEL_CONFIG.promptTemplate
+    };
+
+    // Copy web search configuration if present
+    if (decodedConfig.webSearch) {
+      modelConfig.webSearch = { ...decodedConfig.webSearch };
+
+      // Restore web search provider function from registry if it's a reference
+      if (modelConfig.webSearch.provider && typeof modelConfig.webSearch.provider === 'string' && (modelConfig.webSearch.provider as string).startsWith('provider_')) {
+        const providerId = modelConfig.webSearch.provider as string;
+        const providerFunction = webSearchProviderRegistry.get(providerId);
+
+        if (providerFunction) {
+          modelConfig.webSearch.provider = providerFunction;
+        } else {
+          delete modelConfig.webSearch.provider;
+        }
+      }
+    }
+
+    const result = {
+      sessionId: modelConfig.sessionId,
+      modelConfig
+    };
+
+    return result;
+  } catch (error) {
+    const errorMessage = error instanceof Error ?
error.message : 'Unknown error'; + throw new Error(`Failed to extract model configuration: ${errorMessage}`); + } +} + +/** + * Creates a standardized error response for the provider + * @param error - The error that occurred + * @returns Response object with error details + */ +function createProviderErrorResponse(error: unknown): Response { + let errorMessage = 'Unknown error'; + let errorCode = 'UNKNOWN_ERROR'; + let statusCode = 500; + + if (error instanceof CortensorError) { + errorMessage = error.message; + errorCode = error.code; + + // Set appropriate status codes for different error types + if (error instanceof ConfigurationError) { + statusCode = 400; // Bad Request + } else if (error instanceof WebSearchError) { + statusCode = 502; // Bad Gateway + } + } else if (error instanceof Error) { + errorMessage = error.message; + } + + const errorResponse = { + error: { + message: errorMessage, + type: 'provider_error', + code: errorCode + } + }; + + return new Response( + JSON.stringify(errorResponse), + { + status: statusCode, + headers: { + 'Content-Type': 'application/json' + } + } + ); +} + +/** + * Handles the core request processing logic + * @param requestBody - The request body as string + * @returns Promise - The processed response + */ +async function processRequest(requestBody: string): Promise { + // Extract configuration from request + const { sessionId, modelConfig } = extractModelConfiguration(requestBody); + + // Transform to Cortensor format + const transformResult = await transformToCortensor(requestBody, sessionId, modelConfig); + + // Prepare API request + const cortensorUrl = `${CORTENSOR_BASE_URL}/api/v1/completions`; + + const cortensorOptions: RequestInit = { + method: 'POST', + headers: { + 'Authorization': `Bearer ${CORTENSOR_API_KEY}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(transformResult.request), + }; + + // Make API call + const cortensorResponse = await fetch(cortensorUrl, cortensorOptions); + + if (!cortensorResponse.ok) { + throw new Error(`Cortensor API error: ${cortensorResponse.status} ${cortensorResponse.statusText}`); + } + + // Process response + const responseText = await cortensorResponse.text(); + + const cortensorResponseClone = new Response(responseText, { + status: cortensorResponse.status, + statusText: cortensorResponse.statusText, + headers: cortensorResponse.headers + }); + + // Transform back to OpenAI format with web search results + const finalResponse = await transformToOpenAI(cortensorResponseClone, transformResult.webSearchResults, transformResult.searchQuery); + + return finalResponse; +} + +// ============================================================================ +// MAIN PROVIDER +// ============================================================================ + +/** + * Main Cortensor provider using OpenAI-compatible interface + * Handles session management and format transformations automatically + */ +export const cortensorProvider = createOpenAICompatible({ + name: 'cortensor', + baseURL: `${CORTENSOR_BASE_URL}`, + headers: { + 'Authorization': `Bearer ${CORTENSOR_API_KEY || ''}`, + 'Content-Type': 'application/json', + }, + fetch: async (input, options: RequestInit = {}) => { + try { + // Validate configuration at runtime + validateCortensorConfig(CORTENSOR_API_KEY, CORTENSOR_BASE_URL); + + const requestBody = options.body as string; + + const result = await processRequest(requestBody); + + return result; + } catch (error) { + const errorResponse = createProviderErrorResponse(error); + + 
return errorResponse;
+    }
+  },
+});
+
+// ============================================================================
+// MODEL CREATION UTILITIES
+// ============================================================================
+
+/**
+ * Creates a configurable Cortensor model with custom parameters
+ * @param config - Configuration options for the model (sessionId is required; all other values fall back to defaults)
+ * @returns Cortensor model instance with the applied configuration
+ */
+export function cortensorModel(config: { sessionId: number } & Partial<Omit<CortensorModelConfig, 'sessionId'>>): ReturnType<typeof cortensorProvider> {
+  // Validate required session ID
+  if (!config.sessionId) {
+    throw new Error('Session ID is required for Cortensor model creation');
+  }
+
+  // Only include explicitly provided configuration values
+  const configToEncode: Partial<CortensorModelConfig> = {
+    sessionId: config.sessionId
+  };
+
+  // Add only the properties that were explicitly provided
+  if (config.modelName !== undefined) configToEncode.modelName = config.modelName;
+  if (config.temperature !== undefined) configToEncode.temperature = config.temperature;
+  if (config.maxTokens !== undefined) configToEncode.maxTokens = config.maxTokens;
+  if (config.topP !== undefined) configToEncode.topP = config.topP;
+  if (config.topK !== undefined) configToEncode.topK = config.topK;
+  if (config.presencePenalty !== undefined) configToEncode.presencePenalty = config.presencePenalty;
+  if (config.frequencyPenalty !== undefined) configToEncode.frequencyPenalty = config.frequencyPenalty;
+  if (config.stream !== undefined) configToEncode.stream = config.stream;
+  if (config.timeout !== undefined) configToEncode.timeout = config.timeout;
+  if (config.promptType !== undefined) configToEncode.promptType = config.promptType;
+  if (config.promptTemplate !== undefined) configToEncode.promptTemplate = config.promptTemplate;
+
+  // Handle web search configuration with provider function serialization
+  if (config.webSearch !== undefined) {
+    const webSearchConfig = { ...config.webSearch };
+
+    // If there's a provider function, store it in the registry and use a reference
+    if (webSearchConfig.provider && typeof webSearchConfig.provider === 'function') {
+      const providerId = `provider_${providerIdCounter++}_${Date.now()}`;
+      webSearchProviderRegistry.set(providerId, webSearchConfig.provider);
+
+      // Replace the function with a reference
+      configToEncode.webSearch = {
+        ...webSearchConfig,
+        provider: providerId as any // Store the ID instead of the function
+      };
+    } else {
+      configToEncode.webSearch = webSearchConfig;
+    }
+  }
+
+  // Encode configuration as base64 JSON and embed it in the model name
+  const configJson = JSON.stringify(configToEncode);
+  const configBase64 = Buffer.from(configJson, 'utf-8').toString('base64');
+  const modelName = config.modelName || DEFAULT_MODEL_CONFIG.modelName;
+  const uniqueModelName = `${modelName}-config-${configBase64}`;
+
+  // Create a model instance whose unique name carries the encoded configuration
+  const modelInstance = cortensorProvider(uniqueModelName);
+
+  return modelInstance;
+}
+
+// ============================================================================
+// EXPORTS
+// ============================================================================
+
+// Re-export transformer functions for convenience
+export { transformToCortensor, transformToOpenAI } from './transformers';
+
+// ============================================================================
+// CUSTOM PROVIDER FACTORY
+// ============================================================================
+
+// Note: Model configurations are now embedded directly in model names as base64 JSON,
+// so no global state management or cleanup functions are needed.
+
+/**
+ * Creates a custom Cortensor provider with specific configuration
+ * @param config - Configuration options to override defaults
+ * @returns Configured Cortensor provider instance
+ */
+export function createCortensorProvider(config: CortensorConfig = {}) {
+  // Use provided config or fall back to environment variables
+  const apiKey = config.apiKey || CORTENSOR_API_KEY;
+  const baseURL = config.baseURL || `${CORTENSOR_BASE_URL}/v1`;
+
+  // Validate configuration
+  if (!apiKey) {
+    throw new Error('API key is required for custom Cortensor provider');
+  }
+
+  /**
+   * Custom request processor for the provider
+   * @param requestBody - The request body as string
+   * @returns Promise<Response> - The processed response
+   */
+  async function processCustomRequest(requestBody: string): Promise<Response> {
+    // Extract configuration from request
+    const { sessionId, modelConfig } = extractModelConfiguration(requestBody);
+
+    // Transform to Cortensor format (await the async transform and use its request payload)
+    const { request: cortensorRequest } = await transformToCortensor(requestBody, sessionId, modelConfig);
+
+    // Prepare API request with custom config
+    const cortensorUrl = `${CORTENSOR_BASE_URL}/api/v1/completions`;
+
+    const cortensorOptions: RequestInit = {
+      method: 'POST',
+      headers: {
+        'Authorization': `Bearer ${apiKey}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify(cortensorRequest),
+    };
+
+    // Make API call
+    const cortensorResponse = await fetch(cortensorUrl, cortensorOptions);
+
+    if (!cortensorResponse.ok) {
+      throw new Error(`Cortensor API error: ${cortensorResponse.status} ${cortensorResponse.statusText}`);
+    }
+
+    // Process response
+    const responseText = await cortensorResponse.text();
+
+    const cortensorResponseClone = new Response(responseText, {
+      status: cortensorResponse.status,
+      statusText: cortensorResponse.statusText,
+      headers: cortensorResponse.headers
+    });
+
+    // Transform back to OpenAI format
+    const result = await transformToOpenAI(cortensorResponseClone);
+
+    return result;
+  }
+
+  // Return configured provider
+  return createOpenAICompatible({
+    name: 'cortensor-custom',
+    baseURL,
+    headers: {
+      'Authorization': `Bearer ${apiKey}`,
+      'Content-Type': 'application/json',
+    },
+    fetch: async (input, options: RequestInit = {}) => {
+      try {
+        const requestBody = options.body as string;
+
+        const result = await processCustomRequest(requestBody);
+
+        return result;
+      } catch (error) {
+        return createProviderErrorResponse(error);
+      }
+    }
+  });
+}
diff --git a/tools/cortensor-openai-provider/src/providers/tavily.ts b/tools/cortensor-openai-provider/src/providers/tavily.ts
new file mode 100644
index 0000000..7b33e63
--- /dev/null
+++ b/tools/cortensor-openai-provider/src/providers/tavily.ts
@@ -0,0 +1,61 @@
+/**
+ * Simple Tavily Web Search Provider
+ */
+
+// import type { WebSearchResult, WebSearchCallback } from '../provider.js';
+import type { WebSearchCallback, WebSearchResult } from '@/types';
+import { tavily, type TavilySearchOptions } from '@tavily/core';
+import { truncateSnippet } from '../utils';
+
+/**
+ * Create a simple Tavily search function
+ */
+export function createTavilySearch(options: TavilySearchOptions = {}): WebSearchCallback {
+  const apiKeyToBeUsed = options.apiKey || process.env.TAVILY_API_KEY;
+
+  if (!apiKeyToBeUsed) {
+    throw new Error('Tavily API key is required. Provide it as parameter or set TAVILY_API_KEY environment variable.');
+  }
+
+  const client = tavily({ apiKey: apiKeyToBeUsed });
+
+  return async (query: string): Promise<WebSearchResult[]> => {
+    const searchOptions = {
+      maxResults: options.maxResults || 2,
+      includeImages: options.includeImages || false,
+      searchDepth: options.searchDepth || 'basic',
+    };
+
+    try {
+      const response = await client.search(query, searchOptions);
+
+      if (!response.results || response.results.length === 0) {
+        return [];
+      }
+
+      const mappedResults = response.results.map((result: any) => {
+        const mappedResult = {
+          title: result.title || '',
+          url: result.url || '',
+          snippet: result.content ? truncateSnippet(result.content) : '',
+        };
+
+        return mappedResult;
+      });
+
+      return mappedResults;
+    } catch (error) {
+      throw new Error(`Tavily search failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
+    }
+  };
+}
\ No newline at end of file
diff --git a/tools/cortensor-openai-provider/src/transformers.ts b/tools/cortensor-openai-provider/src/transformers.ts
new file mode 100644
index 0000000..a67c595
--- /dev/null
+++ b/tools/cortensor-openai-provider/src/transformers.ts
@@ -0,0 +1,270 @@
+/**
+ * Cortensor API Transformers
+ *
+ * This module handles the conversion between OpenAI format and Cortensor API format.
+ * It provides utilities to transform requests and responses between the two formats,
+ * enabling seamless integration with the Vercel AI SDK.
+ */
+
+import type { CoreMessage } from 'ai';
+import type {
+  CortensorModelConfig,
+  WebSearchResult,
+  WebSearchCallback,
+  CortensorRequest,
+  CortensorResponse,
+  CortensorChoice,
+  CortensorUsage,
+  OpenAIRequest,
+  OpenAIResponse,
+  SearchDirectives,
+  CortensorTransformResult
+} from './types';
+import { WebSearchError, ConfigurationError } from './provider';
+import { DEFAULT_MODEL_CONFIG, MAX_INPUT_TOKEN } from './constants';
+import { extractSearchDirectives, generateSearchQuery, buildPromptWithSearchResults, handleWebSearch } from './websearch';
+import { buildFormattedPrompt, createErrorResponse, formatSearchResults } from './utils';
+
+/**
+ * Sanitizes message content by removing unwanted tokens and patterns
+ * @param content - The content to sanitize
+ * @returns Sanitized content
+ */
+function sanitizeMessageContent(content: string): string {
+  let sanitized = content
+    .replace(/<\/s>/g, '') // Remove stop tokens
+    .replace(/<s>/g, '') // Remove start tokens
+    .replace(/\[INST\]/g, '') // Remove instruction tokens
+    .replace(/\[\/INST\]/g, '') // Remove end instruction tokens
+    .trim();
+
+  return sanitized;
+}
+
+/**
+ * Transforms OpenAI request format to Cortensor API format
+ * @param requestBody - The OpenAI-formatted request body as string
+ * @param sessionId - The session ID to include in the request
+ * @param modelConfig - Optional model configuration to override defaults
+ * @returns Cortensor transform result with request and optional web search data
+ */
+export async function transformToCortensor(
+  requestBody: string,
+  sessionId: number,
+  modelConfig?: CortensorModelConfig
+): Promise<CortensorTransformResult> {
+  try {
+    const openAIRequest: OpenAIRequest = JSON.parse(requestBody);
+
+    // Extract search directives and clean messages
+    const searchDirectives = extractSearchDirectives(openAIRequest.messages, modelConfig?.webSearch);
+
+    let finalPrompt: string = '';
+    let webSearchResults: WebSearchResult[] | undefined;
+    let searchQuery: string | undefined;
+
+    // Handle
web search if needed + if (searchDirectives.shouldSearch && modelConfig?.webSearch?.provider) { + + try { + // Perform web search using flexible provider + const searchResult = await handleWebSearch( + searchDirectives.cleanedMessages, + modelConfig.webSearch + ); + + if (searchResult) { + webSearchResults = searchResult.results || []; + searchQuery = searchResult.query; + + // Build enhanced prompt with search results + finalPrompt = buildPromptWithSearchResults( + searchDirectives.cleanedMessages, + webSearchResults || [], + searchQuery + ); + } + + } catch (error) { + if (error instanceof ConfigurationError) { + throw error; + } + // Fall through to standard prompt building + } + } + + // Build standard prompt if no search or search failed + if (!finalPrompt) { + const systemMessages = searchDirectives.cleanedMessages.filter(msg => msg.role === 'system'); + const conversationMessages = searchDirectives.cleanedMessages.filter(msg => msg.role !== 'system'); + + finalPrompt = buildFormattedPrompt(systemMessages, conversationMessages); + } + + // Sanitize the final prompt before sending to AI + const sanitizedPrompt = sanitizeMessageContent(finalPrompt); + + // Create Cortensor request with model config or defaults + const cortensorRequest: CortensorRequest = { + session_id: sessionId, + prompt: sanitizedPrompt, + prompt_type: modelConfig?.promptType ?? DEFAULT_MODEL_CONFIG.promptType, + prompt_template: modelConfig?.promptTemplate ?? DEFAULT_MODEL_CONFIG.promptTemplate, + stream: modelConfig?.stream ?? DEFAULT_MODEL_CONFIG.stream, + timeout: modelConfig?.timeout ?? DEFAULT_MODEL_CONFIG.timeout, + client_reference: `user-request-${Date.now()}`, + max_tokens: modelConfig?.maxTokens ?? DEFAULT_MODEL_CONFIG.maxTokens, + temperature: modelConfig?.temperature ?? openAIRequest.temperature ?? DEFAULT_MODEL_CONFIG.temperature, + top_p: modelConfig?.topP ?? DEFAULT_MODEL_CONFIG.topP, + top_k: modelConfig?.topK ?? DEFAULT_MODEL_CONFIG.topK, + presence_penalty: modelConfig?.presencePenalty ?? DEFAULT_MODEL_CONFIG.presencePenalty, + frequency_penalty: modelConfig?.frequencyPenalty ?? 
DEFAULT_MODEL_CONFIG.frequencyPenalty + }; + + + + const result: CortensorTransformResult = { + request: cortensorRequest + }; + + if (webSearchResults) { + result.webSearchResults = webSearchResults; + } + + if (searchQuery) { + result.searchQuery = searchQuery; + } + + return result; + } catch (error) { + throw new Error('Failed to transform request to Cortensor format'); + } +} + + + +/** + * Transforms Cortensor response to OpenAI format + * @param cortensorResponse - The response from Cortensor API + * @param webSearchResults - Optional web search results to include as tool calls + * @param searchQuery - The search query used (if any) + * @returns Promise - OpenAI-formatted response + */ +export async function transformToOpenAI( + cortensorResponse: Response, + webSearchResults?: WebSearchResult[], + searchQuery?: string +): Promise { + + try { + const cortensorData = await cortensorResponse.json() as CortensorResponse; + + // Transform choices to OpenAI format + const transformedChoices = cortensorData.choices.map((choice: CortensorChoice, index: number) => { + let content = choice.text || ''; + content = sanitizeMessageContent(content); + + // Validate that we have substantial content from the AI + const hasSubstantialContent = content.trim().length > 50; // At least 50 characters of meaningful content + + // If content is too brief and we have search results, add a note about the issue + if (!hasSubstantialContent && webSearchResults && webSearchResults.length > 0) { + content = content || 'Based on the search results provided:'; + } + + // Append search results as markdown URLs to content if they exist + if (webSearchResults && webSearchResults.length > 0) { + const searchResultsMarkdown = formatSearchResults(webSearchResults); + if (searchResultsMarkdown) { + // Only add "Search Results" header if the AI's response doesn't already reference them + const needsHeader = !content.toLowerCase().includes('search result') && !content.toLowerCase().includes('source'); + const separator = needsHeader ? `\n\n**Sources Referenced:** ${searchResultsMarkdown}` : `\n\n${searchResultsMarkdown}`; + content += separator; + } + } + + const message: any = { + role: 'assistant' as const, + content: content + }; + + const transformedChoice = { + index: choice.index ?? index, + message, + finish_reason: choice.finish_reason || 'stop' + }; + + return transformedChoice; + }); + + // Transform usage information + const transformedUsage = cortensorData.usage ? 
{ + prompt_tokens: cortensorData.usage.prompt_tokens, + completion_tokens: cortensorData.usage.completion_tokens, + total_tokens: cortensorData.usage.total_tokens + } : { + prompt_tokens: 0, + completion_tokens: 0, + total_tokens: 0 + }; + + // Create OpenAI-formatted response + const openAIResponse: OpenAIResponse = { + id: cortensorData.id || `cortensor-${Date.now()}`, + object: 'chat.completion', + created: cortensorData.created || Math.floor(Date.now() / 1000), + model: cortensorData.model || 'cortensor-model', + choices: transformedChoices, + usage: transformedUsage + }; + + // Return as Response object + const responseBody = JSON.stringify(openAIResponse); + + const finalResponse = new Response( + responseBody, + { + status: cortensorResponse.status, + statusText: cortensorResponse.statusText, + headers: { + 'Content-Type': 'application/json' + } + } + ); + + return finalResponse; + } catch (error) { + // Return standardized error response + const errorResponse = createErrorResponse(); + + const errorResponseBody = JSON.stringify(errorResponse); + + return new Response( + errorResponseBody, + { + status: 500, + statusText: 'Internal Server Error', + headers: { 'Content-Type': 'application/json' } + } + ); + } +} + +// ============================================================================ +// NOTES +// ============================================================================ +// - Streaming is currently disabled - all responses are sent at once +// - The transformer handles both successful responses and error cases +// - All responses are converted to OpenAI-compatible format for SDK integration \ No newline at end of file diff --git a/tools/cortensor-openai-provider/src/types.ts b/tools/cortensor-openai-provider/src/types.ts new file mode 100644 index 0000000..31d969e --- /dev/null +++ b/tools/cortensor-openai-provider/src/types.ts @@ -0,0 +1,257 @@ +/** + * TypeScript Type Definitions for Cortensor OpenAI Provider + * + * This module contains all the interface and type definitions used throughout + * the Cortensor OpenAI Provider package for type safety and consistency. 
+ */ + +import type { CoreMessage } from 'ai'; + + +// ============================================================================ +// RE-EXPORTED TYPES FROM PROVIDER +// ============================================================================ + + +/** + * Simple options for Tavily search + */ +export interface TavilySearchOptions { + maxResults?: number; + apiKey?: string; + includeImages?: boolean; + searchDepth?: 'basic' | 'advanced'; +} + +// ============================================================================ +// CORTENSOR API TYPES +// ============================================================================ + +/** + * Request format expected by the Cortensor API + */ +export interface CortensorRequest { + session_id: number; + prompt: string; + prompt_type?: number; + prompt_template?: string; + stream?: boolean; + timeout?: number; + client_reference?: string; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + presence_penalty?: number; + frequency_penalty?: number; +} + +/** + * Individual choice in Cortensor API response + */ +export interface CortensorChoice { + finish_reason: string; + index: number; + logprobs: null | any; + text: string; +} + +/** + * Token usage information from Cortensor API + */ +export interface CortensorUsage { + completion_tokens: number; + prompt_tokens: number; + total_tokens: number; +} + +/** + * Response format from Cortensor API + */ +export interface CortensorResponse { + choices: CortensorChoice[]; + created: number; + id: string; + model: string; + object: string; + usage: CortensorUsage; +} + +// ============================================================================ +// OPENAI API TYPES +// ============================================================================ + +/** + * Request format from OpenAI/Vercel AI SDK + */ +export interface OpenAIRequest { + model: string; + messages: CoreMessage[]; + stream?: boolean; + temperature?: number; + max_tokens?: number; + [key: string]: unknown; +} + +/** + * Message structure in OpenAI response + */ +export interface OpenAIMessage { + role: string; + content: string | null; + refusal?: string | null; +} + +/** + * Choice structure in OpenAI response + */ +export interface OpenAIChoice { + index: number; + message: OpenAIMessage; + finish_reason: string | null; + logprobs?: any | null; +} + +/** + * Usage information in OpenAI response + */ +export interface OpenAIUsage { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; +} + +/** + * Response format expected by OpenAI/Vercel AI SDk + */ +export interface OpenAIResponse { + id: string; + object: string; + created: number; + model: string; + choices: OpenAIChoice[]; + usage?: OpenAIUsage; + system_fingerprint?: string; +} + +// ============================================================================ +// WEB SEARCH TYPES +// ============================================================================ + +/** + * Search directive information extracted from messages + */ +export interface SearchDirectives { + shouldSearch: boolean; + cleanedMessages: CoreMessage[]; +} + +/** + * Result of transforming to Cortensor format with optional web search data + */ +export interface CortensorTransformResult { + request: CortensorRequest; + webSearchResults?: WebSearchResult[]; + searchQuery?: string; +} + + +// ============================================================================ +// TYPE DEFINITIONS +// 
============================================================================
+
+/**
+ * Configuration options for Cortensor provider
+ */
+export interface CortensorConfig {
+  /** API key for authentication (optional, defaults to env var) */
+  apiKey?: string;
+  /** Base URL for the API (optional, defaults to env var) */
+  baseURL?: string;
+  /** Request timeout in seconds */
+  timeout?: number;
+  /** Session timeout in seconds */
+  sessionTimeout?: number;
+}
+
+/**
+ * Web search result structure
+ */
+export interface WebSearchResult {
+  title: string;
+  url: string;
+  snippet: string;
+  publishedDate?: string;
+}
+
+/**
+ * Web search request structure
+ */
+export interface WebSearchRequest {
+  query: string;
+  maxResults: number;
+}
+
+/**
+ * Web search configuration options
+ */
+export interface WebSearchConfig {
+  mode: 'prompt' | 'force' | 'disable';
+  provider?: WebSearchCallback;
+  maxResults?: number;
+}
+
+/**
+ * Model configuration options for Cortensor models
+ */
+export interface CortensorModelConfig {
+  /** Required session ID for the conversation */
+  sessionId: number;
+  /** Model name identifier */
+  modelName?: string;
+  /** Sampling temperature (0.0 to 2.0) */
+  temperature?: number;
+  /** Maximum tokens to generate */
+  maxTokens?: number;
+  /** Top-p sampling parameter */
+  topP?: number;
+  /** Top-k sampling parameter */
+  topK?: number;
+  /** Presence penalty (-2.0 to 2.0) */
+  presencePenalty?: number;
+  /** Frequency penalty (-2.0 to 2.0) */
+  frequencyPenalty?: number;
+  /** Whether to stream responses */
+  stream?: boolean;
+  /** Request timeout in seconds */
+  timeout?: number;
+  /** Prompt type identifier */
+  promptType?: number;
+  /** Custom prompt template */
+  promptTemplate?: string;
+  /** Web search configuration */
+  webSearch?: WebSearchConfig;
+}
+
+// ============================================================================
+// WEB SEARCH INTERFACES
+// ============================================================================
+
+/**
+ * Base interface for web search providers
+ */
+export interface WebSearchProvider {
+  search(query: string, maxResults?: number): Promise<WebSearchResult[]>;
+}
+
+/**
+ * Flexible callback type - can be a provider object or a direct function
+ */
+export type WebSearchCallback =
+  | WebSearchProvider
+  | ((query: string, maxResults?: number) => Promise<WebSearchResult[]>);
+
+// ============================================================================
+// HELPER FUNCTIONS
+// ============================================================================
diff --git a/tools/cortensor-openai-provider/src/utils.ts b/tools/cortensor-openai-provider/src/utils.ts
new file mode 100644
index 0000000..a884f1d
--- /dev/null
+++ b/tools/cortensor-openai-provider/src/utils.ts
@@ -0,0 +1,182 @@
+import type { CoreMessage } from "ai";
+import type { OpenAIResponse, WebSearchResult } from "./types";
+import { SEARCH_SNIPPET_WORD_LIMIT } from "./constants";
+
+/**
+ * Creates a standardized error response in OpenAI format
+ * @param errorMessage - The error message to include
+ * @returns OpenAI-formatted error response
+ */
+export function createErrorResponse(errorMessage: string = 'Sorry, I encountered an error processing your request.'): OpenAIResponse {
+  return {
+    id: `cortensor-error-${Date.now()}`,
+    object: 'chat.completion',
+    created: Math.floor(Date.now() / 1000),
+    model: 'cortensor-model',
+    choices: [
+      {
+        index: 0,
+        message: {
+          role: 'assistant' as const,
+          content: errorMessage
+        },
+        finish_reason: 'stop'
+      }
+    ]
+  };
+}
+
+/**
+ * Builds a
formatted prompt from system and conversation messages + * @param systemMessages - Array of system messages + * @param conversationMessages - Array of conversation messages + * @returns Formatted prompt string + */ +export function buildFormattedPrompt(systemMessages: CoreMessage[], conversationMessages: CoreMessage[]): string { + let prompt = ''; + + // Add system instructions section if present + if (systemMessages.length > 0) { + const systemInstructions = systemMessages + .map((msg, index) => { + const content = extractMessageContent(msg); + return content; + }) + .join('\n\n'); + + prompt += `### SYSTEM INSTRUCTIONS ###\n${systemInstructions}\n\n### CONVERSATION ###\n`; + } + + // Add conversation history with role formatting + const conversationText = conversationMessages + .map((msg, index) => { + const content = extractMessageContent(msg); + switch (msg.role) { + case 'user': + return `Human: ${content}`; + case 'assistant': + return `Assistant: ${content}`; + default: + return content; + } + }) + .join('\n\n'); + + prompt += conversationText; + + // Get current date and time for context + const now = new Date(); + const currentDateTime = now.toLocaleDateString('en-US', { + weekday: 'long', + year: 'numeric', + month: 'long', + day: 'numeric' + }) + ' at ' + now.toLocaleTimeString('en-US', { + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + timeZoneName: 'short' + }); + + prompt += `\n\n--- CURRENT DATE AND TIME ---\n${currentDateTime}`; + + // Add assistant prompt if the last message is from user + const lastMessage = conversationMessages[conversationMessages.length - 1]; + if (conversationMessages.length > 0 && lastMessage?.role === 'user') { + prompt += '\n\nAssistant:'; + } + + return prompt; +} + + +// ============================================================================ +// HELPER FUNCTIONS +// ============================================================================ + +/** + * Extracts text content from a message, handling both string and array formats + * @param message - The message to extract content from + * @returns The extracted text content + */ +export function extractMessageContent(message: CoreMessage): string { + if (typeof message.content === 'string') { + return message.content; + } + + if (Array.isArray(message.content)) { + const extractedContent = message.content + .filter(part => { + // Handle string parts + if (typeof part === 'string') { + return true; + } + // Handle text objects + if (typeof part === 'object' && part !== null && 'type' in part) { + return part.type === 'text'; + } + return false; + }) + .map(part => { + if (typeof part === 'string') { + return part; + } + // Extract text from text objects + const text = (part as any).text || ''; + return text; + }) + .join(' ') + .trim(); + return extractedContent; + } + + return ''; +} + + +/** + * Formats search results as numbered citations with a sources section + * @param results - Array of search results + * @returns Formatted search results with numbered citations and sources section + */ +export function formatSearchResults( + results: WebSearchResult[] +): string { + if (results.length === 0) { + return ''; + } + + // Create the sources section + const sources = results + .map((result, index) => { + return `[${index + 1}] [${result.title}](${result.url})`; + }) + .join('\n'); + + const formattedResults = `\n\n**Sources:**\n${sources}`; + + return formattedResults; +} + +/** + * Truncates a snippet to the specified number of words + * @param snippet - The snippet text to 
truncate + * @param wordLimit - Maximum number of words to include (defaults to SEARCH_SNIPPET_WORD_LIMIT) + * @returns Truncated snippet with ellipsis if truncated + */ +export function truncateSnippet(snippet: string, wordLimit: number = SEARCH_SNIPPET_WORD_LIMIT): string { + if (!snippet || snippet.trim().length === 0) { + return ''; + } + + const words = snippet.trim().split(/\s+/); + + if (words.length <= wordLimit) { + return snippet.trim(); + } + + const truncated = words.slice(0, wordLimit).join(' '); + return `${truncated}...`; +} \ No newline at end of file diff --git a/tools/cortensor-openai-provider/src/websearch.ts b/tools/cortensor-openai-provider/src/websearch.ts new file mode 100644 index 0000000..a4a7923 --- /dev/null +++ b/tools/cortensor-openai-provider/src/websearch.ts @@ -0,0 +1,170 @@ +import type { CoreMessage } from "ai"; +import { WebSearchError } from "./provider"; +import type { CortensorModelConfig, WebSearchCallback, WebSearchResult } from "./types"; +import { extractMessageContent, truncateSnippet } from "./utils"; + +/** + * Simple web search function + */ +async function performWebSearch( + query: string, + provider: WebSearchCallback, + maxResults: number +): Promise { + try { + if (typeof provider === 'function') { + return await provider(query, maxResults); + } else { + return await provider.search(query, maxResults); + } + } catch (error) { + throw new WebSearchError(`Web search failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + } +} + +/** + * Simple web search handler + */ +export async function handleWebSearch( + messages: CoreMessage[], + webSearchConfig?: CortensorModelConfig['webSearch'] +): Promise<{ query: string; results: WebSearchResult[] } | null> { + if (!webSearchConfig?.provider) { + return null; + } + + const searchQuery = generateSearchQuery(messages); + + if (!searchQuery) { + return null; + } + + try { + const searchResults = await performWebSearch(searchQuery, webSearchConfig.provider, webSearchConfig.maxResults || 5); + + const result = { query: searchQuery, results: searchResults }; + return result; + } catch (error) { + return null; + } +} + + + +/** + * Builds a prompt enhanced with search results + */ +export function buildPromptWithSearchResults( + messages: CoreMessage[], + searchResults: WebSearchResult[], + searchQuery: string +): string { + + // Build basic prompt + let prompt = ''; + const systemMessages = messages.filter(msg => msg.role === 'system'); + const conversationMessages = messages.filter(msg => msg.role !== 'system'); + + if (systemMessages.length > 0) { + prompt += systemMessages.map(msg => extractMessageContent(msg)).join('\n\n') + '\n\n'; + } + + conversationMessages.forEach(msg => { + const content = extractMessageContent(msg); + if (msg.role === 'user') { + prompt += `Human: ${content}\n\n`; + } else { + prompt += `Assistant: ${content}\n\n`; + } + }); + + + + // Add current date/time + const now = new Date(); + const currentDateTime = now.toLocaleDateString('en-US', { + weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' + }) + ' at ' + now.toLocaleTimeString('en-US', { + hour: '2-digit', minute: '2-digit', timeZoneName: 'short' + }); + + // Add search results (with truncated snippets, no URLs in prompt) + const searchContent = searchResults.length > 0 + ? searchResults.map((result, index) => { + const truncatedSnippet = result.snippet ? 
+      return `${result.title}: ${truncatedSnippet}`;
+    }).join('\n\n')
+    : 'No search results found.';
+
+  const finalPrompt = `${prompt}Current date and time: ${currentDateTime}\n\nSearch results for "${searchQuery}":\n\n${searchContent}\n\nAssistant:`;
+
+  return finalPrompt;
+}
+
+/**
+ * Generates a search query from the first 390 characters of the latest message
+ */
+export function generateSearchQuery(messages: CoreMessage[]): string {
+  if (messages.length === 0) {
+    return 'general information';
+  }
+
+  const lastMessage = messages[messages.length - 1];
+  if (!lastMessage) {
+    return 'general information';
+  }
+
+  const content = extractMessageContent(lastMessage);
+
+  // Take the first 390 characters as the query
+  return content.substring(0, 390).trim();
+}
+
+/**
+ * Checks the latest message for [**search**] / [**no-search**] directives and strips them from the content
+ */
+export function extractSearchDirectives(
+  messages: CoreMessage[],
+  webSearchConfig?: CortensorModelConfig['webSearch']
+): { shouldSearch: boolean; cleanedMessages: CoreMessage[] } {
+  if (!webSearchConfig || messages.length === 0) {
+    return { shouldSearch: false, cleanedMessages: messages };
+  }
+
+  const lastMessage = messages[messages.length - 1];
+  if (!lastMessage) {
+    return { shouldSearch: false, cleanedMessages: messages };
+  }
+
+  const content = extractMessageContent(lastMessage);
+
+  // Check for search markers
+  const hasSearchMarker = /\[\*\*search\*\*\]/i.test(content);
+  const hasNoSearchMarker = /\[\*\*no-search\*\*\]/i.test(content);
+
+  // Determine if search should be performed
+  let shouldSearch = false;
+  if (webSearchConfig.mode === 'force') {
+    shouldSearch = true;
+  } else if (webSearchConfig.mode === 'disable') {
+    shouldSearch = false;
+  } else {
+    // Prompt-based mode: search only when explicitly requested
+    shouldSearch = hasSearchMarker && !hasNoSearchMarker;
+  }
+
+  // Clean the content by removing the markers
+  const cleanedContent = content.replace(/\[\*\*search\*\*\]/gi, '').replace(/\[\*\*no-search\*\*\]/gi, '').trim();
+
+  const cleanedMessages: CoreMessage[] = [
+    ...messages.slice(0, -1),
+    { ...lastMessage, content: cleanedContent as any }
+  ];
+
+  return { shouldSearch, cleanedMessages };
+}
+
diff --git a/tools/cortensor-openai-provider/tsconfig.json b/tools/cortensor-openai-provider/tsconfig.json
new file mode 100644
index 0000000..db28910
--- /dev/null
+++ b/tools/cortensor-openai-provider/tsconfig.json
@@ -0,0 +1,40 @@
+{
+  "compilerOptions": {
+    "target": "ES2020",
+    "lib": ["ES2020", "DOM"],
+    "module": "ESNext",
+    "moduleResolution": "bundler",
+    "allowJs": false,
+    "skipLibCheck": true,
+    "strict": true,
+    "noEmit": true,
+    "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
+    "forceConsistentCasingInFileNames": true,
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "verbatimModuleSyntax": true,
+    "declaration": true,
+    "declarationMap": true,
+    "sourceMap": true,
+    "outDir": "./dist",
+    "rootDir": "./src",
+    "noUncheckedIndexedAccess": true,
+    "exactOptionalPropertyTypes": true,
+    "noImplicitReturns": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitOverride": true,
+    "paths": {
+      "@/*": ["./src/*"]
+    }
+  },
+  "include": [
+    "src/**/*"
+  ],
+  "exclude": [
+    "node_modules",
+    "dist",
+    "**/*.test.ts",
+    "**/*.spec.ts"
+  ]
+}
\ No newline at end of file
diff --git a/tools/cortensor-openai-provider/tsup.config.ts b/tools/cortensor-openai-provider/tsup.config.ts
new file mode 100644
index 0000000..bfbec57
--- /dev/null
+++ b/tools/cortensor-openai-provider/tsup.config.ts
@@ -0,0 +1,18 @@
+import { defineConfig } from 'tsup';
+
+export default defineConfig({
+  entry: ['src/index.ts'],
+  format: ['cjs', 'esm'],
+  dts: true,
+  splitting: false,
+  sourcemap: true,
+  clean: true,
+  minify: false,
+  external: ['ai'],
+  treeshake: true,
+  target: 'es2020',
+  outDir: 'dist',
+  banner: {
+    js: '// Cortensor AI Provider - OpenAI-compatible provider for Cortensor API',
+  },
+});
\ No newline at end of file
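
For orientation, the sketch below shows one way the helpers introduced in `src/websearch.ts` and `src/utils.ts` can be composed into a final prompt. This is illustrative only: the actual wiring lives in `src/provider.ts`, which is not part of this diff, and the direct module imports and the `buildFinalPrompt` wrapper are assumptions, not part of the package's public API.

```typescript
// Hypothetical composition of the exported helpers; the real call order in
// src/provider.ts may differ.
import type { CoreMessage } from 'ai';
import type { CortensorModelConfig } from './types';
import { buildFormattedPrompt } from './utils';
import {
  buildPromptWithSearchResults,
  extractSearchDirectives,
  handleWebSearch,
} from './websearch';

async function buildFinalPrompt(
  messages: CoreMessage[],
  webSearch?: CortensorModelConfig['webSearch']
): Promise<string> {
  // 1. Strip [**search**] / [**no-search**] markers and decide whether to search.
  const { shouldSearch, cleanedMessages } = extractSearchDirectives(messages, webSearch);

  if (shouldSearch) {
    // 2. Query the configured provider; null means search was skipped or failed.
    const search = await handleWebSearch(cleanedMessages, webSearch);
    if (search) {
      // 3. Fold the truncated snippets and current date/time into the prompt.
      return buildPromptWithSearchResults(cleanedMessages, search.results, search.query);
    }
  }

  // Fallback: plain prompt with SYSTEM INSTRUCTIONS / CONVERSATION sections.
  const systemMessages = cleanedMessages.filter((msg) => msg.role === 'system');
  const conversationMessages = cleanedMessages.filter((msg) => msg.role !== 'system');
  return buildFormattedPrompt(systemMessages, conversationMessages);
}
```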