Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 136 additions & 0 deletions packages/opencode/test/fixture/anthropic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
/**
* Fake Anthropic HTTP server for E2E tests that need real AI SDK streamText.
*
* Usage:
* import { server, waitRequest, toolResponse, textResponse, deferred } from "../fixture/anthropic"
*
* beforeAll(() => server.start())
* beforeEach(() => server.reset())
* afterAll(() => server.stop())
*
* // In test:
* waitRequest("/messages", toolResponse("toolu_01", "my_tool", { key: "value" }))
* waitRequest("/messages", textResponse("Done"))
*/

/** Snapshot of a single HTTP request received by the fake server. */
export type Capture = {
  // Full parsed request URL (includes the ephemeral port and pathname).
  url: URL
  // Original request headers, untouched.
  headers: Headers
  // JSON-parsed request body. NOTE(review): this is a cast, not a validated
  // parse — a non-object JSON body would still satisfy the type at runtime.
  body: Record<string, unknown>
}

/** One scripted expectation in the server's FIFO queue. */
type Entry = {
  // Path suffix the incoming request is matched against (via endsWith).
  path: string
  // Canned response, or a factory that can inspect the request/capture.
  response: Response | ((req: Request, capture: Capture) => Response)
  // Resolver for the promise returned by waitRequest().
  resolve: (value: Capture) => void
}

// Module-level singleton: the running Bun server (null until start()) plus a
// FIFO queue of scripted expectations; each incoming request consumes one entry.
const state = {
  instance: null as ReturnType<typeof Bun.serve> | null,
  queue: [] as Entry[],
}

/**
 * Create a promise alongside its externally-callable resolver, so a test can
 * hand out the promise now and settle it later from server callbacks.
 */
export function deferred<T>() {
  let resolve!: (value: T) => void
  const promise = new Promise<T>((settle) => {
    resolve = settle
  })
  return { promise, resolve }
}

/** Lifecycle wrapper around a single Bun HTTP server shared by all tests. */
export const server = {
  /** Boot the server on a free ephemeral port (port 0). Call once per suite. */
  start() {
    state.instance = Bun.serve({
      port: 0,
      async fetch(req) {
        // Pop the next scripted expectation; a request with no matching
        // expectation is a test bug, surfaced as a 500.
        const next = state.queue.shift()
        if (!next) return new Response("unexpected request", { status: 500 })
        const url = new URL(req.url)
        // NOTE(review): assumes every request carries a JSON body — a
        // body-less GET would make req.json() reject. Fine for this fixture's
        // POST-only Anthropic traffic, but worth confirming if reused.
        const body = (await req.json()) as Record<string, unknown>
        // Resolve the waiter BEFORE the path check: the capture is delivered
        // even on a path mismatch, so the test still sees what was sent while
        // the client gets a 404 below.
        next.resolve({ url, headers: req.headers, body })
        if (!url.pathname.endsWith(next.path)) return new Response("not found", { status: 404 })
        return typeof next.response === "function"
          ? next.response(req, { url, headers: req.headers, body })
          : next.response
      },
    })
  },
  /** Shut the server down. Safe to call if start() never ran. */
  stop() {
    state.instance?.stop()
  },
  /** Drop any unconsumed expectations between tests. */
  reset() {
    state.queue.length = 0
  },
  /** Base URL of the running server, e.g. to point the SDK's baseURL at it. */
  get origin() {
    if (!state.instance) throw new Error("server.start() must be called before accessing origin")
    return state.instance.url.origin
  },
}

/**
 * Script the next expected request. Returns a promise that resolves with the
 * Capture (url/headers/body) once the fake server receives a request, which
 * will then be answered with `response`.
 */
export function waitRequest(pathname: string, response: Response | ((req: Request, capture: Capture) => Response)) {
  const { promise, resolve } = deferred<Capture>()
  state.queue.push({ path: pathname, response, resolve })
  return promise
}

// --- SSE helpers ---

/**
 * Build a Server-Sent-Events Response: each chunk becomes a `data: <json>`
 * record, records are separated by blank lines, and the stream emits the
 * whole payload in one write then closes.
 */
export function sse(chunks: unknown[]) {
  const records: string[] = []
  for (const chunk of chunks) records.push(`data: ${JSON.stringify(chunk)}`)
  const encoded = new TextEncoder().encode(records.join("\n\n") + "\n\n")
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(encoded)
      controller.close()
    },
  })
  return new Response(stream, { status: 200, headers: { "Content-Type": "text/event-stream" } })
}

/**
 * SSE response scripting one assistant turn that ends in a single tool call:
 * message_start → tool_use block (input streamed as one JSON delta) →
 * message_delta with stop_reason "tool_use" → message_stop.
 */
export function toolResponse(id: string, name: string, input: Record<string, unknown>) {
  // Shared usage fragment; spread keeps key order identical to the literal form.
  const noCache = { cache_creation_input_tokens: null, cache_read_input_tokens: null }
  return sse([
    {
      type: "message_start",
      message: {
        id: "msg-1",
        model: "claude-3-5-sonnet-20241022",
        role: "assistant",
        usage: { input_tokens: 10, ...noCache },
      },
    },
    { type: "content_block_start", index: 0, content_block: { type: "tool_use", id, name } },
    {
      type: "content_block_delta",
      index: 0,
      delta: { type: "input_json_delta", partial_json: JSON.stringify(input) },
    },
    { type: "content_block_stop", index: 0 },
    { type: "message_delta", delta: { stop_reason: "tool_use" }, usage: { output_tokens: 20, ...noCache } },
    { type: "message_stop" },
  ])
}

/**
 * SSE response scripting one plain-text assistant turn: message_start →
 * text block with `msg` as a single delta → message_delta with stop_reason
 * "end_turn" → message_stop.
 */
export function textResponse(msg: string) {
  // Shared usage fragment; spread keeps key order identical to the literal form.
  const noCache = { cache_creation_input_tokens: null, cache_read_input_tokens: null }
  return sse([
    {
      type: "message_start",
      message: {
        id: "msg-2",
        model: "claude-3-5-sonnet-20241022",
        role: "assistant",
        usage: { input_tokens: 10, ...noCache },
      },
    },
    { type: "content_block_start", index: 0, content_block: { type: "text", text: "" } },
    { type: "content_block_delta", index: 0, delta: { type: "text_delta", text: msg } },
    { type: "content_block_stop", index: 0 },
    { type: "message_delta", delta: { stop_reason: "end_turn" }, usage: { output_tokens: 5, ...noCache } },
    { type: "message_stop" },
  ])
}
125 changes: 125 additions & 0 deletions packages/opencode/test/fixture/prompt-layers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
/**
* Full SessionPrompt Effect layer stack with real LLM (not TestLLM).
*
* Use this when tests need to exercise the complete prompt pipeline
* (resolveTools, tool execution, processor event handling) through
* the real AI SDK streamText against a fake HTTP server.
*
* Service stubs (MCP, LSP, FileTime) are no-ops — they satisfy the
* layer graph without requiring real servers.
*
* Usage:
* import { env } from "../fixture/prompt-layers"
* const it = testEffect(env)
*/

import { NodeFileSystem } from "@effect/platform-node"
import { Effect, Layer } from "effect"
import { Agent as AgentSvc } from "../../src/agent/agent"
import { Bus } from "../../src/bus"
import { Command } from "../../src/command"
import { Config } from "../../src/config/config"
import { FileTime } from "../../src/file/time"
import { AppFileSystem } from "../../src/filesystem"
import { LSP } from "../../src/lsp"
import { MCP } from "../../src/mcp"
import { Permission } from "../../src/permission"
import { Plugin } from "../../src/plugin"
import { Session } from "../../src/session"
import { LLM } from "../../src/session/llm"
import { SessionCompaction } from "../../src/session/compaction"
import { SessionProcessor } from "../../src/session/processor"
import { SessionPrompt } from "../../src/session/prompt"
import { SessionStatus } from "../../src/session/status"
import { Snapshot } from "../../src/snapshot"
import { ToolRegistry } from "../../src/tool/registry"
import { Truncate } from "../../src/tool/truncate"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"

// --- Service stubs ---

/**
 * No-op MCP layer: every query yields an empty/disabled result, and the OAuth
 * entry points die loudly — tests on this stack must never reach MCP auth.
 */
const mcpService = MCP.Service.of({
  status: () => Effect.succeed({}),
  clients: () => Effect.succeed({}),
  tools: () => Effect.succeed({}),
  prompts: () => Effect.succeed({}),
  resources: () => Effect.succeed({}),
  add: () => Effect.succeed({ status: { status: "disabled" as const } }),
  connect: () => Effect.void,
  disconnect: () => Effect.void,
  getPrompt: () => Effect.succeed(undefined),
  readResource: () => Effect.succeed(undefined),
  // Deliberately fatal: exercising auth against the stub is a test bug.
  startAuth: () => Effect.die("unexpected MCP auth"),
  authenticate: () => Effect.die("unexpected MCP auth"),
  finishAuth: () => Effect.die("unexpected MCP auth"),
  removeAuth: () => Effect.void,
  supportsOAuth: () => Effect.succeed(false),
  hasStoredTokens: () => Effect.succeed(false),
  getAuthStatus: () => Effect.succeed("not_authenticated" as const),
})
const mcp = Layer.succeed(MCP.Service, mcpService)

/**
 * No-op LSP layer: reports zero clients and empty results for every query,
 * so the prompt pipeline runs without any language-server processes.
 */
const lspService = LSP.Service.of({
  init: () => Effect.void,
  status: () => Effect.succeed([]),
  hasClients: () => Effect.succeed(false),
  touchFile: () => Effect.void,
  diagnostics: () => Effect.succeed({}),
  hover: () => Effect.succeed(undefined),
  definition: () => Effect.succeed([]),
  references: () => Effect.succeed([]),
  implementation: () => Effect.succeed([]),
  documentSymbol: () => Effect.succeed([]),
  workspaceSymbol: () => Effect.succeed([]),
  prepareCallHierarchy: () => Effect.succeed([]),
  incomingCalls: () => Effect.succeed([]),
  outgoingCalls: () => Effect.succeed([]),
})
const lsp = Layer.succeed(LSP.Service, lspService)

/**
 * FileTime stub: no timestamps are tracked, assertions always pass, and
 * `withLock` runs the critical section without taking any lock.
 */
const filetimeService = FileTime.Service.of({
  read: () => Effect.void,
  get: () => Effect.succeed(undefined),
  assert: () => Effect.void,
  // No real lock — lift the callback straight into an Effect and run it.
  withLock: (_filepath, fn) => Effect.promise(fn),
})
const filetime = Layer.succeed(FileTime.Service, filetimeService)

// --- Layer composition ---

// Status service backed by the event bus.
const status = SessionStatus.layer.pipe(Layer.provideMerge(Bus.layer))
// Platform infrastructure: real filesystem access plus a real process spawner.
const infra = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer)
// Core service graph shared by every higher layer. The three stubs
// (filetime, lsp, mcp) slot in where real servers would otherwise be needed;
// LLM.defaultLayer is the REAL LLM layer, pointed at the fake HTTP server.
const deps = Layer.mergeAll(
  Session.defaultLayer,
  Snapshot.defaultLayer,
  AgentSvc.defaultLayer,
  Command.defaultLayer,
  Permission.layer,
  Plugin.defaultLayer,
  Config.defaultLayer,
  filetime,
  lsp,
  mcp,
  AppFileSystem.defaultLayer,
  status,
  LLM.defaultLayer,
).pipe(Layer.provideMerge(infra))
// Each derived layer is built over the same `deps` graph.
// NOTE(review): relies on Effect Layer memoization to dedupe the shared
// `deps` instance across these pipelines — confirm against Effect docs.
const registry = ToolRegistry.layer.pipe(Layer.provideMerge(deps))
const trunc = Truncate.layer.pipe(Layer.provideMerge(deps))
const proc = SessionProcessor.layer.pipe(Layer.provideMerge(deps))
const compact = SessionCompaction.layer.pipe(Layer.provideMerge(proc), Layer.provideMerge(deps))

/**
 * Full SessionPrompt layer with real LLM and no-op stubs for MCP/LSP/FileTime.
 * Merges (rather than only provides) each dependency layer so tests can also
 * reach the underlying services (processor, registry, etc.) directly.
 */
export const env = SessionPrompt.layer.pipe(
  Layer.provideMerge(compact),
  Layer.provideMerge(proc),
  Layer.provideMerge(registry),
  Layer.provideMerge(trunc),
  Layer.provideMerge(deps),
)
Loading