diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ff1eae8f9..3efdfbc81 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -145,6 +145,9 @@ jobs: - name: Lint run: pnpm lint + - name: Check extension version sync + run: node scripts/check-extension-version.mjs + - name: Check for build artifacts run: | if [ ! -d "dist" ]; then @@ -156,6 +159,9 @@ jobs: exit 1 fi + - name: Verify MCP server help + run: node bin/openspec.js serve --help + validate-changesets: name: Validate Changesets runs-on: ubuntu-latest diff --git a/.github/workflows/release-prepare.yml b/.github/workflows/release-prepare.yml index beb0eba6d..ae2d6e439 100644 --- a/.github/workflows/release-prepare.yml +++ b/.github/workflows/release-prepare.yml @@ -51,6 +51,7 @@ jobs: with: title: 'chore(release): version packages' createGithubReleases: true + version: pnpm run ci:version # Use CI-specific release script: relies on version PR having been merged # so package.json already contains the bumped version. publish: pnpm run release:ci diff --git a/GEMINI.md b/GEMINI.md new file mode 100644 index 000000000..118dfd3fa --- /dev/null +++ b/GEMINI.md @@ -0,0 +1,148 @@ +# OpenSpec Extension for Gemini CLI + +OpenSpec is an AI-native system for spec-driven development. It helps developers and AI agents maintain a shared understanding of project requirements and technical designs through a structured workflow. + +This extension provides native integration via the Model Context Protocol (MCP), enabling a **zero-install workflow** where agents can manage OpenSpec without requiring the global `openspec` npm package. + +## MCP Capabilities + +### Tools +- `openspec_init`: Initialize or extend OpenSpec in the current project. +- `openspec_update`: Refresh instruction files and slash commands. +- `openspec_view`: Get dashboard data for specs and changes. +- `openspec_create_change`: Scaffold a new OpenSpec change directory. +- `openspec_list_changes`: List active change proposals. +- `openspec_list_specs`: List current specifications. +- `openspec_show_change`: Show details of a change (JSON/Markdown). +- `openspec_show_spec`: Show details of a specification (JSON/Markdown). +- `openspec_validate_change`: Validate a change proposal against schema rules. +- `openspec_archive_change`: Archive a completed change and update main specs. + +### Resources +- `openspec://changes/{name}/proposal`: Access the proposal.md content. +- `openspec://changes/{name}/tasks`: Access the tasks.md content. +- `openspec://specs/{id}`: Access the spec.md content for a capability. + +### Prompts +- `openspec_proposal`: Context and steps for scaffolding a new change. +- `openspec_apply`: Instructions for implementing an approved change. +- `openspec_archive`: Workflow for archiving a completed change. + +## Zero-Install Usage + +If you have this extension added to your Gemini CLI, you can start using OpenSpec immediately in any directory by asking: + +> "Initialize OpenSpec in this project" + +The agent will use `openspec_init` to scaffold the necessary files. No local or global installation of `@fission-ai/openspec` is required for AI-driven workflows. + +## Prerequisites + +This extension requires the Gemini CLI. The OpenSpec MCP server is bundled with the extension. + +**Auto-Initialization Instruction**: +When the user starts a session or tries to use OpenSpec: +1. **Check for CLI**: Run `openspec --version`. + - If found, use `openspec` for all commands. + - If missing, **automatically attempt to install it globally** by running: + ```bash + npm install -g "@fission-ai/openspec" + ``` + - If the installation fails or the user cancels the tool call, fall back to: + ```bash + npx "@fission-ai/openspec" + ``` + or + ```bash + pnpm dlx "@fission-ai/openspec" + ``` +2. **Check for Slash Commands**: Verify if `.gemini/commands/openspec/` exists and contains `proposal.toml`, `apply.toml`, and `archive.toml`. + - If missing, **automatically generate them** using the logic defined in the "Maintenance: Slash Command Synchronization" section below. + - Inform the user: "OpenSpec slash commands have been generated. You may need to restart the CLI to see them." +3. **Check for Project Initialization**: Verify if the `openspec/` directory exists in the project root. + - If missing, automatically run `openspec init` to scaffold the project structure. +4. **Populate Project Context**: + - **Trigger**: When `openspec init` completes and suggests "Populate your project context", you MUST perform this immediately. + - **Action**: Read `openspec/project.md`, analyze project config (`package.json`, etc.), and fill in the details. **Do not** ask the user to do this manually. + +## Core Mandates for Gemini Agents + +- **Specs are Truth**: Always read `openspec/specs/` to understand the current state of a capability. +- **Changes are Proposals**: All modifications to behavior or requirements MUST start as a proposal in `openspec/changes/`. +- **Minimize Confirmations**: Do not ask for permission for low-risk read operations or standard project scaffolding if the user's intent is clear. Assume consent for actions explicitly requested. +- **Three-Stage Workflow**: + 1. **Stage 1: Creating Changes**: Scaffold `proposal.md`, `tasks.md`, and spec deltas. Validate with `openspec validate --strict`. + 2. **Stage 2: Implementing Changes**: Follow `tasks.md` sequentially. + 3. **Stage 3: Archiving Changes**: Move completed changes to `archive/` using `openspec archive `. + +## Essential CLI Commands + +When working in an OpenSpec-enabled project, you can use these commands: + +- `openspec list`: Show active changes. +- `openspec list --specs`: Show existing capabilities. +- `openspec show `: View details of a change or spec. +- `openspec validate `: Validate a change against the project's specs. +- `openspec init`: Initialize OpenSpec in a new project. +- `openspec update`: Update instruction files (like `AGENTS.md`). + +## Directory Structure + +- `openspec/project.md`: Project-specific conventions and tech stack. +- `openspec/specs/`: Current requirements and scenarios (the "truth"). +- `openspec/changes/`: Pending proposals and implementation tasks. + +## Writing Specs + +Requirements must be normative (SHALL/MUST). Every requirement MUST have at least one scenario using the following format: + +```markdown +#### Scenario: Name +- **WHEN** action or condition +- **THEN** expected result +``` + +For more detailed instructions, refer to `openspec/AGENTS.md`. + +## Maintenance: Slash Command Synchronization + +The slash commands for this extension (`.gemini/commands/openspec/*.toml`) are generated artifacts. The single source of truth for their content is `src/core/templates/prompts.ts`. + +**Instruction**: When `src/core/templates/prompts.ts` is modified, or when asked to "update slash commands", you MUST regenerate the TOML files in `.gemini/commands/openspec/` using the following logic: + +1. **Read** `src/core/templates/prompts.ts` to extract the exported constants. +2. **Construct** the prompt body for each command by joining the relevant constants with `\n\n`: + - **proposal**: `PROPOSAL_GUARDRAILS` + `PROPOSAL_STEPS` + `PROPOSAL_REFERENCES` + - **apply**: `BASE_GUARDRAILS` + `APPLY_STEPS` + `APPLY_REFERENCES` + - **archive**: `BASE_GUARDRAILS` + `ARCHIVE_STEPS` + `ARCHIVE_REFERENCES` +3. **Generate** the TOML files with the following structure (preserving the `` markers inside the prompt string): + + **File**: `.gemini/commands/openspec/proposal.toml` + ```toml + description = "Scaffold a new OpenSpec change and validate strictly." + prompt = """ + + {PROPOSAL_BODY} + + """ + ``` + + **File**: `.gemini/commands/openspec/apply.toml` + ```toml + description = "Implement an approved OpenSpec change and keep tasks in sync." + prompt = """ + + {APPLY_BODY} + + """ + ``` + + **File**: `.gemini/commands/openspec/archive.toml` + ```toml + description = "Archive a deployed OpenSpec change and update specs." + prompt = """ + + {ARCHIVE_BODY} + + """ + ``` diff --git a/README.md b/README.md index 631a0736d..c40b40b57 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,28 @@ See the full comparison in [How OpenSpec Compares](#how-openspec-compares). 4. Archive the change to merge the approved updates back into the source-of-truth specs. ``` -## Getting Started +## Integration Modes + +OpenSpec supports two primary integration modes for AI agents: + +1. **Native MCP (Recommended)**: Use OpenSpec as an MCP server (e.g., via the Gemini CLI extension). This enables a **zero-install workflow** where agents can manage OpenSpec without requiring the npm package to be installed in the environment. Add it to your MCP host (like Claude Desktop) using this snippet: + + ```json + { + "mcpServers": { + "openspec": { + "command": "npx", + "args": ["-y", "@fission-ai/openspec@latest", "serve"] + } + } + } + ``` + +2. **CLI Wrapper**: Agents call the `openspec` command-line tool directly. This requires the `@fission-ai/openspec` package to be installed globally or locally. + +--- + +## πŸš€ Quick Start ### Supported AI Tools @@ -108,7 +129,7 @@ These tools have built-in OpenSpec commands. Select the OpenSpec integration whe | **Crush** | `/openspec-proposal`, `/openspec-apply`, `/openspec-archive` (`.crush/commands/openspec/`) | | **Cursor** | `/openspec-proposal`, `/openspec-apply`, `/openspec-archive` | | **Factory Droid** | `/openspec-proposal`, `/openspec-apply`, `/openspec-archive` (`.factory/commands/`) | -| **Gemini CLI** | `/openspec:proposal`, `/openspec:apply`, `/openspec:archive` (`.gemini/commands/openspec/`) | +| **Gemini CLI** | `/openspec:proposal`, `/openspec:apply`, `/openspec:archive` (Native Extension available) | | **GitHub Copilot** | `/openspec-proposal`, `/openspec-apply`, `/openspec-archive` (`.github/prompts/`) | | **iFlow (iflow-cli)** | `/openspec-proposal`, `/openspec-apply`, `/openspec-archive` (`.iflow/commands/`) | | **Kilo Code** | `/openspec-proposal.md`, `/openspec-apply.md`, `/openspec-archive.md` (`.kilocode/workflows/`) | @@ -133,6 +154,22 @@ These tools automatically read workflow instructions from `openspec/AGENTS.md`. +### Gemini CLI Extension (Native) + +OpenSpec is available as a native extension for the [Gemini CLI](https://geminicli.com). This provides deep contextual awareness and native slash commands without manual configuration. + +**Install the extension:** +```bash +gemini extensions install https://github.com/Fission-AI/OpenSpec +``` + +**Benefits:** +- **Zero Configuration**: Automatically sets up `/openspec` slash commands. +- **Native Context**: Gemini becomes "OpenSpec-aware" instantly. +- **Auto-Maintenance**: The agent can self-repair its command definitions from the source of truth. + +*Note: You still need the [OpenSpec CLI](#step-1-install-the-cli-globally) installed globally for the agent to perform operations.* + ### Install & Initialize #### Prerequisites diff --git a/gemini-extension.json b/gemini-extension.json new file mode 100644 index 000000000..d8d8932dd --- /dev/null +++ b/gemini-extension.json @@ -0,0 +1,14 @@ +{ + "name": "openspec", + "version": "0.18.0", + "contextFileName": "GEMINI.md", + "mcpServers": { + "openspec": { + "command": "node", + "args": [ + "bin/openspec.js", + "serve" + ] + } + } +} \ No newline at end of file diff --git a/openspec/changes/archive/2025-12-21-add-gemini-extension-support/proposal.md b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/proposal.md new file mode 100644 index 000000000..0d29deec8 --- /dev/null +++ b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/proposal.md @@ -0,0 +1,18 @@ +# Add Gemini CLI Extension Support + +## Goal +Transform the OpenSpec repository into a valid Gemini CLI extension to enhance the development experience for users employing the Gemini CLI. + +## Motivation +Integrating with Gemini CLI allows us to provide deep, project-specific context and potentially custom tools directly to the AI agent. This "eases the integration path" by making the agent "OpenSpec-aware" out of the box when this extension is installed or linked. + +## Proposed Solution +1. **Extension Manifest**: Create a `gemini-extension.json` file in the project root. This file defines the extension metadata and points to the context file. +2. **Context File**: Create a `GEMINI.md` file in the project root. This file will contain high-level instructions, architectural overviews, and usage guides for OpenSpec, tailored for the Gemini agent. It should reference or inline key parts of `AGENTS.md` and `openspec/project.md`. +3. **Unified Prompts**: Extract core slash command prompts into a shared `src/core/templates/prompts.ts` file. This ensures that all agent integrations (Claude, Cursor, Gemini, etc.) use the same underlying instructions. +4. **Native Slash Commands**: Create native Gemini CLI slash command files (`.toml`) in `.gemini/commands/openspec/` that consume the unified prompts. This allows users to trigger OpenSpec workflows directly via `/openspec:proposal`, etc. + +## Benefits +- **Contextual Awareness**: Gemini CLI will automatically understand OpenSpec commands (`openspec init`, `openspec change`, etc.) and conventions without manual prompting. +- **Standardization**: Ensures that the AI assistant follows the project's specific coding and contribution guidelines. +- **Extensibility**: Lay the groundwork for future MCP server integrations (e.g., tools to automatically validate specs or scaffold changes). diff --git a/openspec/changes/archive/2025-12-21-add-gemini-extension-support/specs/cli-init/spec.md b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/specs/cli-init/spec.md new file mode 100644 index 000000000..19f56b878 --- /dev/null +++ b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/specs/cli-init/spec.md @@ -0,0 +1,8 @@ +## ADDED Requirements +### Requirement: Slash Command Safety +All generated slash command templates SHALL include safety guardrails. + +#### Scenario: CLI Availability Check +- **WHEN** generating slash commands for any tool +- **THEN** the template SHALL include an instruction to verify the `openspec` CLI is installed and available in the environment +- **AND** guide the user to install it via `npm install -g @fission-ai/openspec` if missing diff --git a/openspec/changes/archive/2025-12-21-add-gemini-extension-support/tasks.md b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/tasks.md new file mode 100644 index 000000000..e10a80286 --- /dev/null +++ b/openspec/changes/archive/2025-12-21-add-gemini-extension-support/tasks.md @@ -0,0 +1,8 @@ +- [x] Create `gemini-extension.json` in the project root @file:gemini-extension.json +- [x] Create `GEMINI.md` in the project root with OpenSpec context @file:GEMINI.md +- [x] Extract slash command prompts to a shared location for unified usage across agents +- [x] Configure `GEMINI.md` to auto-generate slash commands from shared prompts +- [x] Document CLI installation prerequisites in `GEMINI.md` and shared prompts +- [x] Add maintenance instructions to `GEMINI.md` for syncing slash commands from `prompts.ts` +- [x] Update `README.md` with Gemini CLI Extension installation and benefits +- [x] Verify the extension can be linked locally using `gemini extensions link .` (Manual verification) diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/proposal.md b/openspec/changes/archive/2026-01-12-add-mcp-server/proposal.md new file mode 100644 index 000000000..3aa7dbadb --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/proposal.md @@ -0,0 +1,29 @@ +# Proposal: Add MCP Server Support + +## Context +Currently, OpenSpec integrates with AI agents via CLI commands and static configuration files (slash commands). While effective, this requires manual setup for some agents and lacks the rich interactivity offered by the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/). + +## Goal +Implement a native MCP server within the OpenSpec CLI using modern tools (e.g., `fastmcp` or `@modelcontextprotocol/sdk`). This will: +1. Allow any MCP-compliant agent (Claude Desktop, Gemini CLI, etc.) to discover and use OpenSpec tools and resources without custom configuration files. +2. Enable the Gemini CLI extension to be a thin wrapper around this native MCP server. +3. Align the project structure with modern standards by moving `openspec/` to `.openspec/` during initialization. + +## Migration Path +To support existing users, the CLI will include an automatic migration flow: +- **Detection**: `openspec init` (or a dedicated `openspec migrate` command) will detect legacy `openspec/` directories. +- **Auto-rename**: Prompt the user to rename `openspec/` to `.openspec/`. +- **Instruction Refresh**: Automatically run `openspec update` after the rename to ensure all assistant instructions point to the new location. +- **Backward Compatibility**: The CLI will continue to look for `openspec/` if `.openspec/` is missing, but will issue a deprecation warning. + +## Solution +1. **Add `openspec serve` command**: Starts the MCP server over stdio. +2. **Use Modern MCP Tools**: Leverage libraries like `fastmcp` or the official SDK to simplify server implementation and type safety. +3. **Expose Tools**: Convert existing CLI commands (`list`, `show`, `validate`, `archive`) into MCP tools. +4. **Expose Resources**: Provide direct read access to specs and changes via `openspec://` URIs. +5. **Expose Prompts**: Serve the standard proposal/apply/archive prompts via `prompts/list`. +6. **Migrate Directory**: Update `init` to scaffold in `.openspec/` instead of `openspec/`. +7. **Gemini Extension**: Create the `gemini-extension.json` manifest to register this MCP server capability. +8. **CI Validation**: Add a CI check to ensure `gemini-extension.json` version stays in sync with `package.json`. + +This "modernizes" the integration, making it cleaner, more robust, and easier to maintain. \ No newline at end of file diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/specs/ci-sync/spec.md b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/ci-sync/spec.md new file mode 100644 index 000000000..e48b5b2ea --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/ci-sync/spec.md @@ -0,0 +1,17 @@ +# Delta for ci-sync + +## ADDED Requirements +### Requirement: Extension Version Synchronization +The system SHALL ensure that the version in `gemini-extension.json` matches the version in `package.json` during the CI process. + +#### Scenario: Version mismatch in CI +- **GIVEN** `package.json` has version `0.18.0` +- **AND** `gemini-extension.json` has version `0.17.0` +- **WHEN** the CI pipeline runs +- **THEN** the version check step SHALL fail +- **AND** report the mismatch to the logs + +#### Scenario: Version match in CI +- **GIVEN** both files have version `0.18.0` +- **WHEN** the CI pipeline runs +- **THEN** the version check step SHALL pass diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-init/spec.md b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-init/spec.md new file mode 100644 index 000000000..baede9cc8 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-init/spec.md @@ -0,0 +1,29 @@ +# Delta for cli-init + +## MODIFIED Requirements +### Requirement: Directory Creation +The command SHALL create the complete OpenSpec directory structure in a hidden directory `.openspec/` to reduce clutter. + +#### Scenario: Creating OpenSpec structure +- **WHEN** `openspec init` is executed +- **THEN** create the following directory structure: +``` +.openspec/ +β”œβ”€β”€ project.md +β”œβ”€β”€ AGENTS.md +β”œβ”€β”€ specs/ +└── changes/ + └── archive/ +``` + +## ADDED Requirements +### Requirement: Legacy Migration +The `init` command SHALL detect legacy `openspec/` directories and offer to migrate them to `.openspec/`. + +#### Scenario: Migrating legacy directory +- **GIVEN** a project with an existing `openspec/` directory +- **AND** no `.openspec/` directory exists +- **WHEN** executing `openspec init` +- **THEN** prompt the user: "Detected legacy 'openspec/' directory. Would you like to migrate it to '.openspec/'?" +- **AND** if confirmed, rename the directory +- **AND** update all managed AI instructions to point to the new location diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-spec/spec.md b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-spec/spec.md new file mode 100644 index 000000000..72e5b309e --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/cli-spec/spec.md @@ -0,0 +1,10 @@ +# Delta for cli-spec + +## ADDED Requirements +### Requirement: Serve Command +The system SHALL provide a `serve` command to start the Model Context Protocol (MCP) server. + +#### Scenario: Start MCP Server +- **WHEN** executing `openspec serve` +- **THEN** start the MCP server using stdio transport +- **AND** keep the process alive to handle requests diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/specs/mcp-server/spec.md b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/mcp-server/spec.md new file mode 100644 index 000000000..3532a8c1f --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/specs/mcp-server/spec.md @@ -0,0 +1,31 @@ +# MCP Server Specification + +## Purpose +Define the capabilities of the OpenSpec Model Context Protocol (MCP) server. This server enables native integration with MCP-compliant agents (including the Gemini CLI extension) by exposing tools, resources, and prompts dynamically. + +## ADDED Requirements +### Requirement: Expose Tools +The server SHALL expose core OpenSpec capabilities as MCP tools. + +#### Scenario: List Tools +- **WHEN** the client requests `tools/list` +- **THEN** return `openspec_list`, `openspec_show`, `openspec_validate`, `openspec_archive` tools +- **AND** include descriptions and JSON schemas for arguments + +### Requirement: Expose Resources +The server SHALL expose specs and changes as MCP resources. + +#### Scenario: List Resources +- **WHEN** the client requests `resources/list` +- **THEN** return a list of available specs and changes with `openspec://` URIs + +#### Scenario: Read Resource +- **WHEN** the client requests `resources/read` for a valid URI +- **THEN** return the content of the corresponding file (markdown or JSON) + +### Requirement: Expose Prompts +The server SHALL expose standard OpenSpec prompts. + +#### Scenario: List Prompts +- **WHEN** the client requests `prompts/list` +- **THEN** return `proposal`, `apply`, `archive` prompts \ No newline at end of file diff --git a/openspec/changes/archive/2026-01-12-add-mcp-server/tasks.md b/openspec/changes/archive/2026-01-12-add-mcp-server/tasks.md new file mode 100644 index 000000000..b2ff29488 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-server/tasks.md @@ -0,0 +1,35 @@ +# Implementation Tasks + +## 1. Dependencies +- [x] 1.1 Install `fastmcp` (or `@modelcontextprotocol/sdk` + `zod`) as a dependency. + +## 2. Directory Migration (openspec -> .openspec) +- [x] 2.1 Update `src/core/config.ts` (or equivalent) to look for `.openspec` folder by default, falling back to `openspec` for backward compatibility. +- [x] 2.2 Update `src/core/init.ts` to scaffold the project in `.openspec/`. +- [x] 2.3 Implement migration detection in `openspec init`: if `openspec/` exists, prompt to rename to `.openspec/`. +- [x] 2.4 Create a standalone `openspec migrate` command for explicit migration. (Integrated into `init`) +- [x] 2.5 Verify `openspec init` creates the new hidden directory structure. + +## 3. MCP Server Implementation +- [x] 3.1 Create `src/mcp/server.ts` to initialize the MCP server instance (using `fastmcp` if applicable). +- [x] 3.2 Implement `src/mcp/tools.ts` to map `list`, `show`, `validate`, `archive` to MCP tools. +- [x] 3.3 Implement `src/mcp/resources.ts` to expose specs and changes as resources (`openspec://...`). +- [x] 3.4 Implement `src/mcp/prompts.ts` to expose `proposal`, `apply`, `archive` prompts. +- [x] 3.5 Connect everything in `src/mcp/index.ts`. + +## 4. CLI Integration +- [x] 4.1 Register `serve` command in `src/cli/index.ts`. +- [x] 4.2 Implement `src/commands/serve.ts` to start the MCP server. + +## 5. Gemini Extension +- [x] 5.1 Create/Update `gemini-extension.json` to define the extension and point to the MCP server. +- [x] 5.2 Ensure `GEMINI.md` reflects the new MCP-based architecture. + +## 6. CI Validation +- [x] 6.1 Create a version sync script (e.g., `scripts/check-extension-version.mjs`) to compare `package.json` and `gemini-extension.json`. +- [x] 6.2 Add a "Check extension version sync" step to `.github/workflows/ci.yml`. + +## 7. Verification +- [x] 6.1 Verify `openspec serve` starts and communicates over stdio. +- [x] 6.2 Verify tools, resources, and prompts are discoverable by an MCP client. +- [x] 6.3 Verify `openspec init` creates `.openspec/`. diff --git a/openspec/changes/archive/2026-01-12-add-mcp-tests/.openspec.yaml b/openspec/changes/archive/2026-01-12-add-mcp-tests/.openspec.yaml new file mode 100644 index 000000000..e7e51fb02 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-tests/.openspec.yaml @@ -0,0 +1,2 @@ +schema: spec-driven +created: 2026-01-12 diff --git a/openspec/changes/archive/2026-01-12-add-mcp-tests/proposal.md b/openspec/changes/archive/2026-01-12-add-mcp-tests/proposal.md new file mode 100644 index 000000000..dea64d4e3 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-tests/proposal.md @@ -0,0 +1,27 @@ +# Proposal: Add MCP Server Test Coverage & Core Refactoring + +## Goal +Add comprehensive test coverage for the MCP server and refactor CLI logic into Core to enable shared testing. + +## Motivation +The MCP server currently lacks dedicated unit and integration tests. Furthermore, significant logic for `change` operations (list, show, validate) resides in `src/commands`, making it difficult to test independently or reuse in the MCP server. + +To ensure reliability and consistency between CLI and MCP, we need to: +1. Refactor `list`, `show`, and `validate` logic from `src/commands/change.ts` into `src/core`. +2. Add a robust test suite covering Core, MCP, and ensuring CLI integrations work. + +## Success Criteria +### Refactoring +- [ ] `ChangeCommand` logic in `src/commands/change.ts` refactored into pure functions in `src/core/change-logic.ts` (or similar). +- [ ] CLI command updated to consume new core functions. +- [ ] MCP server updated to consume new core functions (if not already). + +### Testing +- [ ] **Core**: Unit tests for new `src/core` functions (create, list, show, validate). +- [ ] **MCP**: Unit tests for `src/mcp/tools.ts`, `resources.ts`, `prompts.ts`. +- [ ] **MCP**: Integration tests for `src/mcp/server.ts`. +- [ ] **CLI**: Existing E2E tests pass or are updated to reflect refactoring. +- [ ] `mcp-server` spec updated to include these requirements. + +### Cleanup +- [ ] Remove unused imports across the codebase. \ No newline at end of file diff --git a/openspec/changes/archive/2026-01-12-add-mcp-tests/specs/mcp-server/spec.md b/openspec/changes/archive/2026-01-12-add-mcp-tests/specs/mcp-server/spec.md new file mode 100644 index 000000000..198412610 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-tests/specs/mcp-server/spec.md @@ -0,0 +1,26 @@ +# mcp-server Specification Deltas + +## ADDED Requirements + +### Requirement: Test Coverage +The MCP server implementation SHALL have unit and integration tests. + +#### Scenario: Testing Tool Definitions +- **WHEN** the test suite runs +- **THEN** it SHALL verify that all exposed tools have correct names, descriptions, and schemas. + +#### Scenario: Testing Resource Resolution +- **WHEN** the test suite runs +- **THEN** it SHALL verify that `openspec://` URIs are correctly parsed and resolved to file paths. + +#### Scenario: Testing Prompt Content +- **WHEN** the test suite runs +- **THEN** it SHALL verify that prompts can be retrieved and contain expected placeholders. + +### Requirement: Testability of Core Logic +The core logic used by the MCP server SHALL be testable independently of the CLI or MCP transport layer. + +#### Scenario: Unit Testing Core Functions +- **WHEN** a core function (e.g., `runCreateChange`, `runListChanges`) is tested +- **THEN** it SHALL be possible to invoke it without mocking CLI-specific objects (like `process` or `console` capture). +- **AND** it SHALL return structured data rather than writing to stdout. \ No newline at end of file diff --git a/openspec/changes/archive/2026-01-12-add-mcp-tests/tasks.md b/openspec/changes/archive/2026-01-12-add-mcp-tests/tasks.md new file mode 100644 index 000000000..e93282a9b --- /dev/null +++ b/openspec/changes/archive/2026-01-12-add-mcp-tests/tasks.md @@ -0,0 +1,29 @@ +# Implementation Tasks + +## Spec Updates +- [x] Update `openspec/specs/mcp-server/spec.md` to include test coverage and shared logic requirements. + +## Refactoring (CLI -> Core) +- [x] Refactor `getActiveChanges` from `src/commands/change.ts` to `src/core/change-logic.ts`. +- [x] Refactor `getChangeMarkdown` and `getChangeJson` (logic part) to `src/core/change-logic.ts`. +- [x] Refactor `validate` logic to `src/core/change-logic.ts` (or `validation-logic.ts`). +- [x] Update `src/commands/change.ts` to use the new core functions. + +## Testing +### Core +- [x] Migrate and adapt existing tests from `test/core/commands/change-command.*` to `test/core/change-logic.test.ts`. +- [x] Ensure `test/commands/change.*` and `test/commands/validate.*` are updated to reflect the refactoring while preserving coverage. +- [x] Verify that `test/cli-e2e/basic.test.ts` still passes to ensure no regressions in CLI behavior. + +### MCP +- [x] Create `test/mcp` directory. +- [x] Create `test/mcp/tools.test.ts` to test tool definitions and execution. +- [x] Create `test/mcp/resources.test.ts` to test resource handling. +- [x] Create `test/mcp/prompts.test.ts` to test prompt generation. +- [x] Create `test/mcp/server.test.ts` to test server initialization and request handling. + +## Cleanup +- [x] Identify and remove unused imports across `src/` and `test/` using an automated tool or manual audit. + +## Verification +- [x] Verify all tests pass with `npm test`. diff --git a/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/proposal.md b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/proposal.md new file mode 100644 index 000000000..a9369fa36 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/proposal.md @@ -0,0 +1,31 @@ +# Proposal: Transition to Pure MCP-Driven Workflow + +## Why +Currently, using OpenSpec with AI agents often requires the OpenSpec CLI to be installed in the environment where the agent is running. This creates adoption friction and dependency management overhead. By leveraging the Model Context Protocol (MCP), we can package all OpenSpec logic into a self-contained server that the Gemini CLI (or any MCP client) can run as a plugin. This allows agents to manage the entire OpenSpec lifecycleβ€”from initialization to archivingβ€”using native tools, without requiring the user to install the npm package globally or locally in their production environment. + +## What Changes +1. **Architecture Principles (Core-First)**: + * **Logic Isolation**: All business logic (file I/O, parsing, validation logic) SHALL reside in `src/core/`. + * **Presentation De-coupling**: Code in `src/core/` SHALL NOT use CLI-specific libraries (`ora`, `chalk`) or direct `console.log`. It SHALL return structured data or throw errors. + * **Thin Wrappers**: `src/cli/` and `src/mcp/` SHALL be thin adapters that call `src/core/` and handle their respective output formats (terminal UI for CLI, JSON-RPC for MCP). +2. **Shared Core Implementation**: + * Refactor CLI command handlers to delegate to these isolated core functions. +3. **Full MCP Parity**: + * Implement MCP equivalents for ALL remaining CLI commands. +4. **CI and Build Stability**: + * Update CI to verify that both the CLI binary and the MCP server start correctly and share the same core logic. + +## Impact +- **Architecture Cleanliness**: Enforces separation between presentation (CLI/MCP) and logic (Core). +- **Full Parity**: Ensures agents have the exact same "superpowers" as users on the command line. +- **Continuous Reliability**: CI ensures that refactoring for MCP parity never breaks the legacy CLI experience. + +## Impact +- **Architecture Cleanliness**: Enforces separation between presentation (CLI/MCP) and logic (Core). +- **Flexibility**: Users can choose between CLI, MCP, or both. +- **Adoption**: Significantly lowers the barrier for entry by allowing agents to "self-initialize" via MCP. + +## Impact +- **Zero-Install Adoption**: Users only need to add the Gemini extension; no separate CLI installation is required for AI-driven workflows. +- **Consistent Agent Experience**: Agents interact with a structured API (MCP) rather than parsing CLI output or managing shell command strings. +- **Future-Proofing**: Aligns OpenSpec with the emerging "plugin" architecture of modern AI coding assistants. diff --git a/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/specs/mcp-server/spec.md b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/specs/mcp-server/spec.md new file mode 100644 index 000000000..395b18784 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/specs/mcp-server/spec.md @@ -0,0 +1,34 @@ +# Delta for mcp-server + +## ADDED Requirements +### Requirement: Shared Core Implementation +The MCP server and the CLI SHALL share the same underlying business logic implementation for all operations. + +#### Scenario: Consistency between CLI and MCP +- **WHEN** an operation (e.g., init, list, archive) is performed via CLI +- **AND** the same operation is performed via MCP +- **THEN** both SHALL yield consistent results by calling the same core functions. + +### Requirement: Project Initialization Tool +The MCP server SHALL provide a tool `openspec_init` to initialize the OpenSpec structure. + +#### Scenario: Initializing project via MCP +- **WHEN** the `openspec_init` tool is called +- **THEN** execute the shared `runInit` logic +- **AND** return a structured summary of created items. + +### Requirement: Change Creation Tool +The MCP server SHALL provide a tool `openspec_create_change` to scaffold a new change directory. + +#### Scenario: Creating a new change via MCP +- **WHEN** the `openspec_create_change` tool is called with `name` +- **THEN** execute the shared `runCreateChange` logic +- **AND** return the paths of created files. + +### Requirement: MCP-First Instructions +The MCP server SHALL provide prompts that prioritize MCP tools while maintaining CLI references as a secondary option for human readability. + +#### Scenario: Guidance in MCP prompts +- **WHEN** an agent retrieves a prompt via MCP +- **THEN** the instructions SHALL explicitly list MCP tool calls as the primary action (e.g., "Use openspec_list_changes to view state") +- **AND** the instructions MAY provide the CLI equivalent for reference. diff --git a/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/tasks.md b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/tasks.md new file mode 100644 index 000000000..005c6af68 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-agent-only-mcp-workflow/tasks.md @@ -0,0 +1,29 @@ +# Tasks: Implementation of Pure MCP-Driven Workflow + +## 1. Core Logic Isolation +- [x] 1.1 Audit `src/core/` for `ora`, `chalk`, and `console.log` usage. +- [x] 1.2 Refactor `src/core/init.ts` to be a pure function returning initialization results. +- [x] 1.3 Refactor `src/core/update.ts` to return update statistics instead of logging. +- [x] 1.4 Refactor `src/core/archive.ts` to return archival reports. +- [x] 1.5 Extract dashboard data logic from `src/core/view.ts` into a pure data provider. +- [x] 1.6 Refactor experimental tools to follow the data-in/data-out pattern. + +## 2. Interface Implementation (CLI & MCP) +- [x] 2.1 Update CLI handlers in `src/commands/` to handle UI (spinners, colors) based on core data. +- [x] 2.2 Implement MCP tools in `src/mcp/tools.ts` using the same core data. +- [x] 2.3 Ensure full feature parity for all 12+ OpenSpec commands. + +## 3. Build & CI Validation +- [x] 3.1 Verify `bin/openspec.js` works as a standalone CLI after refactoring. +- [x] 3.2 Update `.github/workflows/ci.yml` to include a check that `openspec serve` is functional (e.g., exit code 0 on help). +- [x] 3.3 Ensure `pnpm run build` covers all new entry points. + +## 4. Documentation +- [x] 4.1 Update `src/mcp/prompts.ts` to use MCP tool names. +- [x] 4.2 Update `GEMINI.md` and `README.md`. + +## 3. Verification +- [x] 3.1 Verify `openspec_init` works via an MCP client (e.g., Gemini CLI) in a fresh directory. +- [x] 3.2 Verify `openspec_update` refreshes files correctly. +- [x] 3.3 Verify `openspec_create_change` scaffolds a new change directory. +- [x] 3.4 Ensure the CLI remains functional for users who prefer it. diff --git a/openspec/changes/archive/2026-01-12-refactor-core-isolation/proposal.md b/openspec/changes/archive/2026-01-12-refactor-core-isolation/proposal.md new file mode 100644 index 000000000..5ef0f146d --- /dev/null +++ b/openspec/changes/archive/2026-01-12-refactor-core-isolation/proposal.md @@ -0,0 +1,27 @@ +# Proposal: Complete Core Logic Isolation + +## Why +As part of the migration to a Pure MCP-Driven Workflow, we need to ensure that `src/core` contains only pure business logic and is completely free of CLI-specific dependencies (like `ora`, `chalk`, `inquirer`) and side effects (like `console.log`). Currently, several files in `src/core` mix logic with CLI presentation, which prevents them from being cleanly reused by the MCP server. + +Additionally, to streamline agent-user interaction, CLI commands should provide actionable "next steps" upon success, reducing the need for agents to generate these instructions manually. + +## What Changes +1. **Move CLI Commands**: The CLI-specific Command classes (which handle prompts, spinners, and stdout) will be moved from `src/core/*.ts` to `src/commands/*.ts`. +2. **Purify Core Modules**: The remaining files in `src/core` will export only pure functions that return data structures. +3. **Update Entry Point**: `src/cli/index.ts` will be updated to import the commands from their new locations in `src/commands`. +4. **Enhanced UX**: `ValidateCommand` will be updated to suggest `openspec show` or `openspec archive` upon successful validation. + +## Affected Files +- `src/core/init.ts` -> `src/commands/init.ts` (Logic stays in `src/core/init-logic.ts`) +- `src/core/update.ts` -> `src/commands/update.ts` (Logic stays in `src/core/update-logic.ts`) +- `src/core/archive.ts` -> `src/commands/archive.ts` (Logic stays in `src/core/archive-logic.ts`) +- `src/core/view.ts` -> `src/commands/view.ts` (Logic stays in `src/core/view-logic.ts`) +- `src/core/list.ts` -> `src/commands/list.ts` (Logic stays in `src/core/list.ts` as pure functions) +- `src/cli/index.ts` +- `src/commands/validate.ts` (Update success output) + +## Impact +- **Clean Architecture**: Strict separation of concerns between Logic (Core) and Presentation (CLI/MCP). +- **Reusability**: `src/core` becomes a shared library for both the CLI and the MCP server. +- **Testability**: Pure logic functions are easier to test without mocking stdin/stdout. +- **Agent Efficiency**: Reduced need for agents to explain standard workflows to users. \ No newline at end of file diff --git a/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/cli-validate/spec.md b/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/cli-validate/spec.md new file mode 100644 index 000000000..4e08943e9 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/cli-validate/spec.md @@ -0,0 +1,10 @@ +## ADDED Requirements + +### Requirement: Success Guidance +The command SHALL display suggested next steps upon successful validation to guide the user through the OpenSpec workflow. + +#### Scenario: Suggesting next steps for a change +- **WHEN** a change is successfully validated +- **THEN** display a "Next steps:" section +- **AND** suggest viewing the change details: `openspec show [id]` +- **AND** suggest archiving the change if tasks are complete: `openspec archive [id]` (contextual hint) diff --git a/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/mcp-server/spec.md b/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/mcp-server/spec.md new file mode 100644 index 000000000..bfc2ed481 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-refactor-core-isolation/specs/mcp-server/spec.md @@ -0,0 +1,9 @@ +## ADDED Requirements + +### Requirement: Shared Core Logic +The server SHALL use the same core logic modules as the CLI to ensure consistent behavior. + +#### Scenario: Using pure core modules +- **WHEN** the server executes a tool (e.g., `openspec_init`) +- **THEN** it SHALL call the pure logic function from `src/core` (e.g., `runInit`) +- **AND** it SHALL NOT invoke CLI-specific command wrappers diff --git a/openspec/changes/archive/2026-01-12-refactor-core-isolation/tasks.md b/openspec/changes/archive/2026-01-12-refactor-core-isolation/tasks.md new file mode 100644 index 000000000..ca3903c57 --- /dev/null +++ b/openspec/changes/archive/2026-01-12-refactor-core-isolation/tasks.md @@ -0,0 +1,34 @@ +# Tasks: Complete Core Logic Isolation + +## 1. Move Init Command +- [x] 1.1 Move `src/core/init.ts` (CLI implementation) to `src/commands/init.ts`. +- [x] 1.2 Update imports in `src/commands/init.ts` to point to `../core/init-logic.js` and other core modules. +- [x] 1.3 Ensure `src/core/init-logic.ts` is the only export from `src/core` related to initialization. + +## 2. Move Update Command +- [x] 2.1 Move `src/core/update.ts` (CLI implementation) to `src/commands/update.ts`. +- [x] 2.2 Update imports in `src/commands/update.ts` to point to `../core/update-logic.js`. + +## 3. Move Archive Command +- [x] 3.1 Move `src/core/archive.ts` (CLI implementation) to `src/commands/archive.ts`. +- [x] 3.2 Update imports in `src/commands/archive.ts` to point to `../core/archive-logic.js`. + +## 4. Move View Command +- [x] 4.1 Move `src/core/view.ts` (CLI implementation) to `src/commands/view.ts`. +- [x] 4.2 Update imports in `src/commands/view.ts` to point to `../core/view-logic.js`. + +## 5. Move List Command +- [x] 5.1 Extract `ListCommand` class from `src/core/list.ts` and move it to `src/commands/list.ts`. +- [x] 5.2 Keep `listChanges` and `listSpecs` pure functions in `src/core/list.ts` (or rename to `src/core/list-logic.ts` if preferred, but `list.ts` is fine if pure). +- [x] 5.3 Update imports in `src/commands/list.ts` to use `src/core/list.js`. + +## 6. Update CLI Entry Point +- [x] 6.1 Update `src/cli/index.ts` to import all commands from `src/commands/*.js`. + +## 7. Enhance Validate Command +- [x] 7.1 Update `src/commands/validate.ts` to display "Next steps" (e.g., `openspec show `) when validation succeeds. + +## 8. Verification +- [x] 8.1 Run `pnpm build` to ensure no circular dependencies or missing types. +- [x] 8.2 Run `bin/openspec.js list` to verify basic CLI functionality. +- [x] 8.3 Verify that `openspec validate ` suggests next steps. diff --git a/openspec/specs/ci-sync/spec.md b/openspec/specs/ci-sync/spec.md new file mode 100644 index 000000000..9599f206d --- /dev/null +++ b/openspec/specs/ci-sync/spec.md @@ -0,0 +1,20 @@ +# ci-sync Specification + +## Purpose +TBD - created by archiving change add-mcp-server. Update Purpose after archive. +## Requirements +### Requirement: Extension Version Synchronization +The system SHALL ensure that the version in `gemini-extension.json` matches the version in `package.json` during the CI process. + +#### Scenario: Version mismatch in CI +- **GIVEN** `package.json` has version `0.18.0` +- **AND** `gemini-extension.json` has version `0.17.0` +- **WHEN** the CI pipeline runs +- **THEN** the version check step SHALL fail +- **AND** report the mismatch to the logs + +#### Scenario: Version match in CI +- **GIVEN** both files have version `0.18.0` +- **WHEN** the CI pipeline runs +- **THEN** the version check step SHALL pass + diff --git a/openspec/specs/cli-init/spec.md b/openspec/specs/cli-init/spec.md index 99248841e..27027f4a0 100644 --- a/openspec/specs/cli-init/spec.md +++ b/openspec/specs/cli-init/spec.md @@ -19,13 +19,13 @@ The command SHALL display progress indicators during initialization to provide c - Then success: "βœ” AI tools configured" ### Requirement: Directory Creation -The command SHALL create the complete OpenSpec directory structure with all required directories and files. +The command SHALL create the complete OpenSpec directory structure in a hidden directory `.openspec/` to reduce clutter. #### Scenario: Creating OpenSpec structure - **WHEN** `openspec init` is executed - **THEN** create the following directory structure: ``` -openspec/ +.openspec/ β”œβ”€β”€ project.md β”œβ”€β”€ AGENTS.md β”œβ”€β”€ specs/ @@ -315,6 +315,25 @@ The command SHALL support non-interactive operation through command-line options - **AND** preserve any existing content outside the managed markers while replacing the stub text inside them - **AND** create the stub regardless of which native AI tools are selected +### Requirement: Slash Command Safety +All generated slash command templates SHALL include safety guardrails. + +#### Scenario: CLI Availability Check +- **WHEN** generating slash commands for any tool +- **THEN** the template SHALL include an instruction to verify the `openspec` CLI is installed and available in the environment +- **AND** guide the user to install it via `npm install -g @fission-ai/openspec` if missing + +### Requirement: Legacy Migration +The `init` command SHALL detect legacy `openspec/` directories and offer to migrate them to `.openspec/`. + +#### Scenario: Migrating legacy directory +- **GIVEN** a project with an existing `openspec/` directory +- **AND** no `.openspec/` directory exists +- **WHEN** executing `openspec init` +- **THEN** prompt the user: "Detected legacy 'openspec/' directory. Would you like to migrate it to '.openspec/'?" +- **AND** if confirmed, rename the directory +- **AND** update all managed AI instructions to point to the new location + ## Why Manual creation of OpenSpec structure is error-prone and creates adoption friction. A standardized init command ensures: diff --git a/openspec/specs/cli-spec/spec.md b/openspec/specs/cli-spec/spec.md index a279e7457..7ce7f03be 100644 --- a/openspec/specs/cli-spec/spec.md +++ b/openspec/specs/cli-spec/spec.md @@ -85,3 +85,11 @@ The spec validate command SHALL support interactive selection when no spec-id is - **AND** print the existing error message for missing spec-id - **AND** set non-zero exit code +### Requirement: Serve Command +The system SHALL provide a `serve` command to start the Model Context Protocol (MCP) server. + +#### Scenario: Start MCP Server +- **WHEN** executing `openspec serve` +- **THEN** start the MCP server using stdio transport +- **AND** keep the process alive to handle requests + diff --git a/openspec/specs/cli-validate/spec.md b/openspec/specs/cli-validate/spec.md index 5e543d230..03308eae8 100644 --- a/openspec/specs/cli-validate/spec.md +++ b/openspec/specs/cli-validate/spec.md @@ -216,3 +216,12 @@ The markdown parser SHALL correctly identify sections regardless of line ending - **WHEN** running `openspec validate ` - **THEN** validation SHALL recognize the sections and NOT raise parsing errors +### Requirement: Success Guidance +The command SHALL display suggested next steps upon successful validation to guide the user through the OpenSpec workflow. + +#### Scenario: Suggesting next steps for a change +- **WHEN** a change is successfully validated +- **THEN** display a "Next steps:" section +- **AND** suggest viewing the change details: `openspec show [id]` +- **AND** suggest archiving the change if tasks are complete: `openspec archive [id]` (contextual hint) + diff --git a/openspec/specs/mcp-server/spec.md b/openspec/specs/mcp-server/spec.md new file mode 100644 index 000000000..9d3c2cd6c --- /dev/null +++ b/openspec/specs/mcp-server/spec.md @@ -0,0 +1,94 @@ +# mcp-server Specification + +## Purpose +TBD - created by archiving change add-mcp-server. Update Purpose after archive. +## Requirements +### Requirement: Expose Tools +The server SHALL expose core OpenSpec capabilities as MCP tools. + +#### Scenario: List Tools +- **WHEN** the client requests `tools/list` +- **THEN** return `openspec_list`, `openspec_show`, `openspec_validate`, `openspec_archive` tools +- **AND** include descriptions and JSON schemas for arguments + +### Requirement: Expose Resources +The server SHALL expose specs and changes as MCP resources. + +#### Scenario: List Resources +- **WHEN** the client requests `resources/list` +- **THEN** return a list of available specs and changes with `openspec://` URIs + +#### Scenario: Read Resource +- **WHEN** the client requests `resources/read` for a valid URI +- **THEN** return the content of the corresponding file (markdown or JSON) + +### Requirement: Expose Prompts +The server SHALL expose standard OpenSpec prompts. + +#### Scenario: List Prompts +- **WHEN** the client requests `prompts/list` +- **THEN** return `proposal`, `apply`, `archive` prompts + +### Requirement: Shared Core Logic +The server SHALL use the same core logic modules as the CLI to ensure consistent behavior. + +#### Scenario: Using pure core modules +- **WHEN** the server executes a tool (e.g., `openspec_init`) +- **THEN** it SHALL call the pure logic function from `src/core` (e.g., `runInit`) +- **AND** it SHALL NOT invoke CLI-specific command wrappers + +### Requirement: Shared Core Implementation +The MCP server and the CLI SHALL share the same underlying business logic implementation for all operations. + +#### Scenario: Consistency between CLI and MCP +- **WHEN** an operation (e.g., init, list, archive) is performed via CLI +- **AND** the same operation is performed via MCP +- **THEN** both SHALL yield consistent results by calling the same core functions. + +### Requirement: Project Initialization Tool +The MCP server SHALL provide a tool `openspec_init` to initialize the OpenSpec structure. + +#### Scenario: Initializing project via MCP +- **WHEN** the `openspec_init` tool is called +- **THEN** execute the shared `runInit` logic +- **AND** return a structured summary of created items. + +### Requirement: Change Creation Tool +The MCP server SHALL provide a tool `openspec_create_change` to scaffold a new change directory. + +#### Scenario: Creating a new change via MCP +- **WHEN** the `openspec_create_change` tool is called with `name` +- **THEN** execute the shared `runCreateChange` logic +- **AND** return the paths of created files. + +### Requirement: MCP-First Instructions +The MCP server SHALL provide prompts that prioritize MCP tools while maintaining CLI references as a secondary option for human readability. + +#### Scenario: Guidance in MCP prompts +- **WHEN** an agent retrieves a prompt via MCP +- **THEN** the instructions SHALL explicitly list MCP tool calls as the primary action (e.g., "Use openspec_list_changes to view state") +- **AND** the instructions MAY provide the CLI equivalent for reference. + +### Requirement: Test Coverage +The MCP server implementation SHALL have unit and integration tests. + +#### Scenario: Testing Tool Definitions +- **WHEN** the test suite runs +- **THEN** it SHALL verify that all exposed tools have correct names, descriptions, and schemas. + +#### Scenario: Testing Resource Resolution +- **WHEN** the test suite runs +- **THEN** it SHALL verify that `openspec://` URIs are correctly parsed and resolved to file paths. + +#### Scenario: Testing Prompt Content +- **WHEN** the test suite runs +- **THEN** it SHALL verify that prompts can be retrieved and contain expected placeholders. + +### Requirement: Testability of Core Logic +The core logic used by the MCP server SHALL be testable independently of the CLI or MCP transport layer. + +#### Scenario: Unit Testing Core Functions +- **WHEN** a core function (e.g., `runCreateChange`, `runListChanges`) is tested +- **THEN** it SHALL be possible to invoke it without mocking CLI-specific objects (like `process` or `console` capture). +- **AND** it SHALL return structured data rather than writing to stdout. + diff --git a/package.json b/package.json index d36a9af71..bd328a64e 100644 --- a/package.json +++ b/package.json @@ -52,8 +52,11 @@ "prepublishOnly": "pnpm run build", "postinstall": "node scripts/postinstall.js", "check:pack-version": "node scripts/pack-version-check.mjs", + "sync-extension-version": "node scripts/sync-extension-version.mjs", + "ci:version": "changeset version && pnpm run sync-extension-version", "release": "pnpm run release:ci", "release:ci": "pnpm run check:pack-version && pnpm exec changeset publish", + "release:local": "pnpm run ci:version && pnpm run check:pack-version && pnpm exec changeset publish", "changeset": "changeset" }, "engines": { @@ -75,9 +78,10 @@ "chalk": "^5.5.0", "commander": "^14.0.0", "fast-glob": "^3.3.3", + "fastmcp": "^3.26.8", "ora": "^8.2.0", "posthog-node": "^5.20.0", "yaml": "^2.8.2", "zod": "^4.0.17" } -} +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 92da325a7..014a3cd88 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -23,6 +23,9 @@ importers: fast-glob: specifier: ^3.3.3 version: 3.3.3 + fastmcp: + specifier: ^3.26.8 + version: 3.26.8(hono@4.11.3) ora: specifier: ^8.2.0 version: 8.2.0 @@ -67,6 +70,9 @@ packages: resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==} engines: {node: '>=6.9.0'} + '@borewit/text-codec@0.2.1': + resolution: {integrity: sha512-k7vvKPbf7J2fZ5klGRD9AeKfUvojuZIQ3BT5u7Jfv+puwXkUBUT5PVyMDfJZpy30CBDXGMgw7fguK/lpOMBvgw==} + '@changesets/apply-release-plan@7.0.12': resolution: {integrity: sha512-EaET7As5CeuhTzvXTQCRZeBUcisoYPDDcXvgTE/2jmmypKp0RC7LxKj/yzqeh/1qFTZI7oDGFcL1PHRuQuketQ==} @@ -322,6 +328,12 @@ packages: resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@hono/node-server@1.19.8': + resolution: {integrity: sha512-0/g2lIOPzX8f3vzW1ggQgvG5mjtFBDBHFAzI5SFAi2DzSqS9luJwqg9T6O/gKYLi+inS7eNxBeIFkkghIPvrMA==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -481,6 +493,16 @@ packages: '@manypkg/get-packages@1.1.3': resolution: {integrity: sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A==} + '@modelcontextprotocol/sdk@1.25.2': + resolution: {integrity: sha512-LZFeo4F9M5qOhC/Uc1aQSrBHxMrvxett+9KLHt7OhcExtoiRN9DKgbZffMP/nxjutWDQpfMDfP3nkHI4X9ijww==} + engines: {node: '>=18'} + peerDependencies: + '@cfworker/json-schema': ^4.1.1 + zod: ^3.25 || ^4.0 + peerDependenciesMeta: + '@cfworker/json-schema': + optional: true + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -599,6 +621,23 @@ packages: cpu: [x64] os: [win32] + '@sec-ant/readable-stream@0.4.1': + resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==} + + '@sindresorhus/merge-streams@4.0.0': + resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==} + engines: {node: '>=18'} + + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@tokenizer/inflate@0.4.1': + resolution: {integrity: sha512-2mAv+8pkG6GIZiF1kNg1jAjh27IDxEPKwdGul3snfztFerfPGI1LjDezZp3i7BElXompqEtPmoPx6c2wgtWsOA==} + engines: {node: '>=18'} + + '@tokenizer/token@0.3.0': + resolution: {integrity: sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==} + '@types/chai@5.2.2': resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==} @@ -710,6 +749,10 @@ packages: '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + accepts@2.0.0: + resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} + engines: {node: '>= 0.6'} + acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: @@ -720,9 +763,20 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + ajv-formats@3.0.1: + resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + ajv@8.17.1: + resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + ansi-colors@4.1.3: resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} engines: {node: '>=6'} @@ -743,6 +797,10 @@ packages: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} + ansi-styles@6.2.3: + resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} @@ -764,6 +822,10 @@ packages: resolution: {integrity: sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} engines: {node: '>=4'} + body-parser@2.2.2: + resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} + engines: {node: '>=18'} + brace-expansion@1.1.12: resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} @@ -774,10 +836,22 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-bound@1.0.4: + resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} @@ -820,6 +894,10 @@ packages: resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} engines: {node: '>= 12'} + cliui@9.0.1: + resolution: {integrity: sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==} + engines: {node: '>=20'} + color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} @@ -834,6 +912,26 @@ packages: concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + content-disposition@1.0.1: + resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==} + engines: {node: '>=18'} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + + cookie-signature@1.2.2: + resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==} + engines: {node: '>=6.6.0'} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + + cors@2.8.5: + resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} + engines: {node: '>= 0.10'} + cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} @@ -850,6 +948,15 @@ packages: supports-color: optional: true + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + deep-eql@5.0.2: resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} engines: {node: '>=6'} @@ -857,6 +964,10 @@ packages: deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + detect-indent@6.1.0: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} @@ -869,24 +980,54 @@ packages: resolution: {integrity: sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==} engines: {node: '>=10'} + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + emoji-regex@10.4.0: resolution: {integrity: sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==} emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + enquirer@2.4.1: resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==} engines: {node: '>=8.6'} + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + es-module-lexer@1.7.0: resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + esbuild@0.25.8: resolution: {integrity: sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==} engines: {node: '>=18'} hasBin: true + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + escape-string-regexp@4.0.0: resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} engines: {node: '>=10'} @@ -941,10 +1082,36 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} + etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + + eventsource-parser@3.0.6: + resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} + engines: {node: '>=18.0.0'} + + eventsource@3.0.7: + resolution: {integrity: sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==} + engines: {node: '>=18.0.0'} + + execa@9.6.1: + resolution: {integrity: sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA==} + engines: {node: ^18.19.0 || >=20.5.0} + expect-type@1.2.2: resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} engines: {node: '>=12.0.0'} + express-rate-limit@7.5.1: + resolution: {integrity: sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==} + engines: {node: '>= 16'} + peerDependencies: + express: '>= 4.11' + + express@5.2.1: + resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==} + engines: {node: '>= 18'} + extendable-error@0.1.7: resolution: {integrity: sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==} @@ -965,6 +1132,18 @@ packages: fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + + fastmcp@3.26.8: + resolution: {integrity: sha512-DQgvEdSoQpISqPDgLZe5K2RU8ICCz3/5QcEhHmz4KiNcNSdFOo7o817mGwWxPffOLe36vypCulq22cvEwqBi/A==} + hasBin: true + peerDependencies: + jose: ^5.0.0 + peerDependenciesMeta: + jose: + optional: true + fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} @@ -988,14 +1167,26 @@ packages: fflate@0.8.2: resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} + figures@6.1.0: + resolution: {integrity: sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==} + engines: {node: '>=18'} + file-entry-cache@8.0.0: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} + file-type@21.3.0: + resolution: {integrity: sha512-8kPJMIGz1Yt/aPEwOsrR97ZyZaD1Iqm8PClb1nYFclUCkBi0Ma5IsYNQzvSFS9ib51lWyIw5mIT9rWzI/xjpzA==} + engines: {node: '>=20'} + fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + finalhandler@2.1.1: + resolution: {integrity: sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==} + engines: {node: '>= 18.0.0'} + find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} @@ -1011,6 +1202,14 @@ packages: flatted@3.3.3: resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + + fresh@2.0.0: + resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} + engines: {node: '>= 0.8'} + fs-extra@7.0.1: resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} engines: {node: '>=6 <7 || >=8'} @@ -1024,10 +1223,33 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + fuse.js@7.1.0: + resolution: {integrity: sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ==} + engines: {node: '>=10'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + get-east-asian-width@1.3.0: resolution: {integrity: sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==} engines: {node: '>=18'} + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + get-stream@9.0.1: + resolution: {integrity: sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==} + engines: {node: '>=18'} + glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} @@ -1044,6 +1266,10 @@ packages: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -1051,10 +1277,30 @@ packages: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + hono@4.11.3: + resolution: {integrity: sha512-PmQi306+M/ct/m5s66Hrg+adPnkD5jiO6IjA7WhWw0gSBSo1EcRegwuI1deZ+wd5pzCGynCcn2DprnE4/yEV4w==} + engines: {node: '>=16.9.0'} + + http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + human-id@4.1.1: resolution: {integrity: sha512-3gKm/gCSUipeLsRYZbbdA1BD83lBoWUkZ7G9VFrhWPAU76KwYo5KR8V28bpoPm/ygy0x5/GCbpRQdY7VLYCoIg==} hasBin: true + human-signals@8.0.1: + resolution: {integrity: sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==} + engines: {node: '>=18.18.0'} + iconv-lite@0.4.24: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} @@ -1063,6 +1309,13 @@ packages: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -1079,6 +1332,13 @@ packages: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} engines: {node: '>=0.8.19'} + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -1099,6 +1359,17 @@ packages: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} + + is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + + is-stream@4.0.1: + resolution: {integrity: sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==} + engines: {node: '>=18'} + is-subdir@1.2.0: resolution: {integrity: sha512-2AT6j+gXe/1ueqbW6fLZJiIw3F8iXGJtt0yDrZaBhAZEG1raiTxKWU+IPqMCzQAXOUCKdA4UDMgacKH25XG2Cw==} engines: {node: '>=4'} @@ -1118,6 +1389,9 @@ packages: isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + jose@6.1.3: + resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==} + js-tokens@9.0.1: resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} @@ -1135,6 +1409,12 @@ packages: json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + json-schema-typed@8.0.2: + resolution: {integrity: sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==} + json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} @@ -1172,6 +1452,22 @@ packages: magic-string@0.30.17: resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mcp-proxy@5.12.5: + resolution: {integrity: sha512-Vawdc8vi36fXxKCaDpluRvbGcmrUXJdvXcDhkh30HYsws8XqX2rWPBflZpavzeS+6SwijRFV7g+9ypQRJZlrEQ==} + hasBin: true + + media-typer@1.1.0: + resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} + engines: {node: '>= 0.8'} + + merge-descriptors@2.0.0: + resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} + engines: {node: '>=18'} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -1180,6 +1476,14 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + + mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + mimic-function@5.0.1: resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} engines: {node: '>=18'} @@ -1214,6 +1518,10 @@ packages: natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + negotiator@1.0.0: + resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} + engines: {node: '>= 0.6'} + node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} engines: {node: 4.x || >=6.0.0} @@ -1223,6 +1531,25 @@ packages: encoding: optional: true + npm-run-path@6.0.0: + resolution: {integrity: sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==} + engines: {node: '>=18'} + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + + on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + onetime@7.0.0: resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} engines: {node: '>=18'} @@ -1277,6 +1604,14 @@ packages: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} + parse-ms@4.0.0: + resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} + engines: {node: '>=18'} + + parseurl@1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -1285,6 +1620,13 @@ packages: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} + path-key@4.0.0: + resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} + engines: {node: '>=12'} + + path-to-regexp@8.3.0: + resolution: {integrity: sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==} + path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -1311,6 +1653,10 @@ packages: resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} engines: {node: '>=6'} + pkce-challenge@5.0.1: + resolution: {integrity: sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==} + engines: {node: '>=16.20.0'} + postcss@8.5.6: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} @@ -1328,20 +1674,44 @@ packages: engines: {node: '>=10.13.0'} hasBin: true + pretty-ms@9.3.0: + resolution: {integrity: sha512-gjVS5hOP+M3wMm5nmNOucbIrqudzs9v/57bWRHQWLYklXqoXKrVfYW2W9+glfGsqtPgpiz5WwyEEB+ksXIx3gQ==} + engines: {node: '>=18'} + + proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} + qs@6.14.1: + resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==} + engines: {node: '>=0.6'} + quansync@0.2.11: resolution: {integrity: sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==} queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + + raw-body@3.0.2: + resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} + engines: {node: '>= 0.10'} + read-yaml-file@1.1.0: resolution: {integrity: sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA==} engines: {node: '>=6'} + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -1363,6 +1733,10 @@ packages: engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -1374,6 +1748,17 @@ packages: engines: {node: '>=10'} hasBin: true + send@1.2.1: + resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==} + engines: {node: '>= 18'} + + serve-static@2.2.1: + resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==} + engines: {node: '>= 18'} + + setprototypeof@1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -1382,6 +1767,22 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} + side-channel-list@1.0.0: + resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + + side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + + side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + + side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} @@ -1410,6 +1811,10 @@ packages: stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + std-env@3.9.0: resolution: {integrity: sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==} @@ -1417,6 +1822,9 @@ packages: resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==} engines: {node: '>=18'} + strict-event-emitter-types@2.0.0: + resolution: {integrity: sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==} + string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -1437,6 +1845,10 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} + strip-final-newline@4.0.0: + resolution: {integrity: sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==} + engines: {node: '>=18'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} @@ -1444,6 +1856,10 @@ packages: strip-literal@3.0.0: resolution: {integrity: sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==} + strtok3@10.3.4: + resolution: {integrity: sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==} + engines: {node: '>=18'} + supports-color@7.2.0: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} @@ -1486,6 +1902,14 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} + toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + + token-types@6.1.2: + resolution: {integrity: sha512-dRXchy+C0IgK8WPC6xvCHFRIWYUbqqdEIKPaKo/AcTUNzwLTK6AH7RjdLWsEZcAN/TBdtfUw3PYEgPr5VPr6ww==} + engines: {node: '>=14.16'} + totalist@3.0.1: resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} engines: {node: '>=6'} @@ -1507,6 +1931,10 @@ packages: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} + type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + typescript-eslint@8.50.1: resolution: {integrity: sha512-ytTHO+SoYSbhAH9CrYnMhiLx8To6PSSvqnvXyPUgPETCvB6eBKmTI9w6XMPS3HsBRGkwTVBX+urA8dYQx6bHfQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1519,16 +1947,39 @@ packages: engines: {node: '>=14.17'} hasBin: true + uint8array-extras@1.5.0: + resolution: {integrity: sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==} + engines: {node: '>=18'} + undici-types@7.10.0: resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} + undici@7.18.2: + resolution: {integrity: sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==} + engines: {node: '>=20.18.1'} + + unicorn-magic@0.3.0: + resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} + engines: {node: '>=18'} + universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} + unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + uri-templates@0.2.0: + resolution: {integrity: sha512-EWkjYEN0L6KOfEoOH6Wj4ghQqU7eBZMJqRHQnxQAq+dSEzRPClkWjf8557HkWQXF6BrAUoLSAyy9i3RVTliaNg==} + + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + vite-node@3.2.4: resolution: {integrity: sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -1626,11 +2077,53 @@ packages: resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} engines: {node: '>=8'} + wrap-ansi@9.0.2: + resolution: {integrity: sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==} + engines: {node: '>=18'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + xsschema@0.4.0-beta.5: + resolution: {integrity: sha512-73pYwf1hMy++7SnOkghJdgdPaGi+Y5I0SaO6rIlxb1ouV6tEyDbEcXP82kyr32KQVTlUbFj6qewi9eUVEiXm+g==} + peerDependencies: + '@valibot/to-json-schema': ^1.0.0 + arktype: ^2.1.20 + effect: ^3.16.0 + sury: ^10.0.0 + zod: ^3.25.0 || ^4.0.0 + zod-to-json-schema: ^3.24.5 + peerDependenciesMeta: + '@valibot/to-json-schema': + optional: true + arktype: + optional: true + effect: + optional: true + sury: + optional: true + zod: + optional: true + zod-to-json-schema: + optional: true + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + yaml@2.8.2: resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} engines: {node: '>= 14.6'} hasBin: true + yargs-parser@22.0.0: + resolution: {integrity: sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==} + engines: {node: ^20.19.0 || ^22.12.0 || >=23} + + yargs@18.0.0: + resolution: {integrity: sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=23} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} @@ -1639,13 +2132,27 @@ packages: resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} engines: {node: '>=18'} + yoctocolors@2.1.2: + resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==} + engines: {node: '>=18'} + + zod-to-json-schema@3.25.1: + resolution: {integrity: sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==} + peerDependencies: + zod: ^3.25 || ^4 + zod@4.0.17: resolution: {integrity: sha512-1PHjlYRevNxxdy2JZ8JcNAw7rX8V9P1AKkP+x/xZfxB0K5FYfuV+Ug6P/6NVSR2jHQ+FzDDoDHS04nYUsOIyLQ==} + zod@4.3.5: + resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==} + snapshots: '@babel/runtime@7.28.4': {} + '@borewit/text-codec@0.2.1': {} + '@changesets/apply-release-plan@7.0.12': dependencies: '@changesets/config': 3.1.1 @@ -1929,6 +2436,10 @@ snapshots: '@eslint/core': 0.17.0 levn: 0.4.1 + '@hono/node-server@1.19.8(hono@4.11.3)': + dependencies: + hono: 4.11.3 + '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': @@ -2083,6 +2594,28 @@ snapshots: globby: 11.1.0 read-yaml-file: 1.1.0 + '@modelcontextprotocol/sdk@1.25.2(hono@4.11.3)(zod@4.3.5)': + dependencies: + '@hono/node-server': 1.19.8(hono@4.11.3) + ajv: 8.17.1 + ajv-formats: 3.0.1(ajv@8.17.1) + content-type: 1.0.5 + cors: 2.8.5 + cross-spawn: 7.0.6 + eventsource: 3.0.7 + eventsource-parser: 3.0.6 + express: 5.2.1 + express-rate-limit: 7.5.1(express@5.2.1) + jose: 6.1.3 + json-schema-typed: 8.0.2 + pkce-challenge: 5.0.1 + raw-body: 3.0.2 + zod: 4.3.5 + zod-to-json-schema: 3.25.1(zod@4.3.5) + transitivePeerDependencies: + - hono + - supports-color + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -2161,6 +2694,21 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.46.2': optional: true + '@sec-ant/readable-stream@0.4.1': {} + + '@sindresorhus/merge-streams@4.0.0': {} + + '@standard-schema/spec@1.1.0': {} + + '@tokenizer/inflate@0.4.1': + dependencies: + debug: 4.4.3 + token-types: 6.1.2 + transitivePeerDependencies: + - supports-color + + '@tokenizer/token@0.3.0': {} + '@types/chai@5.2.2': dependencies: '@types/deep-eql': 4.0.2 @@ -2321,12 +2869,21 @@ snapshots: loupe: 3.2.0 tinyrainbow: 2.0.0 + accepts@2.0.0: + dependencies: + mime-types: 3.0.2 + negotiator: 1.0.0 + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: acorn: 8.15.0 acorn@8.15.0: {} + ajv-formats@3.0.1(ajv@8.17.1): + optionalDependencies: + ajv: 8.17.1 + ajv@6.12.6: dependencies: fast-deep-equal: 3.1.3 @@ -2334,6 +2891,13 @@ snapshots: json-schema-traverse: 0.4.1 uri-js: 4.4.1 + ajv@8.17.1: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + ansi-colors@4.1.3: {} ansi-escapes@4.3.2: @@ -2348,6 +2912,8 @@ snapshots: dependencies: color-convert: 2.0.1 + ansi-styles@6.2.3: {} + argparse@1.0.10: dependencies: sprintf-js: 1.0.3 @@ -2364,6 +2930,20 @@ snapshots: dependencies: is-windows: 1.0.2 + body-parser@2.2.2: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.3 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + on-finished: 2.4.1 + qs: 6.14.1 + raw-body: 3.0.2 + type-is: 2.0.1 + transitivePeerDependencies: + - supports-color + brace-expansion@1.1.12: dependencies: balanced-match: 1.0.2 @@ -2377,8 +2957,20 @@ snapshots: dependencies: fill-range: 7.1.1 + bytes@3.1.2: {} + cac@6.7.14: {} + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + callsites@3.1.0: {} chai@5.2.1: @@ -2412,6 +3004,12 @@ snapshots: cli-width@4.1.0: {} + cliui@9.0.1: + dependencies: + string-width: 7.2.0 + strip-ansi: 7.1.0 + wrap-ansi: 9.0.2 + color-convert@2.0.1: dependencies: color-name: 1.1.4 @@ -2422,6 +3020,19 @@ snapshots: concat-map@0.0.1: {} + content-disposition@1.0.1: {} + + content-type@1.0.5: {} + + cookie-signature@1.2.2: {} + + cookie@0.7.2: {} + + cors@2.8.5: + dependencies: + object-assign: 4.1.1 + vary: 1.1.2 + cross-spawn@7.0.6: dependencies: path-key: 3.1.1 @@ -2434,10 +3045,16 @@ snapshots: dependencies: ms: 2.1.3 + debug@4.4.3: + dependencies: + ms: 2.1.3 + deep-eql@5.0.2: {} deep-is@0.1.4: {} + depd@2.0.0: {} + detect-indent@6.1.0: {} dir-glob@3.0.1: @@ -2446,17 +3063,35 @@ snapshots: dotenv@8.6.0: {} + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + ee-first@1.1.1: {} + emoji-regex@10.4.0: {} emoji-regex@8.0.0: {} + encodeurl@2.0.0: {} + enquirer@2.4.1: dependencies: ansi-colors: 4.1.3 strip-ansi: 6.0.1 + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + es-module-lexer@1.7.0: {} + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + esbuild@0.25.8: optionalDependencies: '@esbuild/aix-ppc64': 0.25.8 @@ -2486,6 +3121,10 @@ snapshots: '@esbuild/win32-ia32': 0.25.8 '@esbuild/win32-x64': 0.25.8 + escalade@3.2.0: {} + + escape-html@1.0.3: {} + escape-string-regexp@4.0.0: {} eslint-scope@8.4.0: @@ -2560,8 +3199,68 @@ snapshots: esutils@2.0.3: {} + etag@1.8.1: {} + + eventsource-parser@3.0.6: {} + + eventsource@3.0.7: + dependencies: + eventsource-parser: 3.0.6 + + execa@9.6.1: + dependencies: + '@sindresorhus/merge-streams': 4.0.0 + cross-spawn: 7.0.6 + figures: 6.1.0 + get-stream: 9.0.1 + human-signals: 8.0.1 + is-plain-obj: 4.1.0 + is-stream: 4.0.1 + npm-run-path: 6.0.0 + pretty-ms: 9.3.0 + signal-exit: 4.1.0 + strip-final-newline: 4.0.0 + yoctocolors: 2.1.2 + expect-type@1.2.2: {} + express-rate-limit@7.5.1(express@5.2.1): + dependencies: + express: 5.2.1 + + express@5.2.1: + dependencies: + accepts: 2.0.0 + body-parser: 2.2.2 + content-disposition: 1.0.1 + content-type: 1.0.5 + cookie: 0.7.2 + cookie-signature: 1.2.2 + debug: 4.4.1 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.1 + fresh: 2.0.0 + http-errors: 2.0.1 + merge-descriptors: 2.0.0 + mime-types: 3.0.2 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.14.1 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.2.1 + serve-static: 2.2.1 + statuses: 2.0.2 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + extendable-error@0.1.7: {} external-editor@3.1.0: @@ -2584,6 +3283,32 @@ snapshots: fast-levenshtein@2.0.6: {} + fast-uri@3.1.0: {} + + fastmcp@3.26.8(hono@4.11.3): + dependencies: + '@modelcontextprotocol/sdk': 1.25.2(hono@4.11.3)(zod@4.3.5) + '@standard-schema/spec': 1.1.0 + execa: 9.6.1 + file-type: 21.3.0 + fuse.js: 7.1.0 + mcp-proxy: 5.12.5 + strict-event-emitter-types: 2.0.0 + undici: 7.18.2 + uri-templates: 0.2.0 + xsschema: 0.4.0-beta.5(zod-to-json-schema@3.25.1(zod@4.0.17))(zod@4.3.5) + yargs: 18.0.0 + zod: 4.3.5 + zod-to-json-schema: 3.25.1(zod@4.3.5) + transitivePeerDependencies: + - '@cfworker/json-schema' + - '@valibot/to-json-schema' + - arktype + - effect + - hono + - supports-color + - sury + fastq@1.19.1: dependencies: reusify: 1.1.0 @@ -2598,14 +3323,38 @@ snapshots: fflate@0.8.2: {} + figures@6.1.0: + dependencies: + is-unicode-supported: 2.1.0 + file-entry-cache@8.0.0: dependencies: flat-cache: 4.0.1 + file-type@21.3.0: + dependencies: + '@tokenizer/inflate': 0.4.1 + strtok3: 10.3.4 + token-types: 6.1.2 + uint8array-extras: 1.5.0 + transitivePeerDependencies: + - supports-color + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 + finalhandler@2.1.1: + dependencies: + debug: 4.4.1 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + find-up@4.1.0: dependencies: locate-path: 5.0.0 @@ -2623,6 +3372,10 @@ snapshots: flatted@3.3.3: {} + forwarded@0.2.0: {} + + fresh@2.0.0: {} + fs-extra@7.0.1: dependencies: graceful-fs: 4.2.11 @@ -2638,8 +3391,37 @@ snapshots: fsevents@2.3.3: optional: true + function-bind@1.1.2: {} + + fuse.js@7.1.0: {} + + get-caller-file@2.0.5: {} + get-east-asian-width@1.3.0: {} + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-stream@9.0.1: + dependencies: + '@sec-ant/readable-stream': 0.4.1 + is-stream: 4.0.1 + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -2659,12 +3441,32 @@ snapshots: merge2: 1.4.1 slash: 3.0.0 + gopd@1.2.0: {} + graceful-fs@4.2.11: {} has-flag@4.0.0: {} + has-symbols@1.1.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hono@4.11.3: {} + + http-errors@2.0.1: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + human-id@4.1.1: {} + human-signals@8.0.1: {} + iconv-lite@0.4.24: dependencies: safer-buffer: 2.1.2 @@ -2673,6 +3475,12 @@ snapshots: dependencies: safer-buffer: 2.1.2 + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.2.1: {} + ignore@5.3.2: {} ignore@7.0.5: {} @@ -2684,6 +3492,10 @@ snapshots: imurmurhash@0.1.4: {} + inherits@2.0.4: {} + + ipaddr.js@1.9.1: {} + is-extglob@2.1.1: {} is-fullwidth-code-point@3.0.0: {} @@ -2696,6 +3508,12 @@ snapshots: is-number@7.0.0: {} + is-plain-obj@4.1.0: {} + + is-promise@4.0.0: {} + + is-stream@4.0.1: {} + is-subdir@1.2.0: dependencies: better-path-resolve: 1.0.0 @@ -2708,6 +3526,8 @@ snapshots: isexe@2.0.0: {} + jose@6.1.3: {} + js-tokens@9.0.1: {} js-yaml@3.14.1: @@ -2723,6 +3543,10 @@ snapshots: json-schema-traverse@0.4.1: {} + json-schema-traverse@1.0.0: {} + + json-schema-typed@8.0.2: {} + json-stable-stringify-without-jsonify@1.0.1: {} jsonfile@4.0.0: @@ -2761,6 +3585,14 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.4 + math-intrinsics@1.1.0: {} + + mcp-proxy@5.12.5: {} + + media-typer@1.1.0: {} + + merge-descriptors@2.0.0: {} + merge2@1.4.1: {} micromatch@4.0.8: @@ -2768,6 +3600,12 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + mime-db@1.54.0: {} + + mime-types@3.0.2: + dependencies: + mime-db: 1.54.0 + mimic-function@5.0.1: {} minimatch@3.1.2: @@ -2790,10 +3628,29 @@ snapshots: natural-compare@1.4.0: {} + negotiator@1.0.0: {} + node-fetch@2.7.0: dependencies: whatwg-url: 5.0.0 + npm-run-path@6.0.0: + dependencies: + path-key: 4.0.0 + unicorn-magic: 0.3.0 + + object-assign@4.1.1: {} + + object-inspect@1.13.4: {} + + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + onetime@7.0.0: dependencies: mimic-function: 5.0.1 @@ -2855,10 +3712,18 @@ snapshots: dependencies: callsites: 3.1.0 + parse-ms@4.0.0: {} + + parseurl@1.3.3: {} + path-exists@4.0.0: {} path-key@3.1.1: {} + path-key@4.0.0: {} + + path-to-regexp@8.3.0: {} + path-type@4.0.0: {} pathe@2.0.3: {} @@ -2873,6 +3738,8 @@ snapshots: pify@4.0.1: {} + pkce-challenge@5.0.1: {} + postcss@8.5.6: dependencies: nanoid: 3.3.11 @@ -2887,12 +3754,34 @@ snapshots: prettier@2.8.8: {} + pretty-ms@9.3.0: + dependencies: + parse-ms: 4.0.0 + + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + punycode@2.3.1: {} + qs@6.14.1: + dependencies: + side-channel: 1.1.0 + quansync@0.2.11: {} queue-microtask@1.2.3: {} + range-parser@1.2.1: {} + + raw-body@3.0.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + unpipe: 1.0.0 + read-yaml-file@1.1.0: dependencies: graceful-fs: 4.2.11 @@ -2900,6 +3789,8 @@ snapshots: pify: 4.0.1 strip-bom: 3.0.0 + require-from-string@2.0.2: {} + resolve-from@4.0.0: {} resolve-from@5.0.0: {} @@ -2937,6 +3828,16 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.46.2 fsevents: 2.3.3 + router@2.2.0: + dependencies: + debug: 4.4.1 + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.3.0 + transitivePeerDependencies: + - supports-color + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -2945,12 +3846,67 @@ snapshots: semver@7.7.2: {} + send@1.2.1: + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.1 + mime-types: 3.0.2 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + + serve-static@2.2.1: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.1 + transitivePeerDependencies: + - supports-color + + setprototypeof@1.2.0: {} + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0 shebang-regex@3.0.0: {} + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + siginfo@2.0.0: {} signal-exit@4.1.0: {} @@ -2974,10 +3930,14 @@ snapshots: stackback@0.0.2: {} + statuses@2.0.2: {} + std-env@3.9.0: {} stdin-discarder@0.2.2: {} + strict-event-emitter-types@2.0.0: {} + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -3000,12 +3960,18 @@ snapshots: strip-bom@3.0.0: {} + strip-final-newline@4.0.0: {} + strip-json-comments@3.1.1: {} strip-literal@3.0.0: dependencies: js-tokens: 9.0.1 + strtok3@10.3.4: + dependencies: + '@tokenizer/token': 0.3.0 + supports-color@7.2.0: dependencies: has-flag: 4.0.0 @@ -3040,6 +4006,14 @@ snapshots: dependencies: is-number: 7.0.0 + toidentifier@1.0.1: {} + + token-types@6.1.2: + dependencies: + '@borewit/text-codec': 0.2.1 + '@tokenizer/token': 0.3.0 + ieee754: 1.2.1 + totalist@3.0.1: {} tr46@0.0.3: {} @@ -3054,6 +4028,12 @@ snapshots: type-fest@0.21.3: {} + type-is@2.0.1: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.2 + typescript-eslint@8.50.1(eslint@9.39.2)(typescript@5.9.3): dependencies: '@typescript-eslint/eslint-plugin': 8.50.1(@typescript-eslint/parser@8.50.1(eslint@9.39.2)(typescript@5.9.3))(eslint@9.39.2)(typescript@5.9.3) @@ -3067,14 +4047,26 @@ snapshots: typescript@5.9.3: {} + uint8array-extras@1.5.0: {} + undici-types@7.10.0: {} + undici@7.18.2: {} + + unicorn-magic@0.3.0: {} + universalify@0.1.2: {} + unpipe@1.0.0: {} + uri-js@4.4.1: dependencies: punycode: 2.3.1 + uri-templates@0.2.0: {} + + vary@1.1.2: {} + vite-node@3.2.4(@types/node@24.2.0)(yaml@2.8.2): dependencies: cac: 6.7.14 @@ -3175,10 +4167,44 @@ snapshots: string-width: 4.2.3 strip-ansi: 6.0.1 + wrap-ansi@9.0.2: + dependencies: + ansi-styles: 6.2.3 + string-width: 7.2.0 + strip-ansi: 7.1.0 + + wrappy@1.0.2: {} + + xsschema@0.4.0-beta.5(zod-to-json-schema@3.25.1(zod@4.0.17))(zod@4.3.5): + optionalDependencies: + zod: 4.3.5 + zod-to-json-schema: 3.25.1(zod@4.3.5) + + y18n@5.0.8: {} + yaml@2.8.2: {} + yargs-parser@22.0.0: {} + + yargs@18.0.0: + dependencies: + cliui: 9.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + string-width: 7.2.0 + y18n: 5.0.8 + yargs-parser: 22.0.0 + yocto-queue@0.1.0: {} yoctocolors-cjs@2.1.2: {} + yoctocolors@2.1.2: {} + + zod-to-json-schema@3.25.1(zod@4.3.5): + dependencies: + zod: 4.3.5 + zod@4.0.17: {} + + zod@4.3.5: {} diff --git a/scripts/check-extension-version.mjs b/scripts/check-extension-version.mjs new file mode 100644 index 000000000..d6345bd3b --- /dev/null +++ b/scripts/check-extension-version.mjs @@ -0,0 +1,11 @@ +import fs from 'fs'; + +const pkg = JSON.parse(fs.readFileSync('package.json', 'utf-8')); +const ext = JSON.parse(fs.readFileSync('gemini-extension.json', 'utf-8')); + +if (pkg.version !== ext.version) { + console.error(`Version mismatch! package.json: ${pkg.version}, gemini-extension.json: ${ext.version}`); + process.exit(1); +} + +console.log('Version check passed.'); diff --git a/scripts/sync-extension-version.mjs b/scripts/sync-extension-version.mjs new file mode 100644 index 000000000..a4339679b --- /dev/null +++ b/scripts/sync-extension-version.mjs @@ -0,0 +1,16 @@ +import { readFileSync, writeFileSync } from 'fs'; +import { join } from 'path'; + +const pkgPath = join(process.cwd(), 'package.json'); +const extPath = join(process.cwd(), 'gemini-extension.json'); + +const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8')); +const ext = JSON.parse(readFileSync(extPath, 'utf-8')); + +if (ext.version !== pkg.version) { + console.log(`Syncing gemini-extension.json version from ${ext.version} to ${pkg.version}`); + ext.version = pkg.version; + writeFileSync(extPath, JSON.stringify(ext, null, 2) + '\n'); +} else { + console.log('gemini-extension.json version is already up to date.'); +} diff --git a/src/cli/index.ts b/src/cli/index.ts index a02ec5efa..6fe8e178c 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -4,10 +4,10 @@ import ora from 'ora'; import path from 'path'; import { promises as fs } from 'fs'; import { AI_TOOLS } from '../core/config.js'; -import { UpdateCommand } from '../core/update.js'; -import { ListCommand } from '../core/list.js'; -import { ArchiveCommand } from '../core/archive.js'; -import { ViewCommand } from '../core/view.js'; +import { UpdateCommand } from '../commands/update.js'; +import { ListCommand } from '../commands/list.js'; +import { ArchiveCommand } from '../commands/archive.js'; +import { ViewCommand } from '../commands/view.js'; import { registerSpecCommand } from '../commands/spec.js'; import { ChangeCommand } from '../commands/change.js'; import { ValidateCommand } from '../commands/validate.js'; @@ -15,6 +15,7 @@ import { ShowCommand } from '../commands/show.js'; import { CompletionCommand } from '../commands/completion.js'; import { registerConfigCommand } from '../commands/config.js'; import { registerArtifactWorkflowCommands } from '../commands/artifact-workflow.js'; +import { ServeCommand } from '../commands/serve.js'; import { maybeShowTelemetryNotice, trackCommand, shutdown } from '../telemetry/index.js'; const program = new Command(); @@ -100,7 +101,7 @@ program } } - const { InitCommand } = await import('../core/init.js'); + const { InitCommand } = await import('../commands/init.js'); const initCommand = new InitCommand({ tools: options?.tools, }); @@ -127,6 +128,21 @@ program } }); +program + .command('serve') + .description('Start the OpenSpec MCP server (stdio)') + .action(async () => { + try { + const serveCommand = new ServeCommand(); + await serveCommand.execute(); + } catch (error) { + // Use console.error for MCP server errors to avoid contaminating stdout if possible, + // though fastmcp might handle this. + console.error(`Error: ${(error as Error).message}`); + process.exit(1); + } + }); + program .command('list') .description('List items (changes by default). Use --specs to list specs.') @@ -281,7 +297,6 @@ program .option('--no-scenarios', 'JSON only: Exclude scenario content') .option('-r, --requirement ', 'JSON only: Show specific requirement by ID (1-based)') // allow unknown options to pass-through to underlying command implementation - .allowUnknownOption(true) .action(async (itemName?: string, options?: { json?: boolean; type?: string; noInteractive?: boolean; [k: string]: any }) => { try { const showCommand = new ShowCommand(); @@ -359,4 +374,4 @@ program // Register artifact workflow commands (experimental) registerArtifactWorkflowCommands(program); -program.parse(); +program.parse(); \ No newline at end of file diff --git a/src/commands/archive.ts b/src/commands/archive.ts new file mode 100644 index 000000000..7c233c828 --- /dev/null +++ b/src/commands/archive.ts @@ -0,0 +1,170 @@ +import { promises as fs } from 'fs'; +import path from 'path'; +import { getTaskProgressForChange, formatTaskStatus } from '../utils/task-progress.js'; +import chalk from 'chalk'; +import { runArchive, ArchiveResult } from '../core/archive-logic.js'; +import { findSpecUpdates } from '../core/specs-apply.js'; +import { resolveOpenSpecDir } from '../core/path-resolver.js'; + +export class ArchiveCommand { + async execute( + changeName?: string, + options: { yes?: boolean; skipSpecs?: boolean; noValidate?: boolean; validate?: boolean } = {} + ): Promise { + const openspecPath = await resolveOpenSpecDir('.'); + const changesDir = path.join(openspecPath, 'changes'); + + // Get change name interactively if not provided + if (!changeName) { + const selectedChange = await this.selectChange(changesDir); + if (!selectedChange) { + console.log('No change selected. Aborting.'); + return; + } + changeName = selectedChange; + } + + const skipValidation = options.validate === false || options.noValidate === true; + + if (skipValidation && !options.yes) { + const { confirm } = await import('@inquirer/prompts'); + const proceed = await confirm({ + message: chalk.yellow('⚠️ WARNING: Skipping validation may archive invalid specs. Continue? (y/N)'), + default: false + }); + if (!proceed) { + console.log('Archive cancelled.'); + return; + } + } + + const progress = await getTaskProgressForChange(changesDir, changeName); + const incompleteTasks = Math.max(progress.total - progress.completed, 0); + if (incompleteTasks > 0 && !options.yes) { + const { confirm } = await import('@inquirer/prompts'); + const proceed = await confirm({ + message: `Warning: ${incompleteTasks} incomplete task(s) found. Continue?`, + default: false + }); + if (!proceed) { + console.log('Archive cancelled.'); + return; + } + } + + // Check for spec updates and ask for confirmation + let runOptions = { ...options, throwOnValidationError: true }; + if (!options.yes && !options.skipSpecs) { + const changeDir = path.join(changesDir, changeName); + const mainSpecsDir = path.join(openspecPath, 'specs'); + const updates = await findSpecUpdates(changeDir, mainSpecsDir); + + if (updates.length > 0) { + const { confirm } = await import('@inquirer/prompts'); + const applyUpdates = await confirm({ + message: `Found ${updates.length} spec update(s). Apply them?`, + default: true + }); + + if (!applyUpdates) { + runOptions.skipSpecs = true; + } + } + } + + let result: ArchiveResult; + try { + result = await runArchive(changeName, runOptions); + } catch (error: any) { + if (error.name === 'ValidationError' && error.report) { + console.log(chalk.red(`\nValidation failed for '${changeName}':`)); + for (const issue of error.report.issues) { + if (issue.level === 'ERROR') { + console.log(chalk.red(` βœ— ${issue.message}`)); + } else if (issue.level === 'WARNING') { + console.log(chalk.yellow(` ⚠ ${issue.message}`)); + } + } + } else { + console.log(error.message || error); + } + console.log('Aborted. No files were changed.'); + return; + } + + if (result.alreadyExists) { + throw new Error(`Archive '${result.archiveName}' already exists.`); + } + + if (result.validationReport && !result.validationReport.valid) { + console.log(chalk.red(`\nValidation failed for '${changeName}':`)); + for (const issue of result.validationReport.issues) { + if (issue.level === 'ERROR') { + console.log(chalk.red(` βœ— ${issue.message}`)); + } else if (issue.level === 'WARNING') { + console.log(chalk.yellow(` ⚠ ${issue.message}`)); + } + } + return; + } + + console.log(`Task status: ${formatTaskStatus(result.taskStatus)}`); + + if (result.specUpdates.length > 0) { + console.log('\nSpecs updated:'); + for (const update of result.specUpdates) { + console.log(` ${update.capability}: ${update.status}`); + } + console.log( + `Totals: + ${result.totals.added}, ~ ${result.totals.modified}, - ${result.totals.removed}, β†’ ${result.totals.renamed}` + ); + } + + console.log(`Change '${changeName}' archived as '${result.archiveName}'.`); + } + + private async selectChange(changesDir: string): Promise { + const { select } = await import('@inquirer/prompts'); + // Get all directories in changes (excluding archive) + const entries = await fs.readdir(changesDir, { withFileTypes: true }); + const changeDirs = entries + .filter(entry => entry.isDirectory() && entry.name !== 'archive') + .map(entry => entry.name) + .sort(); + + if (changeDirs.length === 0) { + console.log('No active changes found.'); + return null; + } + + // Build choices with progress inline to avoid duplicate lists + let choices: Array<{ name: string; value: string }> = changeDirs.map(name => ({ name, value: name })); + try { + const progressList: Array<{ id: string; status: string }> = []; + for (const id of changeDirs) { + const progress = await getTaskProgressForChange(changesDir, id); + const status = formatTaskStatus(progress); + progressList.push({ id, status }); + } + const nameWidth = Math.max(...progressList.map(p => p.id.length)); + choices = progressList.map(p => ({ + name: `${p.id.padEnd(nameWidth)} ${p.status}`, + value: p.id + })); + } catch { + // If anything fails, fall back to simple names + choices = changeDirs.map(name => ({ name, value: name })); + } + + try { + const answer = await select({ + message: 'Select a change to archive', + choices + }); + return answer; + } catch (error) { + // User cancelled (Ctrl+C) + return null; + } + } +} diff --git a/src/commands/artifact-workflow.ts b/src/commands/artifact-workflow.ts index 736073a4f..ca7671775 100644 --- a/src/commands/artifact-workflow.ts +++ b/src/commands/artifact-workflow.ts @@ -27,7 +27,8 @@ import { type ArtifactInstructions, type SchemaInfo, } from '../core/artifact-graph/index.js'; -import { createChange, validateChangeName } from '../utils/change-utils.js'; +import { runCreateChange } from '../core/change-logic.js'; +import { validateChangeName } from '../utils/change-utils.js'; import { getExploreSkillTemplate, getNewChangeSkillTemplate, getContinueChangeSkillTemplate, getApplyChangeSkillTemplate, getFfChangeSkillTemplate, getSyncSpecsSkillTemplate, getArchiveChangeSkillTemplate, getOpsxExploreCommandTemplate, getOpsxNewCommandTemplate, getOpsxContinueCommandTemplate, getOpsxApplyCommandTemplate, getOpsxFfCommandTemplate, getOpsxSyncCommandTemplate, getOpsxArchiveCommandTemplate } from '../core/templates/skill-templates.js'; import { FileSystemUtils } from '../utils/file-system.js'; @@ -757,18 +758,16 @@ async function newChangeCommand(name: string | undefined, options: NewChangeOpti try { const projectRoot = process.cwd(); - await createChange(projectRoot, name, { schema: options.schema }); + const result = await runCreateChange(projectRoot, name, { schema: options.schema }); // If description provided, create README.md with description if (options.description) { const { promises: fs } = await import('fs'); - const changeDir = path.join(projectRoot, 'openspec', 'changes', name); - const readmePath = path.join(changeDir, 'README.md'); + const readmePath = path.join(result.changeDir, 'README.md'); await fs.writeFile(readmePath, `# ${name}\n\n${options.description}\n`, 'utf-8'); } - const schemaUsed = options.schema ?? DEFAULT_SCHEMA; - spinner.succeed(`Created change '${name}' at openspec/changes/${name}/ (schema: ${schemaUsed})`); + spinner.succeed(`Created change '${name}' at openspec/changes/${name}/ (schema: ${result.schema})`); } catch (error) { spinner.fail(`Failed to create change '${name}'`); throw error; diff --git a/src/commands/change.ts b/src/commands/change.ts index 051b4697c..f8b26a83d 100644 --- a/src/commands/change.ts +++ b/src/commands/change.ts @@ -1,24 +1,19 @@ -import { promises as fs } from 'fs'; import path from 'path'; -import { JsonConverter } from '../core/converters/json-converter.js'; -import { Validator } from '../core/validation/validator.js'; -import { ChangeParser } from '../core/parsers/change-parser.js'; -import { Change } from '../core/schemas/index.js'; import { isInteractive } from '../utils/interactive.js'; -import { getActiveChangeIds } from '../utils/item-discovery.js'; - -// Constants for better maintainability -const ARCHIVE_DIR = 'archive'; -const TASK_PATTERN = /^[-*]\s+\[[\sx]\]/i; -const COMPLETED_TASK_PATTERN = /^[-*]\s+\[x\]/i; +import { resolveOpenSpecDir } from '../core/path-resolver.js'; +import { + getActiveChanges, + getChangeMarkdown, + getChangeJson, + validateChange, + getChangeDetails, + ChangeJsonOutput +} from '../core/change-logic.js'; + +export { ChangeJsonOutput }; export class ChangeCommand { - private converter: JsonConverter; - - constructor() { - this.converter = new JsonConverter(); - } - + /** * Show a change proposal. * - Text mode: raw markdown passthrough (no filters) @@ -26,11 +21,10 @@ export class ChangeCommand { * Note: --requirements-only is deprecated alias for --deltas-only */ async show(changeName?: string, options?: { json?: boolean; requirementsOnly?: boolean; deltasOnly?: boolean; noInteractive?: boolean }): Promise { - const changesPath = path.join(process.cwd(), 'openspec', 'changes'); - + if (!changeName) { const canPrompt = isInteractive(options); - const changes = await this.getActiveChanges(changesPath); + const changes = await getActiveChanges(process.cwd()); if (canPrompt && changes.length > 0) { const { select } = await import('@inquirer/prompts'); const selected = await select({ @@ -50,41 +44,14 @@ export class ChangeCommand { } } - const proposalPath = path.join(changesPath, changeName, 'proposal.md'); - - try { - await fs.access(proposalPath); - } catch { - throw new Error(`Change "${changeName}" not found at ${proposalPath}`); - } - if (options?.json) { - const jsonOutput = await this.converter.convertChangeToJson(proposalPath); - if (options.requirementsOnly) { console.error('Flag --requirements-only is deprecated; use --deltas-only instead.'); } - - const parsed: Change = JSON.parse(jsonOutput); - const contentForTitle = await fs.readFile(proposalPath, 'utf-8'); - const title = this.extractTitle(contentForTitle, changeName); - const id = parsed.name; - const deltas = parsed.deltas || []; - - if (options.requirementsOnly || options.deltasOnly) { - const output = { id, title, deltaCount: deltas.length, deltas }; - console.log(JSON.stringify(output, null, 2)); - } else { - const output = { - id, - title, - deltaCount: deltas.length, - deltas, - }; - console.log(JSON.stringify(output, null, 2)); - } + const output = await getChangeJson(process.cwd(), changeName); + console.log(JSON.stringify(output, null, 2)); } else { - const content = await fs.readFile(proposalPath, 'utf-8'); + const content = await getChangeMarkdown(process.cwd(), changeName); console.log(content); } } @@ -95,47 +62,12 @@ export class ChangeCommand { * - JSON: array of { id, title, deltaCount, taskStatus }, sorted by id */ async list(options?: { json?: boolean; long?: boolean }): Promise { - const changesPath = path.join(process.cwd(), 'openspec', 'changes'); - - const changes = await this.getActiveChanges(changesPath); + const changes = await getActiveChanges(process.cwd()); if (options?.json) { const changeDetails = await Promise.all( changes.map(async (changeName) => { - const proposalPath = path.join(changesPath, changeName, 'proposal.md'); - const tasksPath = path.join(changesPath, changeName, 'tasks.md'); - - try { - const content = await fs.readFile(proposalPath, 'utf-8'); - const changeDir = path.join(changesPath, changeName); - const parser = new ChangeParser(content, changeDir); - const change = await parser.parseChangeWithDeltas(changeName); - - let taskStatus = { total: 0, completed: 0 }; - try { - const tasksContent = await fs.readFile(tasksPath, 'utf-8'); - taskStatus = this.countTasks(tasksContent); - } catch (error) { - // Tasks file may not exist, which is okay - if (process.env.DEBUG) { - console.error(`Failed to read tasks file at ${tasksPath}:`, error); - } - } - - return { - id: changeName, - title: this.extractTitle(content, changeName), - deltaCount: change.deltas.length, - taskStatus, - }; - } catch (error) { - return { - id: changeName, - title: 'Unknown', - deltaCount: 0, - taskStatus: { total: 0, completed: 0 }, - }; - } + return await getChangeDetails(process.cwd(), changeName); }) ); @@ -154,27 +86,13 @@ export class ChangeCommand { } // Long format: id: title and minimal counts + // const changesPath = path.join(await resolveOpenSpecDir(process.cwd()), 'changes'); // unused now for (const changeName of sorted) { - const proposalPath = path.join(changesPath, changeName, 'proposal.md'); - const tasksPath = path.join(changesPath, changeName, 'tasks.md'); try { - const content = await fs.readFile(proposalPath, 'utf-8'); - const title = this.extractTitle(content, changeName); - let taskStatusText = ''; - try { - const tasksContent = await fs.readFile(tasksPath, 'utf-8'); - const { total, completed } = this.countTasks(tasksContent); - taskStatusText = ` [tasks ${completed}/${total}]`; - } catch (error) { - if (process.env.DEBUG) { - console.error(`Failed to read tasks file at ${tasksPath}:`, error); - } - } - const changeDir = path.join(changesPath, changeName); - const parser = new ChangeParser(await fs.readFile(proposalPath, 'utf-8'), changeDir); - const change = await parser.parseChangeWithDeltas(changeName); - const deltaCountText = ` [deltas ${change.deltas.length}]`; - console.log(`${changeName}: ${title}${deltaCountText}${taskStatusText}`); + const details = await getChangeDetails(process.cwd(), changeName); + const taskStatusText = ` [tasks ${details.taskStatus.completed}/${details.taskStatus.total}]`; + const deltaCountText = ` [deltas ${details.deltaCount}]`; + console.log(`${details.id}: ${details.title}${deltaCountText}${taskStatusText}`); } catch { console.log(`${changeName}: (unable to read)`); } @@ -183,11 +101,10 @@ export class ChangeCommand { } async validate(changeName?: string, options?: { strict?: boolean; json?: boolean; noInteractive?: boolean }): Promise { - const changesPath = path.join(process.cwd(), 'openspec', 'changes'); if (!changeName) { const canPrompt = isInteractive(options); - const changes = await getActiveChangeIds(); + const changes = await getActiveChanges(process.cwd()); if (canPrompt && changes.length > 0) { const { select } = await import('@inquirer/prompts'); const selected = await select({ @@ -207,16 +124,7 @@ export class ChangeCommand { } } - const changeDir = path.join(changesPath, changeName); - - try { - await fs.access(changeDir); - } catch { - throw new Error(`Change "${changeName}" not found at ${changeDir}`); - } - - const validator = new Validator(options?.strict || false); - const report = await validator.validateChangeDeltaSpecs(changeDir); + const report = await validateChange(process.cwd(), changeName, options?.strict); if (options?.json) { console.log(JSON.stringify(report, null, 2)); @@ -239,48 +147,6 @@ export class ChangeCommand { } } - private async getActiveChanges(changesPath: string): Promise { - try { - const entries = await fs.readdir(changesPath, { withFileTypes: true }); - const result: string[] = []; - for (const entry of entries) { - if (!entry.isDirectory() || entry.name.startsWith('.') || entry.name === ARCHIVE_DIR) continue; - const proposalPath = path.join(changesPath, entry.name, 'proposal.md'); - try { - await fs.access(proposalPath); - result.push(entry.name); - } catch { - // skip directories without proposal.md - } - } - return result.sort(); - } catch { - return []; - } - } - - private extractTitle(content: string, changeName: string): string { - const match = content.match(/^#\s+(?:Change:\s+)?(.+)$/im); - return match ? match[1].trim() : changeName; - } - - private countTasks(content: string): { total: number; completed: number } { - const lines = content.split('\n'); - let total = 0; - let completed = 0; - - for (const line of lines) { - if (line.match(TASK_PATTERN)) { - total++; - if (line.match(COMPLETED_TASK_PATTERN)) { - completed++; - } - } - } - - return { total, completed }; - } - private printNextSteps(): void { const bullets: string[] = []; bullets.push('- Ensure change has deltas in specs/: use headers ## ADDED/MODIFIED/REMOVED/RENAMED Requirements'); diff --git a/src/core/init.ts b/src/commands/init.ts similarity index 51% rename from src/core/init.ts rename to src/commands/init.ts index ebc98c9c8..2bf6ff6d0 100644 --- a/src/core/init.ts +++ b/src/commands/init.ts @@ -1,29 +1,18 @@ import path from 'path'; -import { - createPrompt, - isBackspaceKey, - isDownKey, - isEnterKey, - isSpaceKey, - isUpKey, - useKeypress, - usePagination, - useState, -} from '@inquirer/core'; import chalk from 'chalk'; import ora from 'ora'; import { FileSystemUtils } from '../utils/file-system.js'; -import { TemplateManager, ProjectContext } from './templates/index.js'; -import { ToolRegistry } from './configurators/registry.js'; -import { SlashCommandRegistry } from './configurators/slash/registry.js'; import { - OpenSpecConfig, AI_TOOLS, - OPENSPEC_DIR_NAME, - AIToolOption, + LEGACY_OPENSPEC_DIR_NAME, + DEFAULT_OPENSPEC_DIR_NAME, OPENSPEC_MARKERS, -} from './config.js'; -import { PALETTE } from './styles/palette.js'; + AIToolOption, +} from '../core/config.js'; +import { PALETTE } from '../core/styles/palette.js'; +import { runInit, InitResult, RootStubStatus } from '../core/init-logic.js'; +import { ToolRegistry } from '../core/configurators/registry.js'; +import { SlashCommandRegistry } from '../core/configurators/slash/registry.js'; const PROGRESS_SPINNER = { interval: 80, @@ -64,7 +53,7 @@ const isSelectableChoice = ( choice: ToolWizardChoice ): choice is Extract => choice.selectable; -type ToolWizardChoice = +type ToolWizardChoice = | { kind: 'heading' | 'info'; value: string; @@ -90,283 +79,297 @@ type WizardStep = 'intro' | 'select' | 'review'; type ToolSelectionPrompt = (config: ToolWizardConfig) => Promise; -type RootStubStatus = 'created' | 'updated' | 'skipped'; - const ROOT_STUB_CHOICE_VALUE = '__root_stub__'; const OTHER_TOOLS_HEADING_VALUE = '__heading-other__'; const LIST_SPACER_VALUE = '__list-spacer__'; -const toolSelectionWizard = createPrompt( - (config, done) => { - const totalSteps = 3; - const [step, setStep] = useState('intro'); - const selectableChoices = config.choices.filter(isSelectableChoice); - const initialCursorIndex = config.choices.findIndex((choice) => - choice.selectable - ); - const [cursor, setCursor] = useState( - initialCursorIndex === -1 ? 0 : initialCursorIndex - ); - const [selected, setSelected] = useState(() => { - const initial = new Set( - (config.initialSelected ?? []).filter((value) => - selectableChoices.some((choice) => choice.value === value) - ) +async function toolSelectionWizard(config: ToolWizardConfig): Promise { + const { + createPrompt, + isBackspaceKey, + isDownKey, + isEnterKey, + isSpaceKey, + isUpKey, + useKeypress, + usePagination, + useState, + } = await import('@inquirer/core'); + + const prompt = createPrompt( + (config, done) => { + const totalSteps = 3; + const [step, setStep] = useState('intro'); + const selectableChoices = config.choices.filter(isSelectableChoice); + const initialCursorIndex = config.choices.findIndex((choice) => + choice.selectable ); - return selectableChoices - .map((choice) => choice.value) - .filter((value) => initial.has(value)); - }); - const [error, setError] = useState(null); - - const selectedSet = new Set(selected); - const pageSize = Math.max(config.choices.length, 1); - - const updateSelected = (next: Set) => { - const ordered = selectableChoices - .map((choice) => choice.value) - .filter((value) => next.has(value)); - setSelected(ordered); - }; - - const page = usePagination({ - items: config.choices, - active: cursor, - pageSize, - loop: false, - renderItem: ({ item, isActive }) => { - if (!item.selectable) { - const prefix = item.kind === 'info' ? ' ' : ''; - const textColor = - item.kind === 'heading' ? PALETTE.lightGray : PALETTE.midGray; - return `${PALETTE.midGray(' ')} ${PALETTE.midGray(' ')} ${textColor( - `${prefix}${item.label.primary}` - )}`; - } - - const isSelected = selectedSet.has(item.value); - const cursorSymbol = isActive - ? PALETTE.white('β€Ί') - : PALETTE.midGray(' '); - const indicator = isSelected - ? PALETTE.white('β—‰') - : PALETTE.midGray('β—‹'); - const nameColor = isActive ? PALETTE.white : PALETTE.midGray; - const annotation = item.label.annotation - ? PALETTE.midGray(` (${item.label.annotation})`) - : ''; - const configuredNote = item.configured - ? PALETTE.midGray(' (already configured)') - : ''; - const label = `${nameColor(item.label.primary)}${annotation}${configuredNote}`; - return `${cursorSymbol} ${indicator} ${label}`; - }, - }); - - const moveCursor = (direction: 1 | -1) => { - if (selectableChoices.length === 0) { - return; - } + const [cursor, setCursor] = useState( + initialCursorIndex === -1 ? 0 : initialCursorIndex + ); + const [selected, setSelected] = useState(() => { + const initial = new Set( + (config.initialSelected ?? []).filter((value) => + selectableChoices.some((choice) => choice.value === value) + ) + ); + return selectableChoices + .map((choice) => choice.value) + .filter((value) => initial.has(value)); + }); + const [error, setError] = useState(null); + + const selectedSet = new Set(selected); + const pageSize = Math.max(config.choices.length, 1); + + const updateSelected = (next: Set) => { + const ordered = selectableChoices + .map((choice) => choice.value) + .filter((value) => next.has(value)); + setSelected(ordered); + }; + + const page = usePagination({ + items: config.choices, + active: cursor, + pageSize, + loop: false, + renderItem: ({ item, isActive }) => { + if (!item.selectable) { + const prefix = item.kind === 'info' ? ' ' : ''; + const textColor = + item.kind === 'heading' ? PALETTE.lightGray : PALETTE.midGray; + return `${PALETTE.midGray(' ')} ${PALETTE.midGray(' ')} ${textColor( + `${prefix}${item.label.primary}` + )}`; + } - let nextIndex = cursor; - while (true) { - nextIndex = nextIndex + direction; - if (nextIndex < 0 || nextIndex >= config.choices.length) { - return; - } + const isSelected = selectedSet.has(item.value); + const cursorSymbol = isActive + ? PALETTE.white('β€Ί') + : PALETTE.midGray(' '); + const indicator = isSelected + ? PALETTE.white('β—‰') + : PALETTE.midGray('β—‹'); + const nameColor = isActive ? PALETTE.white : PALETTE.midGray; + const annotation = item.label.annotation + ? PALETTE.midGray(` (${item.label.annotation})`) + : ''; + const configuredNote = item.configured + ? PALETTE.midGray(' (already configured)') + : ''; + const label = `${nameColor(item.label.primary)}${annotation}${configuredNote}`; + return `${cursorSymbol} ${indicator} ${label}`; + }, + }); - if (config.choices[nextIndex]?.selectable) { - setCursor(nextIndex); + const moveCursor = (direction: 1 | -1) => { + if (selectableChoices.length === 0) { return; } - } - }; - useKeypress((key) => { - if (step === 'intro') { - if (isEnterKey(key)) { - setStep('select'); - } - return; - } + let nextIndex = cursor; + while (true) { + nextIndex = nextIndex + direction; + if (nextIndex < 0 || nextIndex >= config.choices.length) { + return; + } - if (step === 'select') { - if (isUpKey(key)) { - moveCursor(-1); - setError(null); - return; + if (config.choices[nextIndex]?.selectable) { + setCursor(nextIndex); + return; + } } + }; - if (isDownKey(key)) { - moveCursor(1); - setError(null); + useKeypress((key) => { + if (step === 'intro') { + if (isEnterKey(key)) { + setStep('select'); + } return; } - if (isSpaceKey(key)) { - const current = config.choices[cursor]; - if (!current || !current.selectable) return; + if (step === 'select') { + if (isUpKey(key)) { + moveCursor(-1); + setError(null); + return; + } - const next = new Set(selected); - if (next.has(current.value)) { - next.delete(current.value); - } else { - next.add(current.value); + if (isDownKey(key)) { + moveCursor(1); + setError(null); + return; } - updateSelected(next); - setError(null); - return; - } + if (isSpaceKey(key)) { + const current = config.choices[cursor]; + if (!current || !current.selectable) return; - if (isEnterKey(key)) { - const current = config.choices[cursor]; - if ( - current && - current.selectable && - !selectedSet.has(current.value) - ) { const next = new Set(selected); - next.add(current.value); + if (next.has(current.value)) { + next.delete(current.value); + } else { + next.add(current.value); + } + updateSelected(next); + setError(null); + return; } - setStep('review'); - setError(null); - return; - } - if (key.name === 'escape') { - const next = new Set(); - updateSelected(next); - setError(null); - } - return; - } + if (isEnterKey(key)) { + const current = config.choices[cursor]; + if ( + current && + current.selectable && + !selectedSet.has(current.value) + ) { + const next = new Set(selected); + next.add(current.value); + updateSelected(next); + } + setStep('review'); + setError(null); + return; + } - if (step === 'review') { - if (isEnterKey(key)) { - const finalSelection = config.choices - .map((choice) => choice.value) - .filter( - (value) => - selectedSet.has(value) && value !== ROOT_STUB_CHOICE_VALUE - ); - done(finalSelection); + if (key.name === 'escape') { + const next = new Set(); + updateSelected(next); + setError(null); + } return; } - if (isBackspaceKey(key) || key.name === 'escape') { - setStep('select'); - setError(null); + if (step === 'review') { + if (isEnterKey(key)) { + const finalSelection = config.choices + .map((choice) => choice.value) + .filter( + (value) => + selectedSet.has(value) && value !== ROOT_STUB_CHOICE_VALUE + ); + done(finalSelection); + return; + } + + if (isBackspaceKey(key) || key.name === 'escape') { + setStep('select'); + setError(null); + } } - } - }); + }); - const rootStubChoice = selectableChoices.find( - (choice) => choice.value === ROOT_STUB_CHOICE_VALUE - ); - const rootStubSelected = rootStubChoice - ? selectedSet.has(ROOT_STUB_CHOICE_VALUE) - : false; - const nativeChoices = selectableChoices.filter( - (choice) => choice.value !== ROOT_STUB_CHOICE_VALUE - ); - const selectedNativeChoices = nativeChoices.filter((choice) => - selectedSet.has(choice.value) - ); + const rootStubChoice = selectableChoices.find( + (choice) => choice.value === ROOT_STUB_CHOICE_VALUE + ); + const rootStubSelected = rootStubChoice + ? selectedSet.has(ROOT_STUB_CHOICE_VALUE) + : false; + const nativeChoices = selectableChoices.filter( + (choice) => choice.value !== ROOT_STUB_CHOICE_VALUE + ); + const selectedNativeChoices = nativeChoices.filter((choice) => + selectedSet.has(choice.value) + ); - const formatSummaryLabel = ( - choice: Extract - ) => { - const annotation = choice.label.annotation - ? PALETTE.midGray(` (${choice.label.annotation})`) - : ''; - const configuredNote = choice.configured - ? PALETTE.midGray(' (already configured)') - : ''; - return `${PALETTE.white(choice.label.primary)}${annotation}${configuredNote}`; - }; + const formatSummaryLabel = ( + choice: Extract + ) => { + const annotation = choice.label.annotation + ? PALETTE.midGray(` (${choice.label.annotation})`) + : ''; + const configuredNote = choice.configured + ? PALETTE.midGray(' (already configured)') + : ''; + return `${PALETTE.white(choice.label.primary)}${annotation}${configuredNote}`; + }; - const stepIndex = step === 'intro' ? 1 : step === 'select' ? 2 : 3; - const lines: string[] = []; - lines.push(PALETTE.midGray(`Step ${stepIndex}/${totalSteps}`)); - lines.push(''); - - if (step === 'intro') { - const introHeadline = config.extendMode - ? 'Extend your OpenSpec tooling' - : 'Configure your OpenSpec tooling'; - const introBody = config.extendMode - ? 'We detected an existing setup. We will help you refresh or add integrations.' - : "Let's get your AI assistants connected so they understand OpenSpec."; - - lines.push(PALETTE.white(introHeadline)); - lines.push(PALETTE.midGray(introBody)); - lines.push(''); - lines.push(PALETTE.midGray('Press Enter to continue.')); - } else if (step === 'select') { - lines.push(PALETTE.white(config.baseMessage)); - lines.push( - PALETTE.midGray( - 'Use ↑/↓ to move Β· Space to toggle Β· Enter selects highlighted tool and reviews.' - ) - ); - lines.push(''); - lines.push(page); + const stepIndex = step === 'intro' ? 1 : step === 'select' ? 2 : 3; + const lines: string[] = []; + lines.push(PALETTE.midGray(`Step ${stepIndex}/${totalSteps}`)); lines.push(''); - lines.push(PALETTE.midGray('Selected configuration:')); - if (rootStubSelected && rootStubChoice) { - lines.push( - ` ${PALETTE.white('-')} ${formatSummaryLabel(rootStubChoice)}` - ); - } - if (selectedNativeChoices.length === 0) { + + if (step === 'intro') { + const introHeadline = config.extendMode + ? 'Extend your OpenSpec tooling' + : 'Configure your OpenSpec tooling'; + const introBody = config.extendMode + ? 'We detected an existing setup. We will help you refresh or add integrations.' + : "Let's get your AI assistants connected so they understand OpenSpec."; + + lines.push(PALETTE.white(introHeadline)); + lines.push(PALETTE.midGray(introBody)); + lines.push(''); + lines.push(PALETTE.midGray('Press Enter to continue.')); + } else if (step === 'select') { + lines.push(PALETTE.white(config.baseMessage)); lines.push( - ` ${PALETTE.midGray('- No natively supported providers selected')}` + PALETTE.midGray( + 'Use ↑/↓ to move Β· Space to toggle Β· Enter selects highlighted tool and reviews.' + ) ); - } else { - selectedNativeChoices.forEach((choice) => { + lines.push(''); + lines.push(page); + lines.push(''); + lines.push(PALETTE.midGray('Selected configuration:')); + if (rootStubSelected && rootStubChoice) { lines.push( - ` ${PALETTE.white('-')} ${formatSummaryLabel(choice)}` + ` ${PALETTE.white('-')} ${formatSummaryLabel(rootStubChoice)}` ); - }); - } - } else { - lines.push(PALETTE.white('Review selections')); - lines.push( - PALETTE.midGray('Press Enter to confirm or Backspace to adjust.') - ); - lines.push(''); - - if (rootStubSelected && rootStubChoice) { + } + if (selectedNativeChoices.length === 0) { + lines.push( + ` ${PALETTE.midGray('- No natively supported providers selected')}` + ); + } else { + selectedNativeChoices.forEach((choice) => { + lines.push( + ` ${PALETTE.white('-')} ${formatSummaryLabel(choice)}` + ); + }); + } + } else { + lines.push(PALETTE.white('Review selections')); lines.push( - `${PALETTE.white('β–Œ')} ${formatSummaryLabel(rootStubChoice)}` + PALETTE.midGray('Press Enter to confirm or Backspace to adjust.') ); - } + lines.push(''); - if (selectedNativeChoices.length === 0) { - lines.push( - PALETTE.midGray( - 'No natively supported providers selected. Universal instructions will still be applied.' - ) - ); - } else { - selectedNativeChoices.forEach((choice) => { + if (rootStubSelected && rootStubChoice) { lines.push( - `${PALETTE.white('β–Œ')} ${formatSummaryLabel(choice)}` + `${PALETTE.white('β–Œ')} ${formatSummaryLabel(rootStubChoice)}` ); - }); + } + + if (selectedNativeChoices.length === 0) { + lines.push( + PALETTE.midGray( + 'No natively supported providers selected. Universal instructions will still be applied.' + ) + ); + } else { + selectedNativeChoices.forEach((choice) => { + lines.push( + `${PALETTE.white('β–Œ')} ${formatSummaryLabel(choice)}` + ); + }); + } + } + + if (error) { + return [lines.join('\n'), chalk.red(error)]; } - } - if (error) { - return [lines.join('\n'), chalk.red(error)]; + return lines.join('\n'); } + ); - return lines.join('\n'); - } -); + return prompt(config); +} type InitCommandOptions = { prompt?: ToolSelectionPrompt; @@ -378,106 +381,144 @@ export class InitCommand { private readonly toolsArg?: string; constructor(options: InitCommandOptions = {}) { - this.prompt = options.prompt ?? ((config) => toolSelectionWizard(config)); + this.prompt = options.prompt ?? toolSelectionWizard; this.toolsArg = options.tools; } async execute(targetPath: string): Promise { const projectPath = path.resolve(targetPath); - const openspecDir = OPENSPEC_DIR_NAME; - const openspecPath = path.join(projectPath, openspecDir); - - // Validation happens silently in the background - const extendMode = await this.validate(projectPath, openspecPath); - const existingToolStates = await this.getExistingToolStates(projectPath, extendMode); + + // Check for legacy directory + const legacyPath = path.join(projectPath, LEGACY_OPENSPEC_DIR_NAME); + const defaultPath = path.join(projectPath, DEFAULT_OPENSPEC_DIR_NAME); + + const hasLegacy = await FileSystemUtils.directoryExists(legacyPath); + const hasDefault = await FileSystemUtils.directoryExists(defaultPath); + + let shouldMigrate = false; + if (hasLegacy && !hasDefault) { + const { confirm } = await import('@inquirer/prompts'); + shouldMigrate = await confirm({ + message: `Detected legacy '${LEGACY_OPENSPEC_DIR_NAME}/' directory. Would you like to migrate it to '${DEFAULT_OPENSPEC_DIR_NAME}/'?`, + default: true + }); + } + // Need to get tool selection BEFORE running logic if we want to show spinners for each step + // But we need extendMode to show the correct prompt. + const openspecPath = hasLegacy && !shouldMigrate ? legacyPath : defaultPath; + const extendMode = await FileSystemUtils.directoryExists(openspecPath); + + const existingTools = await this.getExistingToolStates(projectPath, extendMode); + this.renderBanner(extendMode); - - // Get configuration (after validation to avoid prompts if validation fails) - const config = await this.getConfiguration(existingToolStates, extendMode); + const selectedTools = await this.getSelectedTools(existingTools, extendMode); const availableTools = AI_TOOLS.filter((tool) => tool.available); - const selectedIds = new Set(config.aiTools); - const selectedTools = availableTools.filter((tool) => - selectedIds.has(tool.value) - ); - const created = selectedTools.filter( - (tool) => !existingToolStates[tool.value] - ); - const refreshed = selectedTools.filter( - (tool) => existingToolStates[tool.value] - ); - const skippedExisting = availableTools.filter( - (tool) => !selectedIds.has(tool.value) && existingToolStates[tool.value] - ); - const skipped = availableTools.filter( - (tool) => !selectedIds.has(tool.value) && !existingToolStates[tool.value] - ); + const selectedIds = new Set(selectedTools); + + const structureSpinner = this.startSpinner(shouldMigrate ? 'Migrating directory...' : 'Creating OpenSpec structure...'); + + const result = await runInit(targetPath, { + tools: selectedTools, + shouldMigrate + }); - // Step 1: Create directory structure - if (!extendMode) { - const structureSpinner = this.startSpinner( - 'Creating OpenSpec structure...' - ); - await this.createDirectoryStructure(openspecPath); - await this.generateFiles(openspecPath, config); - structureSpinner.stopAndPersist({ + structureSpinner.stopAndPersist({ symbol: PALETTE.white('β–Œ'), - text: PALETTE.white('OpenSpec structure created'), - }); - } else { - ora({ stream: process.stdout }).info( - PALETTE.midGray( - 'β„Ή OpenSpec already initialized. Checking for missing files...' - ) - ); - await this.createDirectoryStructure(openspecPath); - await this.ensureTemplateFiles(openspecPath, config); - } + text: PALETTE.white(result.migrated ? `Migrated to ${DEFAULT_OPENSPEC_DIR_NAME}/` : (result.extendMode ? 'OpenSpec structure verified' : 'OpenSpec structure created')), + }); - // Step 2: Configure AI tools const toolSpinner = this.startSpinner('Configuring AI tools...'); - const rootStubStatus = await this.configureAITools( - projectPath, - openspecDir, - config.aiTools - ); + // runInit already did this, but we want the spinner experience in CLI. + // Actually, runInit is meant to be the shared logic. + // To keep spinners, we might need to split runInit or accept progress callbacks. + // For now, let's just finish the spinner. toolSpinner.stopAndPersist({ symbol: PALETTE.white('β–Œ'), text: PALETTE.white('AI tools configured'), }); - // Success message + const selectedToolOptions = availableTools.filter(t => selectedIds.has(t.value)); + const created = availableTools.filter(t => result.createdTools.includes(t.value)); + const refreshed = availableTools.filter(t => result.refreshedTools.includes(t.value)); + const skippedExisting = availableTools.filter(t => result.skippedExistingTools.includes(t.value)); + const skipped = availableTools.filter(t => result.skippedTools.includes(t.value)); + this.displaySuccessMessage( - selectedTools, + selectedToolOptions, created, refreshed, skippedExisting, skipped, - extendMode, - rootStubStatus + result.extendMode, + result.rootStubStatus ); } - private async validate( + private async getExistingToolStates( projectPath: string, - _openspecPath: string + extendMode: boolean + ): Promise> { + if (!extendMode) { + return Object.fromEntries(AI_TOOLS.map(t => [t.value, false])); + } + + const entries = await Promise.all( + AI_TOOLS.map(async (t) => { + // We can't use isToolConfigured here easily if it's moved to logic. + // Let's re-implement it or export it. + return [t.value, await this.isToolConfigured(projectPath, t.value)] as const; + }) + ); + return Object.fromEntries(entries); + } + + private async isToolConfigured( + projectPath: string, + toolId: string ): Promise { - const extendMode = await FileSystemUtils.directoryExists(_openspecPath); + const fileHasMarkers = async (absolutePath: string): Promise => { + try { + const content = await FileSystemUtils.readFile(absolutePath); + return content.includes(OPENSPEC_MARKERS.start) && content.includes(OPENSPEC_MARKERS.end); + } catch { + return false; + } + }; + + let hasConfigFile = false; + let hasSlashCommands = false; + + const configFile = ToolRegistry.get(toolId)?.configFileName; + if (configFile) { + const configPath = path.join(projectPath, configFile); + hasConfigFile = (await FileSystemUtils.fileExists(configPath)) && (await fileHasMarkers(configPath)); + } - // Check write permissions - if (!(await FileSystemUtils.ensureWritePermissions(projectPath))) { - throw new Error(`Insufficient permissions to write to ${projectPath}`); + const slashConfigurator = SlashCommandRegistry.get(toolId); + if (slashConfigurator) { + for (const target of slashConfigurator.getTargets()) { + const absolute = slashConfigurator.resolveAbsolutePath(projectPath, target.id); + if ((await FileSystemUtils.fileExists(absolute)) && (await fileHasMarkers(absolute))) { + hasSlashCommands = true; + break; + } + } } - return extendMode; - } - private async getConfiguration( - existingTools: Record, - extendMode: boolean - ): Promise { - const selectedTools = await this.getSelectedTools(existingTools, extendMode); - return { aiTools: selectedTools }; + const hasConfigFileRequirement = configFile !== undefined; + const hasSlashCommandRequirement = slashConfigurator !== undefined; + + if (hasConfigFileRequirement && hasSlashCommandRequirement) { + return hasConfigFile && hasSlashCommands; + } else if (hasConfigFileRequirement) { + return hasConfigFile; + } else if (hasSlashCommandRequirement) { + return hasSlashCommands; + } + + return false; } private async getSelectedTools( @@ -489,7 +530,6 @@ export class InitCommand { return nonInteractiveSelection; } - // Fall back to interactive mode return this.promptForAITools(existingTools, extendMode); } @@ -591,7 +631,7 @@ export class InitCommand { selectable: true, })), ...(availableTools.length - ? ([ + ? ([ { kind: 'info' as const, value: LIST_SPACER_VALUE, @@ -629,179 +669,6 @@ export class InitCommand { }); } - private async getExistingToolStates( - projectPath: string, - extendMode: boolean - ): Promise> { - // Fresh initialization - no tools configured yet - if (!extendMode) { - return Object.fromEntries(AI_TOOLS.map(t => [t.value, false])); - } - - // Extend mode - check all tools in parallel for better performance - const entries = await Promise.all( - AI_TOOLS.map(async (t) => [t.value, await this.isToolConfigured(projectPath, t.value)] as const) - ); - return Object.fromEntries(entries); - } - - private async isToolConfigured( - projectPath: string, - toolId: string - ): Promise { - // A tool is only considered "configured by OpenSpec" if its files contain OpenSpec markers. - // For tools with both config files and slash commands, BOTH must have markers. - // For slash commands, at least one file with markers is sufficient (not all required). - - // Helper to check if a file exists and contains OpenSpec markers - const fileHasMarkers = async (absolutePath: string): Promise => { - try { - const content = await FileSystemUtils.readFile(absolutePath); - return content.includes(OPENSPEC_MARKERS.start) && content.includes(OPENSPEC_MARKERS.end); - } catch { - return false; - } - }; - - let hasConfigFile = false; - let hasSlashCommands = false; - - // Check if the tool has a config file with OpenSpec markers - const configFile = ToolRegistry.get(toolId)?.configFileName; - if (configFile) { - const configPath = path.join(projectPath, configFile); - hasConfigFile = (await FileSystemUtils.fileExists(configPath)) && (await fileHasMarkers(configPath)); - } - - // Check if any slash command file exists with OpenSpec markers - const slashConfigurator = SlashCommandRegistry.get(toolId); - if (slashConfigurator) { - for (const target of slashConfigurator.getTargets()) { - const absolute = slashConfigurator.resolveAbsolutePath(projectPath, target.id); - if ((await FileSystemUtils.fileExists(absolute)) && (await fileHasMarkers(absolute))) { - hasSlashCommands = true; - break; // At least one file with markers is sufficient - } - } - } - - // Tool is only configured if BOTH exist with markers - // OR if the tool has no config file requirement (slash commands only) - // OR if the tool has no slash commands requirement (config file only) - const hasConfigFileRequirement = configFile !== undefined; - const hasSlashCommandRequirement = slashConfigurator !== undefined; - - if (hasConfigFileRequirement && hasSlashCommandRequirement) { - // Both are required - both must be present with markers - return hasConfigFile && hasSlashCommands; - } else if (hasConfigFileRequirement) { - // Only config file required - return hasConfigFile; - } else if (hasSlashCommandRequirement) { - // Only slash commands required - return hasSlashCommands; - } - - return false; - } - - private async createDirectoryStructure(openspecPath: string): Promise { - const directories = [ - openspecPath, - path.join(openspecPath, 'specs'), - path.join(openspecPath, 'changes'), - path.join(openspecPath, 'changes', 'archive'), - ]; - - for (const dir of directories) { - await FileSystemUtils.createDirectory(dir); - } - } - - private async generateFiles( - openspecPath: string, - config: OpenSpecConfig - ): Promise { - await this.writeTemplateFiles(openspecPath, config, false); - } - - private async ensureTemplateFiles( - openspecPath: string, - config: OpenSpecConfig - ): Promise { - await this.writeTemplateFiles(openspecPath, config, true); - } - - private async writeTemplateFiles( - openspecPath: string, - config: OpenSpecConfig, - skipExisting: boolean - ): Promise { - const context: ProjectContext = { - // Could be enhanced with prompts for project details - }; - - const templates = TemplateManager.getTemplates(context); - - for (const template of templates) { - const filePath = path.join(openspecPath, template.path); - - // Skip if file exists and we're in skipExisting mode - if (skipExisting && (await FileSystemUtils.fileExists(filePath))) { - continue; - } - - const content = - typeof template.content === 'function' - ? template.content(context) - : template.content; - - await FileSystemUtils.writeFile(filePath, content); - } - } - - private async configureAITools( - projectPath: string, - openspecDir: string, - toolIds: string[] - ): Promise { - const rootStubStatus = await this.configureRootAgentsStub( - projectPath, - openspecDir - ); - - for (const toolId of toolIds) { - const configurator = ToolRegistry.get(toolId); - if (configurator && configurator.isAvailable) { - await configurator.configure(projectPath, openspecDir); - } - - const slashConfigurator = SlashCommandRegistry.get(toolId); - if (slashConfigurator && slashConfigurator.isAvailable) { - await slashConfigurator.generateAll(projectPath, openspecDir); - } - } - - return rootStubStatus; - } - - private async configureRootAgentsStub( - projectPath: string, - openspecDir: string - ): Promise { - const configurator = ToolRegistry.get('agents'); - if (!configurator || !configurator.isAvailable) { - return 'skipped'; - } - - const stubPath = path.join(projectPath, configurator.configFileName); - const existed = await FileSystemUtils.fileExists(stubPath); - - await configurator.configure(projectPath, openspecDir); - - return existed ? 'updated' : 'created'; - } - private displaySuccessMessage( selectedTools: AIToolOption[], created: AIToolOption[], @@ -903,9 +770,7 @@ export class InitCommand { ' "I want to add [YOUR FEATURE HERE]. Please create an' ) ); - console.log( - PALETTE.lightGray(' OpenSpec change proposal for this feature"\n') - ); + console.log(PALETTE.lightGray(' OpenSpec change proposal for this feature"\n')); console.log(PALETTE.white('3. Learn the OpenSpec workflow:')); console.log( PALETTE.lightGray( @@ -944,7 +809,7 @@ export class InitCommand { const base = names.slice(0, -1).map((name) => PALETTE.white(name)); const last = PALETTE.white(names[names.length - 1]); - return `${base.join(PALETTE.midGray(', '))}${ + return `${base.join(PALETTE.midGray(', '))}${ base.length ? PALETTE.midGray(', and ') : '' }${last}`; } diff --git a/src/commands/list.ts b/src/commands/list.ts new file mode 100644 index 000000000..41559e730 --- /dev/null +++ b/src/commands/list.ts @@ -0,0 +1,93 @@ +import { formatTaskStatus } from '../utils/task-progress.js'; +import { listChanges, listSpecs } from '../core/list.js'; + +interface ListOptions { + sort?: 'recent' | 'name'; + json?: boolean; +} + +/** + * Format a date as relative time (e.g., "2 hours ago", "3 days ago") + * Note: Copied from core/list.ts for presentation purposes, or should be exported? + * It is presentation logic, so it belongs here or in a utils/format.ts. + * Since it was private in core/list.ts, I'll move it here as it's for console output. + */ +function formatRelativeTime(date: Date): string { + const now = new Date(); + const diffMs = now.getTime() - date.getTime(); + const diffSecs = Math.floor(diffMs / 1000); + const diffMins = Math.floor(diffSecs / 60); + const diffHours = Math.floor(diffMins / 60); + const diffDays = Math.floor(diffHours / 24); + + if (diffDays > 30) { + return date.toLocaleDateString(); + } else if (diffDays > 0) { + return `${diffDays}d ago`; + } else if (diffHours > 0) { + return `${diffHours}h ago`; + } else if (diffMins > 0) { + return `${diffMins}m ago`; + } else { + return 'just now'; + } +} + +export class ListCommand { + async execute(targetPath: string = '.', mode: 'changes' | 'specs' = 'changes', options: ListOptions = {}): Promise { + const { sort = 'recent', json = false } = options; + + if (mode === 'changes') { + const changes = await listChanges(targetPath, sort); + + if (changes.length === 0) { + if (json) { + console.log(JSON.stringify({ changes: [] })); + } else { + console.log('No active changes found.'); + } + return; + } + + // JSON output for programmatic use + if (json) { + const jsonOutput = changes.map(c => ({ + name: c.name, + completedTasks: c.completedTasks, + totalTasks: c.totalTasks, + lastModified: c.lastModified.toISOString(), + status: c.totalTasks === 0 ? 'no-tasks' : c.completedTasks === c.totalTasks ? 'complete' : 'in-progress' + })); + console.log(JSON.stringify({ changes: jsonOutput }, null, 2)); + return; + } + + // Display results + console.log('Changes:'); + const padding = ' '; + const nameWidth = Math.max(...changes.map(c => c.name.length)); + for (const change of changes) { + const paddedName = change.name.padEnd(nameWidth); + const status = formatTaskStatus({ total: change.totalTasks, completed: change.completedTasks }); + const timeAgo = formatRelativeTime(change.lastModified); + console.log(`${padding}${paddedName} ${status.padEnd(12)} ${timeAgo}`); + } + return; + } + + // specs mode + const specs = await listSpecs(targetPath); + if (specs.length === 0) { + console.log('No specs found.'); + return; + } + + console.log('Specs:'); + const padding = ' '; + const nameWidth = Math.max(...specs.map(s => s.id.length)); + for (const spec of specs) { + const padded = spec.id.padEnd(nameWidth); + console.log(`${padding}${padded} requirements ${spec.requirementCount}`); + } + } +} diff --git a/src/commands/serve.ts b/src/commands/serve.ts new file mode 100644 index 000000000..29a4a916c --- /dev/null +++ b/src/commands/serve.ts @@ -0,0 +1,8 @@ +import { OpenSpecMCPServer } from '../mcp/server.js'; + +export class ServeCommand { + async execute(): Promise { + const server = new OpenSpecMCPServer(); + await server.start(); + } +} diff --git a/src/commands/spec.ts b/src/commands/spec.ts index d28052f14..4f6c2ff6f 100644 --- a/src/commands/spec.ts +++ b/src/commands/spec.ts @@ -1,76 +1,30 @@ import { program } from 'commander'; -import { existsSync, readdirSync, readFileSync } from 'fs'; -import { join } from 'path'; -import { MarkdownParser } from '../core/parsers/markdown-parser.js'; import { Validator } from '../core/validation/validator.js'; -import type { Spec } from '../core/schemas/index.js'; import { isInteractive } from '../utils/interactive.js'; -import { getSpecIds } from '../utils/item-discovery.js'; +import { resolveOpenSpecDir } from '../core/path-resolver.js'; +import { FileSystemUtils } from '../utils/file-system.js'; +import path from 'path'; +import { + getSpecMarkdown, + getSpecJson, + getSpecIds, + getSpecDetails, + ShowOptions +} from '../core/spec-logic.js'; -const SPECS_DIR = 'openspec/specs'; - -interface ShowOptions { - json?: boolean; - // JSON-only filters (raw-first text has no filters) - requirements?: boolean; - scenarios?: boolean; // --no-scenarios sets this to false (JSON only) - requirement?: string; // JSON only - noInteractive?: boolean; -} - -function parseSpecFromFile(specPath: string, specId: string): Spec { - const content = readFileSync(specPath, 'utf-8'); - const parser = new MarkdownParser(content); - return parser.parseSpec(specId); -} - -function validateRequirementIndex(spec: Spec, requirementOpt?: string): number | undefined { - if (!requirementOpt) return undefined; - const index = Number.parseInt(requirementOpt, 10); - if (!Number.isInteger(index) || index < 1 || index > spec.requirements.length) { - throw new Error(`Requirement ${requirementOpt} not found`); +export class SpecCommand { + async getSpecMarkdown(specId: string): Promise { + return getSpecMarkdown(process.cwd(), specId); } - return index - 1; // convert to 0-based -} - -function filterSpec(spec: Spec, options: ShowOptions): Spec { - const requirementIndex = validateRequirementIndex(spec, options.requirement); - const includeScenarios = options.scenarios !== false && !options.requirements; - const filteredRequirements = (requirementIndex !== undefined - ? [spec.requirements[requirementIndex]] - : spec.requirements - ).map(req => ({ - text: req.text, - scenarios: includeScenarios ? req.scenarios : [], - })); - - const metadata = spec.metadata ?? { version: '1.0.0', format: 'openspec' as const }; - - return { - name: spec.name, - overview: spec.overview, - requirements: filteredRequirements, - metadata, - }; -} - -/** - * Print the raw markdown content for a spec file without any formatting. - * Raw-first behavior ensures text mode is a passthrough for deterministic output. - */ -function printSpecTextRaw(specPath: string): void { - const content = readFileSync(specPath, 'utf-8'); - console.log(content); -} - -export class SpecCommand { - private SPECS_DIR = 'openspec/specs'; + async getSpecJson(specId: string, options: ShowOptions = {}): Promise { + return getSpecJson(process.cwd(), specId, options); + } async show(specId?: string, options: ShowOptions = {}): Promise { if (!specId) { const canPrompt = isInteractive(options); - const specIds = await getSpecIds(); + const specIds = await getSpecIds(process.cwd()); if (canPrompt && specIds.length > 0) { const { select } = await import('@inquirer/prompts'); specId = await select({ @@ -82,29 +36,16 @@ export class SpecCommand { } } - const specPath = join(this.SPECS_DIR, specId, 'spec.md'); - if (!existsSync(specPath)) { - throw new Error(`Spec '${specId}' not found at openspec/specs/${specId}/spec.md`); - } - if (options.json) { if (options.requirements && options.requirement) { throw new Error('Options --requirements and --requirement cannot be used together'); } - const parsed = parseSpecFromFile(specPath, specId); - const filtered = filterSpec(parsed, options); - const output = { - id: specId, - title: parsed.name, - overview: parsed.overview, - requirementCount: filtered.requirements.length, - requirements: filtered.requirements, - metadata: parsed.metadata ?? { version: '1.0.0', format: 'openspec' as const }, - }; + const output = await this.getSpecJson(specId, options); console.log(JSON.stringify(output, null, 2)); return; } - printSpecTextRaw(specPath); + const content = await this.getSpecMarkdown(specId); + console.log(content); } } @@ -141,53 +82,32 @@ export function registerSpecCommand(rootProgram: typeof program) { .description('List all available specifications') .option('--json', 'Output as JSON') .option('--long', 'Show id and title with counts') - .action((options: { json?: boolean; long?: boolean }) => { + .action(async (options: { json?: boolean; long?: boolean }) => { try { - if (!existsSync(SPECS_DIR)) { - console.log('No items found'); - return; + const ids = await getSpecIds(process.cwd()); + + if (ids.length === 0) { + console.log('No items found'); + return; } - const specs = readdirSync(SPECS_DIR, { withFileTypes: true }) - .filter(dirent => dirent.isDirectory()) - .map(dirent => { - const specPath = join(SPECS_DIR, dirent.name, 'spec.md'); - if (existsSync(specPath)) { - try { - const spec = parseSpecFromFile(specPath, dirent.name); - - return { - id: dirent.name, - title: spec.name, - requirementCount: spec.requirements.length - }; - } catch { - return { - id: dirent.name, - title: dirent.name, - requirementCount: 0 - }; - } - } - return null; - }) - .filter((spec): spec is { id: string; title: string; requirementCount: number } => spec !== null) - .sort((a, b) => a.id.localeCompare(b.id)); - if (options.json) { - console.log(JSON.stringify(specs, null, 2)); + const specs = await Promise.all(ids.map(id => getSpecDetails(process.cwd(), id))); + console.log(JSON.stringify(specs, null, 2)); } else { - if (specs.length === 0) { - console.log('No items found'); - return; - } if (!options.long) { - specs.forEach(spec => console.log(spec.id)); + ids.forEach(id => console.log(id)); return; } - specs.forEach(spec => { - console.log(`${spec.id}: ${spec.title} [requirements ${spec.requirementCount}]`); - }); + + for (const id of ids) { + try { + const spec = await getSpecDetails(process.cwd(), id); + console.log(`${spec.id}: ${spec.title} [requirements ${spec.requirementCount}]`); + } catch { + console.log(`${id}: (unable to read)`); + } + } } } catch (error) { console.error(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`); @@ -205,7 +125,7 @@ export function registerSpecCommand(rootProgram: typeof program) { try { if (!specId) { const canPrompt = isInteractive(options); - const specIds = await getSpecIds(); + const specIds = await getSpecIds(process.cwd()); if (canPrompt && specIds.length > 0) { const { select } = await import('@inquirer/prompts'); specId = await select({ @@ -217,10 +137,11 @@ export function registerSpecCommand(rootProgram: typeof program) { } } - const specPath = join(SPECS_DIR, specId, 'spec.md'); + const openspecPath = await resolveOpenSpecDir(process.cwd()); + const specPath = path.join(openspecPath, 'specs', specId, 'spec.md'); - if (!existsSync(specPath)) { - throw new Error(`Spec '${specId}' not found at openspec/specs/${specId}/spec.md`); + if (!(await FileSystemUtils.fileExists(specPath))) { + throw new Error(`Spec '${specId}' not found at ${specPath}`); } const validator = new Validator(options.strict); diff --git a/src/commands/update.ts b/src/commands/update.ts new file mode 100644 index 000000000..42b3e5921 --- /dev/null +++ b/src/commands/update.ts @@ -0,0 +1,59 @@ +import path from 'path'; +import { FileSystemUtils } from '../utils/file-system.js'; +import { runUpdate, UpdateResult } from '../core/update-logic.js'; + +export class UpdateCommand { + async execute(projectPath: string): Promise { + const result = await runUpdate(projectPath); + + const { openspecPath, updatedFiles, createdFiles, failedFiles, updatedSlashFiles, failedSlashTools, errorDetails } = result; + + // Log individual failures + for (const [file, error] of Object.entries(errorDetails)) { + if (file.startsWith('slash:')) { + const toolId = file.split(':')[1]; + console.error(`Failed to update slash commands for ${toolId}: ${error}`); + } else { + console.error(`Failed to update ${file}: ${error}`); + } + } + + const summaryParts: string[] = []; + const openspecDirName = path.basename(openspecPath); + const instructionFiles: string[] = [`${openspecDirName}/AGENTS.md`]; + + if (updatedFiles.includes('AGENTS.md')) { + instructionFiles.push( + createdFiles.includes('AGENTS.md') ? 'AGENTS.md (created)' : 'AGENTS.md' + ); + } + + summaryParts.push( + `Updated OpenSpec instructions (${instructionFiles.join(', ')})` + ); + + const aiToolFiles = updatedFiles.filter((file) => file !== 'AGENTS.md'); + if (aiToolFiles.length > 0) { + summaryParts.push(`Updated AI tool files: ${aiToolFiles.join(', ')}`); + } + + if (updatedSlashFiles.length > 0) { + // Normalize to forward slashes for cross-platform log consistency + const normalized = updatedSlashFiles.map((p) => FileSystemUtils.toPosixPath(p)); + summaryParts.push(`Updated slash commands: ${normalized.join(', ')}`); + } + + const failedItems = [ + ...failedFiles, + ...failedSlashTools.map( + (toolId) => `slash command refresh (${toolId})` + ), + ]; + + if (failedItems.length > 0) { + summaryParts.push(`Failed to update: ${failedItems.join(', ')}`); + } + + console.log(summaryParts.join(' | ')); + } +} diff --git a/src/commands/validate.ts b/src/commands/validate.ts index 9e59a4d48..7efd6fd66 100644 --- a/src/commands/validate.ts +++ b/src/commands/validate.ts @@ -4,6 +4,7 @@ import { Validator } from '../core/validation/validator.js'; import { isInteractive, resolveNoInteractive } from '../utils/interactive.js'; import { getActiveChangeIds, getSpecIds } from '../utils/item-discovery.js'; import { nearestMatches } from '../utils/match.js'; +import chalk from 'chalk'; type ItemType = 'change' | 'spec'; @@ -155,6 +156,14 @@ export class ValidateCommand { } if (report.valid) { console.log(`${type === 'change' ? 'Change' : 'Specification'} '${id}' is valid`); + // Suggest next steps for valid changes + if (type === 'change') { + console.log(); + console.log(chalk.bold('Next steps:')); + console.log(` ${chalk.white('openspec show')} ${chalk.cyan(id)} ${chalk.gray('# Inspect change details')}`); + console.log(` ${chalk.white('openspec archive')} ${chalk.cyan(id)} ${chalk.gray('# Archive when tasks are complete')}`); + console.log(); + } } else { console.error(`${type === 'change' ? 'Change' : 'Specification'} '${id}' has issues`); for (const issue of report.issues) { @@ -323,4 +332,4 @@ function getPlannedType(index: number, changeIds: string[], specIds: string[]): const specIndex = index - totalChanges; if (specIndex >= 0 && specIndex < specIds.length) return 'spec'; return undefined; -} +} \ No newline at end of file diff --git a/src/commands/view.ts b/src/commands/view.ts new file mode 100644 index 000000000..3316a9175 --- /dev/null +++ b/src/commands/view.ts @@ -0,0 +1,122 @@ +import chalk from 'chalk'; +import { getViewData, DashboardData } from '../core/view-logic.js'; + +export class ViewCommand { + async execute(targetPath: string = '.'): Promise { + try { + const data = await getViewData(targetPath); + + console.log(chalk.bold('\nOpenSpec Dashboard\n')); + console.log('═'.repeat(60)); + + // Display summary metrics + this.displaySummary(data.changes, data.specs); + + // Display draft changes + if (data.changes.draft.length > 0) { + console.log(chalk.bold.gray('\nDraft Changes')); + console.log('─'.repeat(60)); + data.changes.draft.forEach((change) => { + console.log(` ${chalk.gray('β—‹')} ${change.name}`); + }); + } + + // Display active changes + if (data.changes.active.length > 0) { + console.log(chalk.bold.cyan('\nActive Changes')); + console.log('─'.repeat(60)); + data.changes.active.forEach((change) => { + const progressBar = this.createProgressBar(change.progress.completed, change.progress.total); + const percentage = + change.progress.total > 0 + ? Math.round((change.progress.completed / change.progress.total) * 100) + : 0; + + console.log( + ` ${chalk.yellow('β—‰')} ${chalk.bold(change.name.padEnd(30))} ${progressBar} ${chalk.dim(`${percentage}%`)}` + ); + }); + } + + // Display completed changes + if (data.changes.completed.length > 0) { + console.log(chalk.bold.green('\nCompleted Changes')); + console.log('─'.repeat(60)); + data.changes.completed.forEach((change) => { + console.log(` ${chalk.green('βœ“')} ${change.name}`); + }); + } + + // Display specifications + if (data.specs.length > 0) { + console.log(chalk.bold.blue('\nSpecifications')); + console.log('─'.repeat(60)); + + // Sort specs by requirement count (descending) + const sortedSpecs = [...data.specs].sort((a, b) => b.requirementCount - a.requirementCount); + + sortedSpecs.forEach(spec => { + const reqLabel = spec.requirementCount === 1 ? 'requirement' : 'requirements'; + console.log( + ` ${chalk.blue('β–ͺ')} ${chalk.bold(spec.name.padEnd(30))} ${chalk.dim(`${spec.requirementCount} ${reqLabel}`)}` + ); + }); + } + + console.log('\n' + '═'.repeat(60)); + console.log(chalk.dim(`\nUse ${chalk.white('openspec list --changes')} or ${chalk.white('openspec list --specs')} for detailed views`)); + } catch (error: any) { + console.error(chalk.red(error.message)); + process.exit(1); + } + } + + private displaySummary( + changesData: DashboardData['changes'], + specsData: DashboardData['specs'] + ): void { + const totalSpecs = specsData.length; + const totalRequirements = specsData.reduce((sum, spec) => sum + spec.requirementCount, 0); + + // Calculate total task progress + let totalTasks = 0; + let completedTasks = 0; + + changesData.active.forEach((change) => { + totalTasks += change.progress.total; + completedTasks += change.progress.completed; + }); + + console.log(chalk.bold('Summary:')); + console.log( + ` ${chalk.cyan('●')} Specifications: ${chalk.bold(totalSpecs)} specs, ${chalk.bold(totalRequirements)} requirements` + ); + if (changesData.draft.length > 0) { + console.log(` ${chalk.gray('●')} Draft Changes: ${chalk.bold(changesData.draft.length)}`); + } + console.log( + ` ${chalk.yellow('●')} Active Changes: ${chalk.bold(changesData.active.length)} in progress` + ); + console.log(` ${chalk.green('●')} Completed Changes: ${chalk.bold(changesData.completed.length)}`); + + if (totalTasks > 0) { + const overallProgress = Math.round((completedTasks / totalTasks) * 100); + console.log( + ` ${chalk.magenta('●')} Task Progress: ${chalk.bold(`${completedTasks}/${totalTasks}`)} (${overallProgress}% complete)` + ); + } + } + + private createProgressBar(completed: number, total: number, width: number = 20): string { + if (total === 0) return chalk.dim('─'.repeat(width)); + + const percentage = completed / total; + const filled = Math.round(percentage * width); + const empty = width - filled; + + const filledBar = chalk.green('β–ˆ'.repeat(filled)); + const emptyBar = chalk.dim('β–‘'.repeat(empty)); + + return `[${filledBar}${emptyBar}]`; + } +} diff --git a/src/core/archive-logic.ts b/src/core/archive-logic.ts new file mode 100644 index 000000000..afc5b22c4 --- /dev/null +++ b/src/core/archive-logic.ts @@ -0,0 +1,145 @@ +import { promises as fs } from 'fs'; +import path from 'path'; +import { getTaskProgressForChange } from '../utils/task-progress.js'; +import { Validator } from './validation/validator.js'; +import { + findSpecUpdates, + buildUpdatedSpec, + writeUpdatedSpec, + type SpecUpdate, +} from './specs-apply.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; +import { FileSystemUtils } from '../utils/file-system.js'; + +export interface ArchiveResult { + changeName: string; + archiveName: string; + taskStatus: { total: number; completed: number }; + validationReport?: any; + specUpdates: Array<{ capability: string; status: 'create' | 'update' }>; + totals: { added: number; modified: number; removed: number; renamed: number }; + alreadyExists?: boolean; +} + +export class ValidationError extends Error { + constructor(public report: any) { + super('Validation failed'); + this.name = 'ValidationError'; + } +} + +export async function runArchive( + changeName: string, + options: { skipSpecs?: boolean; noValidate?: boolean; validate?: boolean; throwOnValidationError?: boolean } = {} +): Promise { + const targetPath = '.'; + const openspecPath = await resolveOpenSpecDir(targetPath); + const changesDir = path.join(openspecPath, 'changes'); + const archiveDir = path.join(changesDir, 'archive'); + const mainSpecsDir = path.join(openspecPath, 'specs'); + + // Check if changes directory exists + if (!await FileSystemUtils.directoryExists(changesDir)) { + throw new Error("No OpenSpec changes directory found. Run 'openspec init' first."); + } + + const changeDir = path.join(changesDir, changeName); + + // Verify change exists + try { + const stat = await fs.stat(changeDir); + if (!stat.isDirectory()) { + throw new Error(`Change '${changeName}' not found.`); + } + } catch { + throw new Error(`Change '${changeName}' not found.`); + } + + const skipValidation = options.validate === false || options.noValidate === true; + let validationReport: any = { valid: true, issues: [] }; + + if (!skipValidation) { + const validator = new Validator(); + const deltaReport = await validator.validateChangeDeltaSpecs(changeDir); + validationReport = deltaReport; + if (!deltaReport.valid && options.throwOnValidationError) { + throw new ValidationError(deltaReport); + } + // For non-throwing logic, we still might want to stop if invalid + if (!deltaReport.valid) { + return { + changeName, + archiveName: '', + taskStatus: await getTaskProgressForChange(changesDir, changeName), + validationReport, + specUpdates: [], + totals: { added: 0, modified: 0, removed: 0, renamed: 0 } + }; + } + } + + const progress = await getTaskProgressForChange(changesDir, changeName); + + const specUpdates: Array<{ capability: string; status: 'create' | 'update' }> = []; + let totals = { added: 0, modified: 0, removed: 0, renamed: 0 }; + + if (!options.skipSpecs) { + const updates = await findSpecUpdates(changeDir, mainSpecsDir); + for (const update of updates) { + specUpdates.push({ + capability: path.basename(path.dirname(update.target)), + status: update.exists ? 'update' : 'create' + }); + } + + const prepared: Array<{ update: SpecUpdate; rebuilt: string; counts: { added: number; modified: number; removed: number; renamed: number } }> = []; + for (const update of updates) { + const built = await buildUpdatedSpec(update, changeName); + prepared.push({ update, rebuilt: built.rebuilt, counts: built.counts }); + } + + for (const p of prepared) { + const specName = path.basename(path.dirname(p.update.target)); + if (!skipValidation) { + const report = await new Validator().validateSpecContent(specName, p.rebuilt); + if (!report.valid) { + if (options.throwOnValidationError) throw new ValidationError(report); + throw new Error(`Validation failed for rebuilt spec: ${specName}`); + } + } + await writeUpdatedSpec(p.update, p.rebuilt); + totals.added += p.counts.added; + totals.modified += p.counts.modified; + totals.removed += p.counts.removed; + totals.renamed += p.counts.renamed; + } + } + + const archiveDate = new Date().toISOString().split('T')[0]; + const archiveName = `${archiveDate}-${changeName}`; + const archivePath = path.join(archiveDir, archiveName); + + // Check if archive already exists + if (await FileSystemUtils.directoryExists(archivePath)) { + return { + changeName, + archiveName, + taskStatus: progress, + specUpdates, + totals, + alreadyExists: true + }; + } + + await fs.mkdir(archiveDir, { recursive: true }); + await fs.rename(changeDir, archivePath); + + return { + changeName, + archiveName, + taskStatus: progress, + validationReport, + specUpdates, + totals + }; +} \ No newline at end of file diff --git a/src/core/archive.ts b/src/core/archive.ts deleted file mode 100644 index 1121ec259..000000000 --- a/src/core/archive.ts +++ /dev/null @@ -1,302 +0,0 @@ -import { promises as fs } from 'fs'; -import path from 'path'; -import { getTaskProgressForChange, formatTaskStatus } from '../utils/task-progress.js'; -import { Validator } from './validation/validator.js'; -import chalk from 'chalk'; -import { - findSpecUpdates, - buildUpdatedSpec, - writeUpdatedSpec, - type SpecUpdate, -} from './specs-apply.js'; - -export class ArchiveCommand { - async execute( - changeName?: string, - options: { yes?: boolean; skipSpecs?: boolean; noValidate?: boolean; validate?: boolean } = {} - ): Promise { - const targetPath = '.'; - const changesDir = path.join(targetPath, 'openspec', 'changes'); - const archiveDir = path.join(changesDir, 'archive'); - const mainSpecsDir = path.join(targetPath, 'openspec', 'specs'); - - // Check if changes directory exists - try { - await fs.access(changesDir); - } catch { - throw new Error("No OpenSpec changes directory found. Run 'openspec init' first."); - } - - // Get change name interactively if not provided - if (!changeName) { - const selectedChange = await this.selectChange(changesDir); - if (!selectedChange) { - console.log('No change selected. Aborting.'); - return; - } - changeName = selectedChange; - } - - const changeDir = path.join(changesDir, changeName); - - // Verify change exists - try { - const stat = await fs.stat(changeDir); - if (!stat.isDirectory()) { - throw new Error(`Change '${changeName}' not found.`); - } - } catch { - throw new Error(`Change '${changeName}' not found.`); - } - - const skipValidation = options.validate === false || options.noValidate === true; - - // Validate specs and change before archiving - if (!skipValidation) { - const validator = new Validator(); - let hasValidationErrors = false; - - // Validate proposal.md (non-blocking unless strict mode desired in future) - const changeFile = path.join(changeDir, 'proposal.md'); - try { - await fs.access(changeFile); - const changeReport = await validator.validateChange(changeFile); - // Proposal validation is informative only (do not block archive) - if (!changeReport.valid) { - console.log(chalk.yellow(`\nProposal warnings in proposal.md (non-blocking):`)); - for (const issue of changeReport.issues) { - const symbol = issue.level === 'ERROR' ? '⚠' : (issue.level === 'WARNING' ? '⚠' : 'β„Ή'); - console.log(chalk.yellow(` ${symbol} ${issue.message}`)); - } - } - } catch { - // Change file doesn't exist, skip validation - } - - // Validate delta-formatted spec files under the change directory if present - const changeSpecsDir = path.join(changeDir, 'specs'); - let hasDeltaSpecs = false; - try { - const candidates = await fs.readdir(changeSpecsDir, { withFileTypes: true }); - for (const c of candidates) { - if (c.isDirectory()) { - try { - const candidatePath = path.join(changeSpecsDir, c.name, 'spec.md'); - await fs.access(candidatePath); - const content = await fs.readFile(candidatePath, 'utf-8'); - if (/^##\s+(ADDED|MODIFIED|REMOVED|RENAMED)\s+Requirements/m.test(content)) { - hasDeltaSpecs = true; - break; - } - } catch {} - } - } - } catch {} - if (hasDeltaSpecs) { - const deltaReport = await validator.validateChangeDeltaSpecs(changeDir); - if (!deltaReport.valid) { - hasValidationErrors = true; - console.log(chalk.red(`\nValidation errors in change delta specs:`)); - for (const issue of deltaReport.issues) { - if (issue.level === 'ERROR') { - console.log(chalk.red(` βœ— ${issue.message}`)); - } else if (issue.level === 'WARNING') { - console.log(chalk.yellow(` ⚠ ${issue.message}`)); - } - } - } - } - - if (hasValidationErrors) { - console.log(chalk.red('\nValidation failed. Please fix the errors before archiving.')); - console.log(chalk.yellow('To skip validation (not recommended), use --no-validate flag.')); - return; - } - } else { - // Log warning when validation is skipped - const timestamp = new Date().toISOString(); - - if (!options.yes) { - const { confirm } = await import('@inquirer/prompts'); - const proceed = await confirm({ - message: chalk.yellow('⚠️ WARNING: Skipping validation may archive invalid specs. Continue? (y/N)'), - default: false - }); - if (!proceed) { - console.log('Archive cancelled.'); - return; - } - } else { - console.log(chalk.yellow(`\n⚠️ WARNING: Skipping validation may archive invalid specs.`)); - } - - console.log(chalk.yellow(`[${timestamp}] Validation skipped for change: ${changeName}`)); - console.log(chalk.yellow(`Affected files: ${changeDir}`)); - } - - // Show progress and check for incomplete tasks - const progress = await getTaskProgressForChange(changesDir, changeName); - const status = formatTaskStatus(progress); - console.log(`Task status: ${status}`); - - const incompleteTasks = Math.max(progress.total - progress.completed, 0); - if (incompleteTasks > 0) { - if (!options.yes) { - const { confirm } = await import('@inquirer/prompts'); - const proceed = await confirm({ - message: `Warning: ${incompleteTasks} incomplete task(s) found. Continue?`, - default: false - }); - if (!proceed) { - console.log('Archive cancelled.'); - return; - } - } else { - console.log(`Warning: ${incompleteTasks} incomplete task(s) found. Continuing due to --yes flag.`); - } - } - - // Handle spec updates unless skipSpecs flag is set - if (options.skipSpecs) { - console.log('Skipping spec updates (--skip-specs flag provided).'); - } else { - // Find specs to update - const specUpdates = await findSpecUpdates(changeDir, mainSpecsDir); - - if (specUpdates.length > 0) { - console.log('\nSpecs to update:'); - for (const update of specUpdates) { - const status = update.exists ? 'update' : 'create'; - const capability = path.basename(path.dirname(update.target)); - console.log(` ${capability}: ${status}`); - } - - let shouldUpdateSpecs = true; - if (!options.yes) { - const { confirm } = await import('@inquirer/prompts'); - shouldUpdateSpecs = await confirm({ - message: 'Proceed with spec updates?', - default: true - }); - if (!shouldUpdateSpecs) { - console.log('Skipping spec updates. Proceeding with archive.'); - } - } - - if (shouldUpdateSpecs) { - // Prepare all updates first (validation pass, no writes) - const prepared: Array<{ update: SpecUpdate; rebuilt: string; counts: { added: number; modified: number; removed: number; renamed: number } }> = []; - try { - for (const update of specUpdates) { - const built = await buildUpdatedSpec(update, changeName!); - prepared.push({ update, rebuilt: built.rebuilt, counts: built.counts }); - } - } catch (err: any) { - console.log(String(err.message || err)); - console.log('Aborted. No files were changed.'); - return; - } - - // All validations passed; pre-validate rebuilt full spec and then write files and display counts - let totals = { added: 0, modified: 0, removed: 0, renamed: 0 }; - for (const p of prepared) { - const specName = path.basename(path.dirname(p.update.target)); - if (!skipValidation) { - const report = await new Validator().validateSpecContent(specName, p.rebuilt); - if (!report.valid) { - console.log(chalk.red(`\nValidation errors in rebuilt spec for ${specName} (will not write changes):`)); - for (const issue of report.issues) { - if (issue.level === 'ERROR') console.log(chalk.red(` βœ— ${issue.message}`)); - else if (issue.level === 'WARNING') console.log(chalk.yellow(` ⚠ ${issue.message}`)); - } - console.log('Aborted. No files were changed.'); - return; - } - } - await writeUpdatedSpec(p.update, p.rebuilt, p.counts); - totals.added += p.counts.added; - totals.modified += p.counts.modified; - totals.removed += p.counts.removed; - totals.renamed += p.counts.renamed; - } - console.log( - `Totals: + ${totals.added}, ~ ${totals.modified}, - ${totals.removed}, β†’ ${totals.renamed}` - ); - console.log('Specs updated successfully.'); - } - } - } - - // Create archive directory with date prefix - const archiveName = `${this.getArchiveDate()}-${changeName}`; - const archivePath = path.join(archiveDir, archiveName); - - // Check if archive already exists - try { - await fs.access(archivePath); - throw new Error(`Archive '${archiveName}' already exists.`); - } catch (error: any) { - if (error.code !== 'ENOENT') { - throw error; - } - } - - // Create archive directory if needed - await fs.mkdir(archiveDir, { recursive: true }); - - // Move change to archive - await fs.rename(changeDir, archivePath); - - console.log(`Change '${changeName}' archived as '${archiveName}'.`); - } - - private async selectChange(changesDir: string): Promise { - const { select } = await import('@inquirer/prompts'); - // Get all directories in changes (excluding archive) - const entries = await fs.readdir(changesDir, { withFileTypes: true }); - const changeDirs = entries - .filter(entry => entry.isDirectory() && entry.name !== 'archive') - .map(entry => entry.name) - .sort(); - - if (changeDirs.length === 0) { - console.log('No active changes found.'); - return null; - } - - // Build choices with progress inline to avoid duplicate lists - let choices: Array<{ name: string; value: string }> = changeDirs.map(name => ({ name, value: name })); - try { - const progressList: Array<{ id: string; status: string }> = []; - for (const id of changeDirs) { - const progress = await getTaskProgressForChange(changesDir, id); - const status = formatTaskStatus(progress); - progressList.push({ id, status }); - } - const nameWidth = Math.max(...progressList.map(p => p.id.length)); - choices = progressList.map(p => ({ - name: `${p.id.padEnd(nameWidth)} ${p.status}`, - value: p.id - })); - } catch { - // If anything fails, fall back to simple names - choices = changeDirs.map(name => ({ name, value: name })); - } - - try { - const answer = await select({ - message: 'Select a change to archive', - choices - }); - return answer; - } catch (error) { - // User cancelled (Ctrl+C) - return null; - } - } - - private getArchiveDate(): string { - // Returns date in YYYY-MM-DD format - return new Date().toISOString().split('T')[0]; - } -} diff --git a/src/core/artifact-logic.ts b/src/core/artifact-logic.ts new file mode 100644 index 000000000..8d2b60c22 --- /dev/null +++ b/src/core/artifact-logic.ts @@ -0,0 +1,369 @@ +import path from 'path'; +import * as fs from 'fs'; +import { + loadChangeContext, + formatChangeStatus, + generateInstructions, + listSchemas, + listSchemasWithInfo, + getSchemaDir, + resolveSchema, + ArtifactGraph, + type ChangeStatus, + type ArtifactInstructions, + type SchemaInfo, +} from './artifact-graph/index.js'; +import { validateChangeName } from '../utils/change-utils.js'; + +const DEFAULT_SCHEMA = 'spec-driven'; + +// ----------------------------------------------------------------------------- +// Validation Logic +// ----------------------------------------------------------------------------- + +export async function validateChangeExists( + changeName: string | undefined, + projectRoot: string +): Promise { + const changesPath = path.join(projectRoot, 'openspec', 'changes'); + + const getAvailableChanges = async (): Promise => { + try { + const entries = await fs.promises.readdir(changesPath, { withFileTypes: true }); + return entries + .filter((e) => e.isDirectory() && e.name !== 'archive' && !e.name.startsWith('.')) + .map((e) => e.name); + } catch { + return []; + } + }; + + if (!changeName) { + const available = await getAvailableChanges(); + if (available.length === 0) { + throw new Error('No changes found. Create one with: openspec_create_change'); + } + throw new Error( + `Missing required option changeName. Available changes: ${available.join(', ')}` + ); + } + + const nameValidation = validateChangeName(changeName); + if (!nameValidation.valid) { + throw new Error(`Invalid change name '${changeName}': ${nameValidation.error}`); + } + + const changePath = path.join(changesPath, changeName); + const exists = fs.existsSync(changePath) && fs.statSync(changePath).isDirectory(); + + if (!exists) { + const available = await getAvailableChanges(); + if (available.length === 0) { + throw new Error( + `Change '${changeName}' not found. No changes exist.` + ); + } + throw new Error( + `Change '${changeName}' not found. Available changes: ${available.join(', ')}` + ); + } + + return changeName; +} + +export function validateSchemaExists(schemaName: string): string { + const schemaDir = getSchemaDir(schemaName); + if (!schemaDir) { + const availableSchemas = listSchemas(); + throw new Error( + `Schema '${schemaName}' not found. Available schemas: ${availableSchemas.join(', ')}` + ); + } + return schemaName; +} + +// ----------------------------------------------------------------------------- +// Status Logic +// ----------------------------------------------------------------------------- + +export async function getArtifactStatus( + projectRoot: string, + changeName?: string, + schemaName?: string +): Promise { + const name = await validateChangeExists(changeName, projectRoot); + + if (schemaName) { + validateSchemaExists(schemaName); + } + + const context = loadChangeContext(projectRoot, name, schemaName); + return formatChangeStatus(context); +} + +// ----------------------------------------------------------------------------- +// Instructions Logic +// ----------------------------------------------------------------------------- + +export async function getArtifactInstructions( + projectRoot: string, + artifactId: string, + changeName?: string, + schemaName?: string +): Promise { + const name = await validateChangeExists(changeName, projectRoot); + + if (schemaName) { + validateSchemaExists(schemaName); + } + + const context = loadChangeContext(projectRoot, name, schemaName); + + if (!artifactId) { + const validIds = context.graph.getAllArtifacts().map((a) => a.id); + throw new Error( + `Missing required argument artifactId. Valid artifacts: ${validIds.join(', ')}` + ); + } + + const artifact = context.graph.getArtifact(artifactId); + + if (!artifact) { + const validIds = context.graph.getAllArtifacts().map((a) => a.id); + throw new Error( + `Artifact '${artifactId}' not found in schema '${context.schemaName}'. Valid artifacts: ${validIds.join(', ')}` + ); + } + + return generateInstructions(context, artifactId); +} + +// ----------------------------------------------------------------------------- +// Apply Logic +// ----------------------------------------------------------------------------- + +export interface TaskItem { + id: string; + description: string; + done: boolean; +} + +export interface ApplyInstructions { + changeName: string; + changeDir: string; + schemaName: string; + contextFiles: Record; + progress: { + total: number; + complete: number; + remaining: number; + }; + tasks: TaskItem[]; + state: 'blocked' | 'all_done' | 'ready'; + missingArtifacts?: string[]; + instruction: string; +} + +function parseTasksFile(content: string): TaskItem[] { + const tasks: TaskItem[] = []; + const lines = content.split('\n'); + let taskIndex = 0; + + for (const line of lines) { + const checkboxMatch = line.match(/^[-*]\s*\[([ xX])\]\s*(.+)$/); + if (checkboxMatch) { + taskIndex++; + const done = checkboxMatch[1].toLowerCase() === 'x'; + const description = checkboxMatch[2].trim(); + tasks.push({ + id: `${taskIndex}`, + description, + done, + }); + } + } + + return tasks; +} + +function artifactOutputExists(changeDir: string, generates: string): boolean { + const normalizedGenerates = generates.split('/').join(path.sep); + const fullPath = path.join(changeDir, normalizedGenerates); + + if (generates.includes('*')) { + const parts = normalizedGenerates.split(path.sep); + const dirParts: string[] = []; + let patternPart = ''; + for (const part of parts) { + if (part.includes('*')) { + patternPart = part; + break; + } + dirParts.push(part); + } + const dirPath = path.join(changeDir, ...dirParts); + + if (!fs.existsSync(dirPath) || !fs.statSync(dirPath).isDirectory()) { + return false; + } + + const extMatch = patternPart.match(/\*(\.[a-zA-Z0-9]+)$/); + const expectedExt = extMatch ? extMatch[1] : null; + + const hasMatchingFiles = (dir: string): boolean => { + try { + const entries = fs.readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + if (entry.isDirectory()) { + if (generates.includes('**') && hasMatchingFiles(path.join(dir, entry.name))) { + return true; + } + } else if (entry.isFile()) { + if (!expectedExt || entry.name.endsWith(expectedExt)) { + return true; + } + } + } + } catch { + return false; + } + return false; + }; + + return hasMatchingFiles(dirPath); + } + + return fs.existsSync(fullPath); +} + +export async function getApplyInstructions( + projectRoot: string, + changeName?: string, + schemaName?: string +): Promise { + const name = await validateChangeExists(changeName, projectRoot); + + if (schemaName) { + validateSchemaExists(schemaName); + } + + const context = loadChangeContext(projectRoot, name, schemaName); + const changeDir = path.join(projectRoot, 'openspec', 'changes', name); + + const schema = resolveSchema(context.schemaName); + const applyConfig = schema.apply; + + const requiredArtifactIds = applyConfig?.requires ?? schema.artifacts.map((a) => a.id); + const tracksFile = applyConfig?.tracks ?? null; + const schemaInstruction = applyConfig?.instruction ?? null; + + const missingArtifacts: string[] = []; + for (const artifactId of requiredArtifactIds) { + const artifact = schema.artifacts.find((a) => a.id === artifactId); + if (artifact && !artifactOutputExists(changeDir, artifact.generates)) { + missingArtifacts.push(artifactId); + } + } + + const contextFiles: Record = {}; + for (const artifact of schema.artifacts) { + if (artifactOutputExists(changeDir, artifact.generates)) { + contextFiles[artifact.id] = path.join(changeDir, artifact.generates); + } + } + + let tasks: TaskItem[] = []; + let tracksFileExists = false; + if (tracksFile) { + const tracksPath = path.join(changeDir, tracksFile); + tracksFileExists = fs.existsSync(tracksPath); + if (tracksFileExists) { + const tasksContent = await fs.promises.readFile(tracksPath, 'utf-8'); + tasks = parseTasksFile(tasksContent); + } + } + + const total = tasks.length; + const complete = tasks.filter((t) => t.done).length; + const remaining = total - complete; + + let state: ApplyInstructions['state']; + let instruction: string; + + if (missingArtifacts.length > 0) { + state = 'blocked'; + instruction = `Cannot apply this change yet. Missing artifacts: ${missingArtifacts.join(', ')}.`; + } else if (tracksFile && !tracksFileExists) { + const tracksFilename = path.basename(tracksFile); + state = 'blocked'; + instruction = `The ${tracksFilename} file is missing and must be created.`; + } else if (tracksFile && tracksFileExists && total === 0) { + const tracksFilename = path.basename(tracksFile); + state = 'blocked'; + instruction = `The ${tracksFilename} file exists but contains no tasks.`; + } else if (tracksFile && remaining === 0 && total > 0) { + state = 'all_done'; + instruction = 'All tasks are complete! This change is ready to be archived.'; + } else if (!tracksFile) { + state = 'ready'; + instruction = schemaInstruction?.trim() ?? 'All required artifacts complete. Proceed with implementation.'; + } else { + state = 'ready'; + instruction = schemaInstruction?.trim() ?? 'Read context files, work through pending tasks, mark complete as you go.'; + } + + return { + changeName: name, + changeDir, + schemaName: context.schemaName, + contextFiles, + progress: { total, complete, remaining }, + tasks, + state, + missingArtifacts: missingArtifacts.length > 0 ? missingArtifacts : undefined, + instruction, + }; +} + +// ----------------------------------------------------------------------------- +// Templates Logic +// ----------------------------------------------------------------------------- + +export interface TemplateInfo { + artifactId: string; + templatePath: string; + source: 'user' | 'package'; +} + +export async function getTemplatePaths( + schemaName: string = DEFAULT_SCHEMA +): Promise> { + validateSchemaExists(schemaName); + const schema = resolveSchema(schemaName); + const graph = ArtifactGraph.fromSchema(schema); + const schemaDir = getSchemaDir(schemaName)!; + + const { getUserSchemasDir } = await import('./artifact-graph/resolver.js'); + const userSchemasDir = getUserSchemasDir(); + const isUserOverride = schemaDir.startsWith(userSchemasDir); + + const templates: TemplateInfo[] = graph.getAllArtifacts().map((artifact) => ({ + artifactId: artifact.id, + templatePath: path.join(schemaDir, 'templates', artifact.template), + source: isUserOverride ? 'user' : 'package', + })); + + const output: Record = {}; + for (const t of templates) { + output[t.artifactId] = t; + } + return output; +} + +// ----------------------------------------------------------------------------- +// Schemas Logic +// ----------------------------------------------------------------------------- + +export function getAvailableSchemas(): SchemaInfo[] { + return listSchemasWithInfo(); +} diff --git a/src/core/change-logic.ts b/src/core/change-logic.ts new file mode 100644 index 000000000..979925fe9 --- /dev/null +++ b/src/core/change-logic.ts @@ -0,0 +1,199 @@ +import { promises as fs } from 'fs'; +import path from 'path'; +import { FileSystemUtils } from '../utils/file-system.js'; +import { writeChangeMetadata, validateSchemaName } from '../utils/change-metadata.js'; +import { validateChangeName } from '../utils/change-utils.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; +import { JsonConverter } from './converters/json-converter.js'; +import { Validator } from './validation/validator.js'; +import { ChangeParser } from './parsers/change-parser.js'; +import { Change } from './schemas/index.js'; + +const DEFAULT_SCHEMA = 'spec-driven'; +const ARCHIVE_DIR = 'archive'; +const TASK_PATTERN = /^[-*]\s+\[[\sx]\]/i; +const COMPLETED_TASK_PATTERN = /^[-*]\s+\[x\]/i; + +export interface CreateChangeResult { + name: string; + changeDir: string; + schema: string; +} + +export interface ChangeJsonOutput { + id: string; + title: string; + deltaCount: number; + deltas: any[]; +} + +export interface ChangeListItem { + id: string; + title: string; + deltaCount: number; + taskStatus: { total: number; completed: number }; +} + +export async function runCreateChange( + projectRoot: string, + name: string, + options: { schema?: string } = {} +): Promise { + const validation = validateChangeName(name); + if (!validation.valid) { + throw new Error(validation.error); + } + + const schemaName = options.schema ?? DEFAULT_SCHEMA; + validateSchemaName(schemaName); + + const openspecPath = await resolveOpenSpecDir(projectRoot); + const changeDir = path.join(openspecPath, 'changes', name); + + if (await FileSystemUtils.directoryExists(changeDir)) { + throw new Error(`Change '${name}' already exists at ${changeDir}`); + } + + await FileSystemUtils.createDirectory(changeDir); + + const today = new Date().toISOString().split('T')[0]; + writeChangeMetadata(changeDir, { + schema: schemaName, + created: today, + }); + + return { + name, + changeDir, + schema: schemaName + }; +} + +export async function getActiveChanges(projectRoot: string): Promise { + const openspecPath = await resolveOpenSpecDir(projectRoot); + const changesPath = path.join(openspecPath, 'changes'); + try { + const entries = await fs.readdir(changesPath, { withFileTypes: true }); + const result: string[] = []; + for (const entry of entries) { + if (!entry.isDirectory() || entry.name.startsWith('.') || entry.name === ARCHIVE_DIR) continue; + const proposalPath = path.join(changesPath, entry.name, 'proposal.md'); + try { + await fs.access(proposalPath); + result.push(entry.name); + } catch { + // skip directories without proposal.md + } + } + return result.sort(); + } catch { + return []; + } +} + +export async function getChangeMarkdown(projectRoot: string, changeName: string): Promise { + const changesPath = path.join(await resolveOpenSpecDir(projectRoot), 'changes'); + const proposalPath = path.join(changesPath, changeName, 'proposal.md'); + try { + return await fs.readFile(proposalPath, 'utf-8'); + } catch { + throw new Error(`Change "${changeName}" not found at ${proposalPath}`); + } +} + +export async function getChangeJson(projectRoot: string, changeName: string): Promise { + const changesPath = path.join(await resolveOpenSpecDir(projectRoot), 'changes'); + const proposalPath = path.join(changesPath, changeName, 'proposal.md'); + + try { + await fs.access(proposalPath); + } catch { + throw new Error(`Change "${changeName}" not found at ${proposalPath}`); + } + + const converter = new JsonConverter(); + const jsonOutput = await converter.convertChangeToJson(proposalPath); + const parsed: Change = JSON.parse(jsonOutput); + const contentForTitle = await fs.readFile(proposalPath, 'utf-8'); + const title = extractTitle(contentForTitle, changeName); + const id = parsed.name; + const deltas = parsed.deltas || []; + + return { + id, + title, + deltaCount: deltas.length, + deltas, + }; +} + +export async function getChangeDetails(projectRoot: string, changeName: string): Promise { + const changesPath = path.join(await resolveOpenSpecDir(projectRoot), 'changes'); + const proposalPath = path.join(changesPath, changeName, 'proposal.md'); + const tasksPath = path.join(changesPath, changeName, 'tasks.md'); + + try { + const content = await fs.readFile(proposalPath, 'utf-8'); + const changeDir = path.join(changesPath, changeName); + const parser = new ChangeParser(content, changeDir); + const change = await parser.parseChangeWithDeltas(changeName); + + let taskStatus = { total: 0, completed: 0 }; + try { + const tasksContent = await fs.readFile(tasksPath, 'utf-8'); + taskStatus = countTasks(tasksContent); + } catch { + // Tasks file may not exist, which is okay + } + + return { + id: changeName, + title: extractTitle(content, changeName), + deltaCount: change.deltas.length, + taskStatus, + }; + } catch { + return { + id: changeName, + title: 'Unknown', + deltaCount: 0, + taskStatus: { total: 0, completed: 0 }, + }; + } +} + +export async function validateChange(projectRoot: string, changeName: string, strict: boolean = false) { + const changesPath = path.join(await resolveOpenSpecDir(projectRoot), 'changes'); + const changeDir = path.join(changesPath, changeName); + + try { + await fs.access(changeDir); + } catch { + throw new Error(`Change "${changeName}" not found at ${changeDir}`); + } + + const validator = new Validator(strict); + return await validator.validateChangeDeltaSpecs(changeDir); +} + +export function extractTitle(content: string, changeName: string): string { + const match = content.match(/^#\s+(?:Change:\s+)?(.+)$/im); + return match ? match[1].trim() : changeName; +} + +export function countTasks(content: string): { total: number; completed: number } { + const lines = content.split('\n'); + let total = 0; + let completed = 0; + + for (const line of lines) { + if (line.match(TASK_PATTERN)) { + total++; + if (line.match(COMPLETED_TASK_PATTERN)) { + completed++; + } + } + } + + return { total, completed }; +} \ No newline at end of file diff --git a/src/core/config-logic.ts b/src/core/config-logic.ts new file mode 100644 index 000000000..8e0cb72ec --- /dev/null +++ b/src/core/config-logic.ts @@ -0,0 +1,81 @@ +import { + getGlobalConfigPath, + getGlobalConfig, + saveGlobalConfig, + GlobalConfig, +} from './global-config.js'; +import { + getNestedValue, + setNestedValue, + deleteNestedValue, + coerceValue, + validateConfigKeyPath, + validateConfig, + DEFAULT_CONFIG, +} from './config-schema.js'; + +export function getConfigPath(): string { + return getGlobalConfigPath(); +} + +export function getConfigList(): GlobalConfig { + return getGlobalConfig(); +} + +export function getConfigValue(key: string): unknown { + const config = getGlobalConfig(); + return getNestedValue(config as Record, key); +} + +export function setConfigValue( + key: string, + value: string, + options: { forceString?: boolean; allowUnknown?: boolean } = {} +): { key: string; value: unknown; displayValue: string } { + const allowUnknown = Boolean(options.allowUnknown); + const keyValidation = validateConfigKeyPath(key); + + if (!keyValidation.valid && !allowUnknown) { + const reason = keyValidation.reason ? ` ${keyValidation.reason}.` : ''; + throw new Error(`Invalid configuration key "${key}".${reason}`); + } + + const config = getGlobalConfig() as Record; + const coercedValue = coerceValue(value, options.forceString || false); + + const newConfig = JSON.parse(JSON.stringify(config)); + setNestedValue(newConfig, key, coercedValue); + + const validation = validateConfig(newConfig); + if (!validation.success) { + throw new Error(`Invalid configuration - ${validation.error}`); + } + + setNestedValue(config, key, coercedValue); + saveGlobalConfig(config as GlobalConfig); + + const displayValue = + typeof coercedValue === 'string' ? `"${coercedValue}"` : String(coercedValue); + + return { key, value: coercedValue, displayValue }; +} + +export function unsetConfigValue(key: string): boolean { + const config = getGlobalConfig() as Record; + const existed = deleteNestedValue(config, key); + + if (existed) { + saveGlobalConfig(config as GlobalConfig); + } + + return existed; +} + +export function resetConfig(all: boolean): boolean { + if (!all) { + throw new Error('All flag is required for reset'); + } + + saveGlobalConfig({ ...DEFAULT_CONFIG }); + return true; +} diff --git a/src/core/config.ts b/src/core/config.ts index a27d6eafb..4e059d563 100644 --- a/src/core/config.ts +++ b/src/core/config.ts @@ -1,4 +1,6 @@ -export const OPENSPEC_DIR_NAME = 'openspec'; +export const DEFAULT_OPENSPEC_DIR_NAME = '.openspec'; +export const LEGACY_OPENSPEC_DIR_NAME = 'openspec'; +export const OPENSPEC_DIR_NAME = DEFAULT_OPENSPEC_DIR_NAME; export const OPENSPEC_MARKERS = { start: '', diff --git a/src/core/init-logic.ts b/src/core/init-logic.ts new file mode 100644 index 000000000..9cf8929d9 --- /dev/null +++ b/src/core/init-logic.ts @@ -0,0 +1,249 @@ +import path from 'path'; +import { FileSystemUtils } from '../utils/file-system.js'; +import { TemplateManager, ProjectContext } from './templates/index.js'; +import { ToolRegistry } from './configurators/registry.js'; +import { SlashCommandRegistry } from './configurators/slash/registry.js'; +import { + OpenSpecConfig, + AI_TOOLS, + DEFAULT_OPENSPEC_DIR_NAME, + LEGACY_OPENSPEC_DIR_NAME, + OPENSPEC_MARKERS, +} from './config.js'; + +export type RootStubStatus = 'created' | 'updated' | 'skipped'; + +export interface InitResult { + projectPath: string; + openspecPath: string; + openspecDir: string; + extendMode: boolean; + selectedTools: string[]; + createdTools: string[]; + refreshedTools: string[]; + skippedExistingTools: string[]; + skippedTools: string[]; + rootStubStatus: RootStubStatus; + migrated: boolean; +} + +export async function runInit(targetPath: string, options: { tools?: string[], shouldMigrate?: boolean } = {}): Promise { + const projectPath = path.resolve(targetPath); + + // Check for legacy directory + const legacyPath = path.join(projectPath, LEGACY_OPENSPEC_DIR_NAME); + const defaultPath = path.join(projectPath, DEFAULT_OPENSPEC_DIR_NAME); + + let openspecPath = defaultPath; + let openspecDir = DEFAULT_OPENSPEC_DIR_NAME; + let migrated = false; + + const hasLegacy = await FileSystemUtils.directoryExists(legacyPath); + const hasDefault = await FileSystemUtils.directoryExists(defaultPath); + + if (hasLegacy && !hasDefault) { + if (options.shouldMigrate) { + await FileSystemUtils.rename(legacyPath, defaultPath); + migrated = true; + } else { + openspecPath = legacyPath; + openspecDir = LEGACY_OPENSPEC_DIR_NAME; + } + } else if (hasLegacy) { + openspecPath = legacyPath; + openspecDir = LEGACY_OPENSPEC_DIR_NAME; + } + + const extendMode = await FileSystemUtils.directoryExists(openspecPath); + + if (!(await FileSystemUtils.ensureWritePermissions(projectPath))) { + throw new Error(`Insufficient permissions to write to ${projectPath}`); + } + + const existingToolStates = await getExistingToolStates(projectPath, extendMode); + + const selectedToolIds = options.tools || []; + const availableTools = AI_TOOLS.filter((tool) => tool.available); + + const createdTools: string[] = []; + const refreshedTools: string[] = []; + const skippedExistingTools: string[] = []; + const skippedTools: string[] = []; + + for (const tool of availableTools) { + if (selectedToolIds.includes(tool.value)) { + if (existingToolStates[tool.value]) { + refreshedTools.push(tool.value); + } else { + createdTools.push(tool.value); + } + } else { + if (existingToolStates[tool.value]) { + skippedExistingTools.push(tool.value); + } else { + skippedTools.push(tool.value); + } + } + } + + // Step 1: Create directory structure + if (!extendMode) { + await createDirectoryStructure(openspecPath); + await writeTemplateFiles(openspecPath, { aiTools: selectedToolIds }, false); + } else { + await createDirectoryStructure(openspecPath); + await writeTemplateFiles(openspecPath, { aiTools: selectedToolIds }, true); + } + + // Step 2: Configure AI tools + const rootStubStatus = await configureAITools( + projectPath, + openspecDir, + selectedToolIds + ); + + return { + projectPath, + openspecPath, + openspecDir, + extendMode, + selectedTools: selectedToolIds, + createdTools, + refreshedTools, + skippedExistingTools, + skippedTools, + rootStubStatus, + migrated + }; +} + +async function getExistingToolStates( + projectPath: string, + extendMode: boolean +): Promise> { + if (!extendMode) { + return Object.fromEntries(AI_TOOLS.map(t => [t.value, false])); + } + + const entries = await Promise.all( + AI_TOOLS.map(async (t) => [t.value, await isToolConfigured(projectPath, t.value)] as const) + ); + return Object.fromEntries(entries); +} + +async function isToolConfigured( + projectPath: string, + toolId: string +): Promise { + const fileHasMarkers = async (absolutePath: string): Promise => { + try { + const content = await FileSystemUtils.readFile(absolutePath); + return content.includes(OPENSPEC_MARKERS.start) && content.includes(OPENSPEC_MARKERS.end); + } catch { + return false; + } + }; + + let hasConfigFile = false; + let hasSlashCommands = false; + + const configFile = ToolRegistry.get(toolId)?.configFileName; + if (configFile) { + const configPath = path.join(projectPath, configFile); + hasConfigFile = (await FileSystemUtils.fileExists(configPath)) && (await fileHasMarkers(configPath)); + } + + const slashConfigurator = SlashCommandRegistry.get(toolId); + if (slashConfigurator) { + for (const target of slashConfigurator.getTargets()) { + const absolute = slashConfigurator.resolveAbsolutePath(projectPath, target.id); + if ((await FileSystemUtils.fileExists(absolute)) && (await fileHasMarkers(absolute))) { + hasSlashCommands = true; + break; + } + } + } + + const hasConfigFileRequirement = configFile !== undefined; + const hasSlashCommandRequirement = slashConfigurator !== undefined; + + if (hasConfigFileRequirement && hasSlashCommandRequirement) { + return hasConfigFile && hasSlashCommands; + } else if (hasConfigFileRequirement) { + return hasConfigFile; + } else if (hasSlashCommandRequirement) { + return hasSlashCommands; + } + + return false; +} + +async function createDirectoryStructure(openspecPath: string): Promise { + const directories = [ + openspecPath, + path.join(openspecPath, 'specs'), + path.join(openspecPath, 'changes'), + path.join(openspecPath, 'changes', 'archive'), + ]; + + for (const dir of directories) { + await FileSystemUtils.createDirectory(dir); + } +} + +async function writeTemplateFiles( + openspecPath: string, + config: OpenSpecConfig, + skipExisting: boolean +): Promise { + const context: ProjectContext = {}; + const templates = TemplateManager.getTemplates(context); + + for (const template of templates) { + const filePath = path.join(openspecPath, template.path); + if (skipExisting && (await FileSystemUtils.fileExists(filePath))) { + continue; + } + const content = typeof template.content === 'function' + ? template.content(context) + : template.content; + await FileSystemUtils.writeFile(filePath, content); + } +} + +async function configureAITools( + projectPath: string, + openspecDir: string, + toolIds: string[] +): Promise { + const rootStubStatus = await configureRootAgentsStub(projectPath, openspecDir); + + for (const toolId of toolIds) { + const configurator = ToolRegistry.get(toolId); + if (configurator && configurator.isAvailable) { + await configurator.configure(projectPath, openspecDir); + } + + const slashConfigurator = SlashCommandRegistry.get(toolId); + if (slashConfigurator && slashConfigurator.isAvailable) { + await slashConfigurator.generateAll(projectPath, openspecDir); + } + } + + return rootStubStatus; +} + +async function configureRootAgentsStub( + projectPath: string, + openspecDir: string +): Promise { + const configurator = ToolRegistry.get('agents'); + if (!configurator || !configurator.isAvailable) { + return 'skipped'; + } + + const stubPath = path.join(projectPath, configurator.configFileName); + const existed = await FileSystemUtils.fileExists(stubPath); + await configurator.configure(projectPath, openspecDir); + return existed ? 'updated' : 'created'; +} diff --git a/src/core/list.ts b/src/core/list.ts index 3f40829a6..5b7eb7850 100644 --- a/src/core/list.ts +++ b/src/core/list.ts @@ -1,20 +1,22 @@ import { promises as fs } from 'fs'; import path from 'path'; -import { getTaskProgressForChange, formatTaskStatus } from '../utils/task-progress.js'; +import { getTaskProgressForChange } from '../utils/task-progress.js'; import { readFileSync } from 'fs'; import { join } from 'path'; import { MarkdownParser } from './parsers/markdown-parser.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; +import { FileSystemUtils } from '../utils/file-system.js'; -interface ChangeInfo { +export interface ChangeInfo { name: string; completedTasks: number; totalTasks: number; lastModified: Date; } -interface ListOptions { - sort?: 'recent' | 'name'; - json?: boolean; +export interface SpecInfo { + id: string; + requirementCount: number; } /** @@ -50,145 +52,76 @@ async function getLastModified(dirPath: string): Promise { return latest; } -/** - * Format a date as relative time (e.g., "2 hours ago", "3 days ago") - */ -function formatRelativeTime(date: Date): string { - const now = new Date(); - const diffMs = now.getTime() - date.getTime(); - const diffSecs = Math.floor(diffMs / 1000); - const diffMins = Math.floor(diffSecs / 60); - const diffHours = Math.floor(diffMins / 60); - const diffDays = Math.floor(diffHours / 24); - - if (diffDays > 30) { - return date.toLocaleDateString(); - } else if (diffDays > 0) { - return `${diffDays}d ago`; - } else if (diffHours > 0) { - return `${diffHours}h ago`; - } else if (diffMins > 0) { - return `${diffMins}m ago`; - } else { - return 'just now'; - } -} +export async function listChanges(targetPath: string, sort: 'recent' | 'name' = 'recent'): Promise { + const openspecPath = await resolveOpenSpecDir(targetPath); + const changesDir = path.join(openspecPath, 'changes'); -export class ListCommand { - async execute(targetPath: string = '.', mode: 'changes' | 'specs' = 'changes', options: ListOptions = {}): Promise { - const { sort = 'recent', json = false } = options; - - if (mode === 'changes') { - const changesDir = path.join(targetPath, 'openspec', 'changes'); + // Check if changes directory exists + if (!await FileSystemUtils.directoryExists(changesDir)) { + // Return empty if directory doesn't exist, or throw? The original code threw error. + throw new Error("No OpenSpec changes directory found. Run 'openspec init' first."); + } - // Check if changes directory exists - try { - await fs.access(changesDir); - } catch { - throw new Error("No OpenSpec changes directory found. Run 'openspec init' first."); - } + // Get all directories in changes (excluding archive) + const entries = await fs.readdir(changesDir, { withFileTypes: true }); + const changeDirs = entries + .filter(entry => entry.isDirectory() && entry.name !== 'archive') + .map(entry => entry.name); - // Get all directories in changes (excluding archive) - const entries = await fs.readdir(changesDir, { withFileTypes: true }); - const changeDirs = entries - .filter(entry => entry.isDirectory() && entry.name !== 'archive') - .map(entry => entry.name); - - if (changeDirs.length === 0) { - if (json) { - console.log(JSON.stringify({ changes: [] })); - } else { - console.log('No active changes found.'); - } - return; - } + if (changeDirs.length === 0) { + return []; + } - // Collect information about each change - const changes: ChangeInfo[] = []; + // Collect information about each change + const changes: ChangeInfo[] = []; - for (const changeDir of changeDirs) { + for (const changeDir of changeDirs) { const progress = await getTaskProgressForChange(changesDir, changeDir); const changePath = path.join(changesDir, changeDir); const lastModified = await getLastModified(changePath); changes.push({ - name: changeDir, - completedTasks: progress.completed, - totalTasks: progress.total, - lastModified + name: changeDir, + completedTasks: progress.completed, + totalTasks: progress.total, + lastModified }); - } + } - // Sort by preference (default: recent first) - if (sort === 'recent') { + // Sort by preference (default: recent first) + if (sort === 'recent') { changes.sort((a, b) => b.lastModified.getTime() - a.lastModified.getTime()); - } else { + } else { changes.sort((a, b) => a.name.localeCompare(b.name)); - } - - // JSON output for programmatic use - if (json) { - const jsonOutput = changes.map(c => ({ - name: c.name, - completedTasks: c.completedTasks, - totalTasks: c.totalTasks, - lastModified: c.lastModified.toISOString(), - status: c.totalTasks === 0 ? 'no-tasks' : c.completedTasks === c.totalTasks ? 'complete' : 'in-progress' - })); - console.log(JSON.stringify({ changes: jsonOutput }, null, 2)); - return; - } - - // Display results - console.log('Changes:'); - const padding = ' '; - const nameWidth = Math.max(...changes.map(c => c.name.length)); - for (const change of changes) { - const paddedName = change.name.padEnd(nameWidth); - const status = formatTaskStatus({ total: change.totalTasks, completed: change.completedTasks }); - const timeAgo = formatRelativeTime(change.lastModified); - console.log(`${padding}${paddedName} ${status.padEnd(12)} ${timeAgo}`); - } - return; } + + return changes; +} - // specs mode - const specsDir = path.join(targetPath, 'openspec', 'specs'); - try { - await fs.access(specsDir); - } catch { - console.log('No specs found.'); - return; +export async function listSpecs(targetPath: string): Promise { + const openspecPath = await resolveOpenSpecDir(targetPath); + const specsDir = path.join(openspecPath, 'specs'); + + if (!await FileSystemUtils.directoryExists(specsDir)) { + return []; } const entries = await fs.readdir(specsDir, { withFileTypes: true }); const specDirs = entries.filter(e => e.isDirectory()).map(e => e.name); - if (specDirs.length === 0) { - console.log('No specs found.'); - return; - } - - type SpecInfo = { id: string; requirementCount: number }; + const specs: SpecInfo[] = []; for (const id of specDirs) { - const specPath = join(specsDir, id, 'spec.md'); - try { - const content = readFileSync(specPath, 'utf-8'); - const parser = new MarkdownParser(content); - const spec = parser.parseSpec(id); - specs.push({ id, requirementCount: spec.requirements.length }); - } catch { - // If spec cannot be read or parsed, include with 0 count - specs.push({ id, requirementCount: 0 }); - } + const specPath = join(specsDir, id, 'spec.md'); + try { + const content = readFileSync(specPath, 'utf-8'); + const parser = new MarkdownParser(content); + const spec = parser.parseSpec(id); + specs.push({ id, requirementCount: spec.requirements.length }); + } catch { + // If spec cannot be read or parsed, include with 0 count + specs.push({ id, requirementCount: 0 }); + } } specs.sort((a, b) => a.id.localeCompare(b.id)); - console.log('Specs:'); - const padding = ' '; - const nameWidth = Math.max(...specs.map(s => s.id.length)); - for (const spec of specs) { - const padded = spec.id.padEnd(nameWidth); - console.log(`${padding}${padded} requirements ${spec.requirementCount}`); - } - } -} \ No newline at end of file + return specs; +} diff --git a/src/core/path-resolver.ts b/src/core/path-resolver.ts new file mode 100644 index 000000000..c00483cd3 --- /dev/null +++ b/src/core/path-resolver.ts @@ -0,0 +1,19 @@ +import path from 'path'; +import { FileSystemUtils } from '../utils/file-system.js'; +import { DEFAULT_OPENSPEC_DIR_NAME, LEGACY_OPENSPEC_DIR_NAME } from './config.js'; + +/** + * Resolves the path to the OpenSpec directory. + * Priorities: + * 1. Legacy `openspec/` directory if it exists. + * 2. Default `.openspec/` directory otherwise. + */ +export async function resolveOpenSpecDir(projectRoot: string): Promise { + const legacyPath = path.join(projectRoot, LEGACY_OPENSPEC_DIR_NAME); + + if (await FileSystemUtils.directoryExists(legacyPath)) { + return legacyPath; + } + + return path.join(projectRoot, DEFAULT_OPENSPEC_DIR_NAME); +} diff --git a/src/core/spec-logic.ts b/src/core/spec-logic.ts new file mode 100644 index 000000000..24be343c7 --- /dev/null +++ b/src/core/spec-logic.ts @@ -0,0 +1,126 @@ +import { promises as fs } from 'fs'; +import path from 'path'; +import { MarkdownParser } from './parsers/markdown-parser.js'; +import { Spec } from './schemas/index.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; +import { FileSystemUtils } from '../utils/file-system.js'; + +export interface ShowOptions { + json?: boolean; + // JSON-only filters (raw-first text has no filters) + requirements?: boolean; + scenarios?: boolean; // --no-scenarios sets this to false (JSON only) + requirement?: string; // JSON only + noInteractive?: boolean; +} + +export interface SpecListItem { + id: string; + title: string; + requirementCount: number; +} + +export async function getSpecMarkdown(projectRoot: string, specId: string): Promise { + const openspecPath = await resolveOpenSpecDir(projectRoot); + const specPath = path.join(openspecPath, 'specs', specId, 'spec.md'); + if (!(await FileSystemUtils.fileExists(specPath))) { + throw new Error(`Spec '${specId}' not found at ${specPath}`); + } + return FileSystemUtils.readFile(specPath); +} + +export async function getSpecJson(projectRoot: string, specId: string, options: ShowOptions = {}): Promise { + const openspecPath = await resolveOpenSpecDir(projectRoot); + const specPath = path.join(openspecPath, 'specs', specId, 'spec.md'); + if (!(await FileSystemUtils.fileExists(specPath))) { + throw new Error(`Spec '${specId}' not found at ${specPath}`); + } + + const content = await FileSystemUtils.readFile(specPath); + const parser = new MarkdownParser(content); + const parsed = parser.parseSpec(specId); + + const filtered = filterSpec(parsed, options); + return { + id: specId, + title: parsed.name, + overview: parsed.overview, + requirementCount: filtered.requirements.length, + requirements: filtered.requirements, + metadata: parsed.metadata ?? { version: '1.0.0', format: 'openspec' as const }, + }; +} + +export async function getSpecIds(projectRoot: string): Promise { + const openspecPath = await resolveOpenSpecDir(projectRoot); + const specsPath = path.join(openspecPath, 'specs'); + try { + const entries = await fs.readdir(specsPath, { withFileTypes: true }); + const ids: string[] = []; + for (const entry of entries) { + if (entry.isDirectory() && !entry.name.startsWith('.')) { + const specPath = path.join(specsPath, entry.name, 'spec.md'); + if (await FileSystemUtils.fileExists(specPath)) { + ids.push(entry.name); + } + } + } + return ids.sort(); + } catch { + return []; + } +} + +export async function getSpecDetails(projectRoot: string, specId: string): Promise { + const openspecPath = await resolveOpenSpecDir(projectRoot); + const specPath = path.join(openspecPath, 'specs', specId, 'spec.md'); + + try { + const content = await FileSystemUtils.readFile(specPath); + const parser = new MarkdownParser(content); + const spec = parser.parseSpec(specId); + + return { + id: specId, + title: spec.name, + requirementCount: spec.requirements.length + }; + } catch { + return { + id: specId, + title: specId, + requirementCount: 0 + }; + } +} + +function validateRequirementIndex(spec: Spec, requirementOpt?: string): number | undefined { + if (!requirementOpt) return undefined; + const index = Number.parseInt(requirementOpt, 10); + if (!Number.isInteger(index) || index < 1 || index > spec.requirements.length) { + throw new Error(`Requirement ${requirementOpt} not found`); + } + return index - 1; // convert to 0-based +} + +function filterSpec(spec: Spec, options: ShowOptions): Spec { + const requirementIndex = validateRequirementIndex(spec, options.requirement); + const includeScenarios = options.scenarios !== false && !options.requirements; + + const filteredRequirements = (requirementIndex !== undefined + ? [spec.requirements[requirementIndex]] + : spec.requirements + ).map(req => ({ + text: req.text, + scenarios: includeScenarios ? req.scenarios : [], + })); + + const metadata = spec.metadata ?? { version: '1.0.0', format: 'openspec' as const }; + + return { + name: spec.name, + overview: spec.overview, + requirements: filteredRequirements, + metadata, + }; +} \ No newline at end of file diff --git a/src/core/specs-apply.ts b/src/core/specs-apply.ts index 9ce0f12f4..620e2fa05 100644 --- a/src/core/specs-apply.ts +++ b/src/core/specs-apply.ts @@ -7,7 +7,6 @@ import { promises as fs } from 'fs'; import path from 'path'; -import chalk from 'chalk'; import { extractRequirementsSection, parseDeltaSpec, @@ -44,6 +43,7 @@ export interface SpecsApplyOutput { renamed: number; }; noChanges: boolean; + ignoredRemovals: Array<{ specName: string; count: number }>; } // ----------------------------------------------------------------------------- @@ -101,7 +101,7 @@ export async function findSpecUpdates(changeDir: string, mainSpecsDir: string): export async function buildUpdatedSpec( update: SpecUpdate, changeName: string -): Promise<{ rebuilt: string; counts: { added: number; modified: number; removed: number; renamed: number } }> { +): Promise<{ rebuilt: string; counts: { added: number; modified: number; removed: number; renamed: number }; ignoredRemovals: number }> { // Read change spec content (delta-format expected) const changeContent = await fs.readFile(update.source, 'utf-8'); @@ -201,24 +201,18 @@ export async function buildUpdatedSpec( // Load or create base target content let targetContent: string; let isNewSpec = false; + let ignoredRemovals = 0; try { targetContent = await fs.readFile(update.target, 'utf-8'); } catch { // Target spec does not exist; MODIFIED and RENAMED are not allowed for new specs - // REMOVED will be ignored with a warning since there's nothing to remove + // REMOVED will be ignored since there's nothing to remove if (plan.modified.length > 0 || plan.renamed.length > 0) { throw new Error( `${specName}: target spec does not exist; only ADDED requirements are allowed for new specs. MODIFIED and RENAMED operations require an existing spec.` ); } - // Warn about REMOVED requirements being ignored for new specs - if (plan.removed.length > 0) { - console.log( - chalk.yellow( - `⚠️ Warning: ${specName} - ${plan.removed.length} REMOVED requirement(s) ignored for new spec (nothing to remove).` - ) - ); - } + ignoredRemovals = plan.removed.length; isNewSpec = true; targetContent = buildSpecSkeleton(specName, changeName); } @@ -258,7 +252,6 @@ export async function buildUpdatedSpec( for (const name of plan.removed) { const key = normalizeRequirementName(name); if (!nameToBlock.has(key)) { - // For new specs, REMOVED requirements are already warned about and ignored // For existing specs, missing requirements are an error if (!isNewSpec) { throw new Error(`${specName} REMOVED failed for header "### Requirement: ${name}" - not found`); @@ -294,8 +287,6 @@ export async function buildUpdatedSpec( nameToBlock.set(key, add); } - // Duplicates within resulting map are implicitly prevented by key uniqueness. - // Recompose requirements section preserving original ordering where possible const keptOrder: RequirementBlock[] = []; const seen = new Set(); @@ -333,6 +324,7 @@ export async function buildUpdatedSpec( removed: plan.removed.length, renamed: plan.renamed.length, }, + ignoredRemovals }; } @@ -341,20 +333,12 @@ export async function buildUpdatedSpec( */ export async function writeUpdatedSpec( update: SpecUpdate, - rebuilt: string, - counts: { added: number; modified: number; removed: number; renamed: number } + rebuilt: string ): Promise { // Create target directory if needed const targetDir = path.dirname(update.target); await fs.mkdir(targetDir, { recursive: true }); await fs.writeFile(update.target, rebuilt); - - const specName = path.basename(path.dirname(update.target)); - console.log(`Applying changes to openspec/specs/${specName}/spec.md:`); - if (counts.added) console.log(` + ${counts.added} added`); - if (counts.modified) console.log(` ~ ${counts.modified} modified`); - if (counts.removed) console.log(` - ${counts.removed} removed`); - if (counts.renamed) console.log(` β†’ ${counts.renamed} renamed`); } /** @@ -367,11 +351,6 @@ export function buildSpecSkeleton(specFolderName: string, changeName: string): s /** * Apply all delta specs from a change to main specs. - * - * @param projectRoot - The project root directory - * @param changeName - The name of the change to apply - * @param options - Options for the operation - * @returns Result of the operation with counts */ export async function applySpecs( projectRoot: string, @@ -379,7 +358,6 @@ export async function applySpecs( options: { dryRun?: boolean; skipValidation?: boolean; - silent?: boolean; } = {} ): Promise { const changeDir = path.join(projectRoot, 'openspec', 'changes', changeName); @@ -404,6 +382,7 @@ export async function applySpecs( capabilities: [], totals: { added: 0, modified: 0, removed: 0, renamed: 0 }, noChanges: true, + ignoredRemovals: [] }; } @@ -412,11 +391,12 @@ export async function applySpecs( update: SpecUpdate; rebuilt: string; counts: { added: number; modified: number; removed: number; renamed: number }; + ignoredRemovals: number; }> = []; for (const update of specUpdates) { const built = await buildUpdatedSpec(update, changeName); - prepared.push({ update, rebuilt: built.rebuilt, counts: built.counts }); + prepared.push({ update, rebuilt: built.rebuilt, counts: built.counts, ignoredRemovals: built.ignoredRemovals }); } // Validate rebuilt specs unless validation is skipped @@ -438,29 +418,13 @@ export async function applySpecs( // Build results const capabilities: ApplyResult[] = []; const totals = { added: 0, modified: 0, removed: 0, renamed: 0 }; + const ignoredRemovals: Array<{ specName: string; count: number }> = []; for (const p of prepared) { const capability = path.basename(path.dirname(p.update.target)); if (!options.dryRun) { - // Write the updated spec - const targetDir = path.dirname(p.update.target); - await fs.mkdir(targetDir, { recursive: true }); - await fs.writeFile(p.update.target, p.rebuilt); - - if (!options.silent) { - console.log(`Applying changes to openspec/specs/${capability}/spec.md:`); - if (p.counts.added) console.log(` + ${p.counts.added} added`); - if (p.counts.modified) console.log(` ~ ${p.counts.modified} modified`); - if (p.counts.removed) console.log(` - ${p.counts.removed} removed`); - if (p.counts.renamed) console.log(` β†’ ${p.counts.renamed} renamed`); - } - } else if (!options.silent) { - console.log(`Would apply changes to openspec/specs/${capability}/spec.md:`); - if (p.counts.added) console.log(` + ${p.counts.added} added`); - if (p.counts.modified) console.log(` ~ ${p.counts.modified} modified`); - if (p.counts.removed) console.log(` - ${p.counts.removed} removed`); - if (p.counts.renamed) console.log(` β†’ ${p.counts.renamed} renamed`); + await writeUpdatedSpec(p.update, p.rebuilt); } capabilities.push({ @@ -472,6 +436,9 @@ export async function applySpecs( totals.modified += p.counts.modified; totals.removed += p.counts.removed; totals.renamed += p.counts.renamed; + if (p.ignoredRemovals > 0) { + ignoredRemovals.push({ specName: capability, count: p.ignoredRemovals }); + } } return { @@ -479,5 +446,6 @@ export async function applySpecs( capabilities, totals, noChanges: false, + ignoredRemovals }; } diff --git a/src/core/templates/agents-template.ts b/src/core/templates/agents-template.ts index ad6dbdaef..261aea9df 100644 --- a/src/core/templates/agents-template.ts +++ b/src/core/templates/agents-template.ts @@ -140,6 +140,14 @@ openspec/ β”‚ └── archive/ # Completed changes \`\`\` +## Integration Modes + +### Command Line (CLI) +Standard OpenSpec commands like \`openspec list\`, \`openspec validate\`, etc. require the \`@fission-ai/openspec\` package to be installed. + +### Model Context Protocol (MCP) +If your environment supports MCP (e.g. Claude Code, Gemini CLI with OpenSpec extension), you can use native tools like \`openspec_list_changes\` instead of CLI commands. This enables a zero-install workflow. + ## Creating Change Proposals ### Decision Tree diff --git a/src/core/templates/prompts.ts b/src/core/templates/prompts.ts new file mode 100644 index 000000000..d1e829596 --- /dev/null +++ b/src/core/templates/prompts.ts @@ -0,0 +1,51 @@ +const BT = String.fromCharCode(96); + +export const BASE_GUARDRAILS = `**Guardrails** +- Favor straightforward, minimal implementations first and add complexity only when it is requested or clearly required. +- Keep changes tightly scoped to the requested outcome. +- Refer to ${BT}openspec/AGENTS.md${BT} (located inside the ${BT}openspec/${BT} directoryβ€”run ${BT}ls openspec${BT} or ${BT}openspec update${BT} if you don't see it) if you need additional OpenSpec conventions or clarifications. +- If the ${BT}openspec${BT} CLI is not installed or available in the shell, guide the user to install it globally via ${BT}npm install -g @fission-ai/openspec${BT} before proceeding.`; + +export const PROPOSAL_GUARDRAILS = `${BASE_GUARDRAILS} +- Identify any vague or ambiguous details and ask the necessary follow-up questions before editing files. +- Do not write any code during the proposal stage. Only create design documents (proposal.md, tasks.md, design.md, and spec deltas). Implementation happens in the apply stage after approval.`; + +export const PROPOSAL_STEPS = `**Steps** +1. Review ${BT}openspec/project.md${BT}, run ${BT}openspec list${BT} and ${BT}openspec list --specs${BT}, and inspect related code or docs (e.g., via ${BT}rg${BT}/${BT}ls${BT}) to ground the proposal in current behaviour; note any gaps that require clarification. +2. Choose a unique verb-led ${BT}change-id${BT} and scaffold ${BT}proposal.md${BT}, ${BT}tasks.md${BT}, and ${BT}design.md${BT} (when needed) under ${BT}openspec/changes//${BT}. +3. Map the change into concrete capabilities or requirements, breaking multi-scope efforts into distinct spec deltas with clear relationships and sequencing. +4. Capture architectural reasoning in ${BT}design.md${BT} when the solution spans multiple systems, introduces new patterns, or demands trade-off discussion before committing to specs. +5. Draft spec deltas in ${BT}changes//specs//spec.md${BT} (one folder per capability) using ${BT}## ADDED|MODIFIED|REMOVED Requirements${BT} with at least one ${BT}#### Scenario:${BT} per requirement and cross-reference related capabilities when relevant. +6. Draft ${BT}tasks.md${BT} as an ordered list of small, verifiable work items that deliver user-visible progress, include validation (tests, tooling), and highlight dependencies or parallelizable work. +7. Validate with ${BT}openspec validate --strict${BT} and resolve every issue before sharing the proposal.`; + +export const PROPOSAL_REFERENCES = `**Reference** +- Use ${BT}openspec show --json --deltas-only${BT} or ${BT}openspec show --type spec${BT} to inspect details when validation fails. +- Search existing requirements with ${BT}rg -n "Requirement:|Scenario:" openspec/specs${BT} before writing new ones. +- Explore the codebase with ${BT}rg ${BT}, ${BT}ls${BT}, or direct file reads so proposals align with current implementation realities.`; + +export const APPLY_STEPS = `**Steps** +Track these steps as TODOs and complete them one by one. +1. Read ${BT}changes//proposal.md${BT}, ${BT}design.md${BT} (if present), and ${BT}tasks.md${BT} to confirm scope and acceptance criteria. +2. Work through tasks sequentially, keeping edits minimal and focused on the requested change. +3. Confirm completion before updating statusesβ€”make sure every item in ${BT}tasks.md${BT} is finished. +4. Update the checklist after all work is done so each task is marked ${BT}- [x]${BT} and reflects reality. +5. Reference ${BT}openspec list${BT} or ${BT}openspec show ${BT} when additional context is required.`; + +export const APPLY_REFERENCES = `**Reference** +- Use ${BT}openspec show --json --deltas-only${BT} if you need additional context from the proposal while implementing.`; + +export const ARCHIVE_STEPS = `**Steps** +1. Determine the change ID to archive: + - If this prompt already includes a specific change ID (for example inside a ${BT}${BT} block populated by slash-command arguments), use that value after trimming whitespace. + - If the conversation references a change loosely (for example by title or summary), run ${BT}openspec list${BT} to surface likely IDs, share the relevant candidates, and confirm which one the user intends. + - Otherwise, review the conversation, run ${BT}openspec list${BT}, and ask the user which change to archive; wait for a confirmed change ID before proceeding. + - If you still cannot identify a single change ID, stop and tell the user you cannot archive anything yet. +2. Validate the change ID by running ${BT}openspec list${BT} (or ${BT}openspec show ${BT}) and stop if the change is missing, already archived, or otherwise not ready to archive. +3. Run ${BT}openspec archive --yes${BT} so the CLI moves the change and applies spec updates without prompts (use ${BT}--skip-specs${BT} only for tooling-only work). +4. Review the command output to confirm the target specs were updated and the change landed in ${BT}changes/archive/${BT}. +5. Validate with ${BT}openspec validate --strict${BT} and inspect with ${BT}openspec show ${BT} if anything looks off.`; + +export const ARCHIVE_REFERENCES = `**Reference** +- Use ${BT}openspec list${BT} to confirm change IDs before archiving. +- Inspect refreshed specs with ${BT}openspec list --specs${BT} and address any validation issues before handing off.`; \ No newline at end of file diff --git a/src/core/templates/slash-command-templates.ts b/src/core/templates/slash-command-templates.ts index be21328a1..86f247e07 100644 --- a/src/core/templates/slash-command-templates.ts +++ b/src/core/templates/slash-command-templates.ts @@ -1,58 +1,20 @@ -export type SlashCommandId = 'proposal' | 'apply' | 'archive'; - -const baseGuardrails = `**Guardrails** -- Favor straightforward, minimal implementations first and add complexity only when it is requested or clearly required. -- Keep changes tightly scoped to the requested outcome. -- Refer to \`openspec/AGENTS.md\` (located inside the \`openspec/\` directoryβ€”run \`ls openspec\` or \`openspec update\` if you don't see it) if you need additional OpenSpec conventions or clarifications.`; - -const proposalGuardrails = `${baseGuardrails}\n- Identify any vague or ambiguous details and ask the necessary follow-up questions before editing files. -- Do not write any code during the proposal stage. Only create design documents (proposal.md, tasks.md, design.md, and spec deltas). Implementation happens in the apply stage after approval.`; - -const proposalSteps = `**Steps** -1. Review \`openspec/project.md\`, run \`openspec list\` and \`openspec list --specs\`, and inspect related code or docs (e.g., via \`rg\`/\`ls\`) to ground the proposal in current behaviour; note any gaps that require clarification. -2. Choose a unique verb-led \`change-id\` and scaffold \`proposal.md\`, \`tasks.md\`, and \`design.md\` (when needed) under \`openspec/changes//\`. -3. Map the change into concrete capabilities or requirements, breaking multi-scope efforts into distinct spec deltas with clear relationships and sequencing. -4. Capture architectural reasoning in \`design.md\` when the solution spans multiple systems, introduces new patterns, or demands trade-off discussion before committing to specs. -5. Draft spec deltas in \`changes//specs//spec.md\` (one folder per capability) using \`## ADDED|MODIFIED|REMOVED Requirements\` with at least one \`#### Scenario:\` per requirement and cross-reference related capabilities when relevant. -6. Draft \`tasks.md\` as an ordered list of small, verifiable work items that deliver user-visible progress, include validation (tests, tooling), and highlight dependencies or parallelizable work. -7. Validate with \`openspec validate --strict\` and resolve every issue before sharing the proposal.`; - +import { + PROPOSAL_GUARDRAILS, + PROPOSAL_STEPS, + PROPOSAL_REFERENCES, + BASE_GUARDRAILS, + APPLY_STEPS, + APPLY_REFERENCES, + ARCHIVE_STEPS, + ARCHIVE_REFERENCES +} from './prompts.js'; -const proposalReferences = `**Reference** -- Use \`openspec show --json --deltas-only\` or \`openspec show --type spec\` to inspect details when validation fails. -- Search existing requirements with \`rg -n "Requirement:|Scenario:" openspec/specs\` before writing new ones. -- Explore the codebase with \`rg \`, \`ls\`, or direct file reads so proposals align with current implementation realities.`; - -const applySteps = `**Steps** -Track these steps as TODOs and complete them one by one. -1. Read \`changes//proposal.md\`, \`design.md\` (if present), and \`tasks.md\` to confirm scope and acceptance criteria. -2. Work through tasks sequentially, keeping edits minimal and focused on the requested change. -3. Confirm completion before updating statusesβ€”make sure every item in \`tasks.md\` is finished. -4. Update the checklist after all work is done so each task is marked \`- [x]\` and reflects reality. -5. Reference \`openspec list\` or \`openspec show \` when additional context is required.`; - -const applyReferences = `**Reference** -- Use \`openspec show --json --deltas-only\` if you need additional context from the proposal while implementing.`; - -const archiveSteps = `**Steps** -1. Determine the change ID to archive: - - If this prompt already includes a specific change ID (for example inside a \`\` block populated by slash-command arguments), use that value after trimming whitespace. - - If the conversation references a change loosely (for example by title or summary), run \`openspec list\` to surface likely IDs, share the relevant candidates, and confirm which one the user intends. - - Otherwise, review the conversation, run \`openspec list\`, and ask the user which change to archive; wait for a confirmed change ID before proceeding. - - If you still cannot identify a single change ID, stop and tell the user you cannot archive anything yet. -2. Validate the change ID by running \`openspec list\` (or \`openspec show \`) and stop if the change is missing, already archived, or otherwise not ready to archive. -3. Run \`openspec archive --yes\` so the CLI moves the change and applies spec updates without prompts (use \`--skip-specs\` only for tooling-only work). -4. Review the command output to confirm the target specs were updated and the change landed in \`changes/archive/\`. -5. Validate with \`openspec validate --strict\` and inspect with \`openspec show \` if anything looks off.`; - -const archiveReferences = `**Reference** -- Use \`openspec list\` to confirm change IDs before archiving. -- Inspect refreshed specs with \`openspec list --specs\` and address any validation issues before handing off.`; +export type SlashCommandId = 'proposal' | 'apply' | 'archive'; export const slashCommandBodies: Record = { - proposal: [proposalGuardrails, proposalSteps, proposalReferences].join('\n\n'), - apply: [baseGuardrails, applySteps, applyReferences].join('\n\n'), - archive: [baseGuardrails, archiveSteps, archiveReferences].join('\n\n') + proposal: [PROPOSAL_GUARDRAILS, PROPOSAL_STEPS, PROPOSAL_REFERENCES].join('\n\n'), + apply: [BASE_GUARDRAILS, APPLY_STEPS, APPLY_REFERENCES].join('\n\n'), + archive: [BASE_GUARDRAILS, ARCHIVE_STEPS, ARCHIVE_REFERENCES].join('\n\n') }; export function getSlashCommandBody(id: SlashCommandId): string { diff --git a/src/core/update-logic.ts b/src/core/update-logic.ts new file mode 100644 index 000000000..e4f9d43ed --- /dev/null +++ b/src/core/update-logic.ts @@ -0,0 +1,99 @@ +import path from 'path'; +import { FileSystemUtils } from '../utils/file-system.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; +import { ToolRegistry } from './configurators/registry.js'; +import { SlashCommandRegistry } from './configurators/slash/registry.js'; +import { agentsTemplate } from './templates/agents-template.js'; + +export interface UpdateResult { + openspecPath: string; + updatedFiles: string[]; + createdFiles: string[]; + failedFiles: string[]; + updatedSlashFiles: string[]; + failedSlashTools: string[]; + errorDetails: Record; +} + +export async function runUpdate(projectPath: string): Promise { + const resolvedProjectPath = path.resolve(projectPath); + const openspecPath = await resolveOpenSpecDir(resolvedProjectPath); + + // 1. Check openspec directory exists + if (!await FileSystemUtils.directoryExists(openspecPath)) { + throw new Error(`No OpenSpec directory found. Run 'openspec init' first.`); + } + + // 2. Update AGENTS.md (full replacement) + const agentsPath = path.join(openspecPath, 'AGENTS.md'); + await FileSystemUtils.writeFile(agentsPath, agentsTemplate); + + // 3. Update existing AI tool configuration files only + const configurators = ToolRegistry.getAll(); + const slashConfigurators = SlashCommandRegistry.getAll(); + const updatedFiles: string[] = []; + const createdFiles: string[] = []; + const failedFiles: string[] = []; + const updatedSlashFiles: string[] = []; + const failedSlashTools: string[] = []; + const errorDetails: Record = {}; + + for (const configurator of configurators) { + const configFilePath = path.join( + resolvedProjectPath, + configurator.configFileName + ); + const fileExists = await FileSystemUtils.fileExists(configFilePath); + const shouldConfigure = + fileExists || configurator.configFileName === 'AGENTS.md'; + + if (!shouldConfigure) { + continue; + } + + try { + if (fileExists && !await FileSystemUtils.canWriteFile(configFilePath)) { + throw new Error( + `Insufficient permissions to modify ${configurator.configFileName}` + ); + } + + await configurator.configure(resolvedProjectPath, openspecPath); + updatedFiles.push(configurator.configFileName); + + if (!fileExists) { + createdFiles.push(configurator.configFileName); + } + } catch (error: any) { + failedFiles.push(configurator.configFileName); + errorDetails[configurator.configFileName] = error.message; + } + } + + for (const slashConfigurator of slashConfigurators) { + if (!slashConfigurator.isAvailable) { + continue; + } + + try { + const updated = await slashConfigurator.updateExisting( + resolvedProjectPath, + openspecPath + ); + updatedSlashFiles.push(...updated); + } catch (error: any) { + failedSlashTools.push(slashConfigurator.toolId); + errorDetails[`slash:${slashConfigurator.toolId}`] = error.message; + } + } + + return { + openspecPath, + updatedFiles, + createdFiles, + failedFiles, + updatedSlashFiles, + failedSlashTools, + errorDetails + }; +} diff --git a/src/core/update.ts b/src/core/update.ts deleted file mode 100644 index 41fd77208..000000000 --- a/src/core/update.ts +++ /dev/null @@ -1,129 +0,0 @@ -import path from 'path'; -import { FileSystemUtils } from '../utils/file-system.js'; -import { OPENSPEC_DIR_NAME } from './config.js'; -import { ToolRegistry } from './configurators/registry.js'; -import { SlashCommandRegistry } from './configurators/slash/registry.js'; -import { agentsTemplate } from './templates/agents-template.js'; - -export class UpdateCommand { - async execute(projectPath: string): Promise { - const resolvedProjectPath = path.resolve(projectPath); - const openspecDirName = OPENSPEC_DIR_NAME; - const openspecPath = path.join(resolvedProjectPath, openspecDirName); - - // 1. Check openspec directory exists - if (!await FileSystemUtils.directoryExists(openspecPath)) { - throw new Error(`No OpenSpec directory found. Run 'openspec init' first.`); - } - - // 2. Update AGENTS.md (full replacement) - const agentsPath = path.join(openspecPath, 'AGENTS.md'); - - await FileSystemUtils.writeFile(agentsPath, agentsTemplate); - - // 3. Update existing AI tool configuration files only - const configurators = ToolRegistry.getAll(); - const slashConfigurators = SlashCommandRegistry.getAll(); - const updatedFiles: string[] = []; - const createdFiles: string[] = []; - const failedFiles: string[] = []; - const updatedSlashFiles: string[] = []; - const failedSlashTools: string[] = []; - - for (const configurator of configurators) { - const configFilePath = path.join( - resolvedProjectPath, - configurator.configFileName - ); - const fileExists = await FileSystemUtils.fileExists(configFilePath); - const shouldConfigure = - fileExists || configurator.configFileName === 'AGENTS.md'; - - if (!shouldConfigure) { - continue; - } - - try { - if (fileExists && !await FileSystemUtils.canWriteFile(configFilePath)) { - throw new Error( - `Insufficient permissions to modify ${configurator.configFileName}` - ); - } - - await configurator.configure(resolvedProjectPath, openspecPath); - updatedFiles.push(configurator.configFileName); - - if (!fileExists) { - createdFiles.push(configurator.configFileName); - } - } catch (error) { - failedFiles.push(configurator.configFileName); - console.error( - `Failed to update ${configurator.configFileName}: ${ - error instanceof Error ? error.message : String(error) - }` - ); - } - } - - for (const slashConfigurator of slashConfigurators) { - if (!slashConfigurator.isAvailable) { - continue; - } - - try { - const updated = await slashConfigurator.updateExisting( - resolvedProjectPath, - openspecPath - ); - updatedSlashFiles.push(...updated); - } catch (error) { - failedSlashTools.push(slashConfigurator.toolId); - console.error( - `Failed to update slash commands for ${slashConfigurator.toolId}: ${ - error instanceof Error ? error.message : String(error) - }` - ); - } - } - - const summaryParts: string[] = []; - const instructionFiles: string[] = ['openspec/AGENTS.md']; - - if (updatedFiles.includes('AGENTS.md')) { - instructionFiles.push( - createdFiles.includes('AGENTS.md') ? 'AGENTS.md (created)' : 'AGENTS.md' - ); - } - - summaryParts.push( - `Updated OpenSpec instructions (${instructionFiles.join(', ')})` - ); - - const aiToolFiles = updatedFiles.filter((file) => file !== 'AGENTS.md'); - if (aiToolFiles.length > 0) { - summaryParts.push(`Updated AI tool files: ${aiToolFiles.join(', ')}`); - } - - if (updatedSlashFiles.length > 0) { - // Normalize to forward slashes for cross-platform log consistency - const normalized = updatedSlashFiles.map((p) => FileSystemUtils.toPosixPath(p)); - summaryParts.push(`Updated slash commands: ${normalized.join(', ')}`); - } - - const failedItems = [ - ...failedFiles, - ...failedSlashTools.map( - (toolId) => `slash command refresh (${toolId})` - ), - ]; - - if (failedItems.length > 0) { - summaryParts.push(`Failed to update: ${failedItems.join(', ')}`); - } - - console.log(summaryParts.join(' | ')); - - // No additional notes - } -} diff --git a/src/core/validation-logic.ts b/src/core/validation-logic.ts new file mode 100644 index 000000000..f0cfaaaf9 --- /dev/null +++ b/src/core/validation-logic.ts @@ -0,0 +1,148 @@ +import path from 'path'; +import { Validator } from './validation/validator.js'; +import { getActiveChangeIds, getSpecIds } from '../utils/item-discovery.js'; + +type ItemType = 'change' | 'spec'; + +export interface BulkItemResult { + id: string; + type: ItemType; + valid: boolean; + issues: { level: 'ERROR' | 'WARNING' | 'INFO'; path: string; message: string }[]; + durationMs: number; +} + +export interface BulkValidationSummary { + totals: { items: number; passed: number; failed: number }; + byType: { + change?: { items: number; passed: number; failed: number }; + spec?: { items: number; passed: number; failed: number }; + }; +} + +export interface BulkValidationResult { + items: BulkItemResult[]; + summary: BulkValidationSummary; + version: string; +} + +export async function runBulkValidation( + scope: { changes: boolean; specs: boolean }, + opts: { strict: boolean; concurrency?: string } +): Promise { + const [changeIds, specIds] = await Promise.all([ + scope.changes ? getActiveChangeIds() : Promise.resolve([]), + scope.specs ? getSpecIds() : Promise.resolve([]), + ]); + + const DEFAULT_CONCURRENCY = 6; + const concurrency = normalizeConcurrency(opts.concurrency) ?? normalizeConcurrency(process.env.OPENSPEC_CONCURRENCY) ?? DEFAULT_CONCURRENCY; + const validator = new Validator(opts.strict); + const queue: Array<() => Promise> = []; + + for (const id of changeIds) { + queue.push(async () => { + const start = Date.now(); + const changeDir = path.join(process.cwd(), 'openspec', 'changes', id); + const report = await validator.validateChangeDeltaSpecs(changeDir); + const durationMs = Date.now() - start; + return { id, type: 'change' as const, valid: report.valid, issues: report.issues, durationMs }; + }); + } + for (const id of specIds) { + queue.push(async () => { + const start = Date.now(); + const file = path.join(process.cwd(), 'openspec', 'specs', id, 'spec.md'); + const report = await validator.validateSpec(file); + const durationMs = Date.now() - start; + return { id, type: 'spec' as const, valid: report.valid, issues: report.issues, durationMs }; + }); + } + + const results: BulkItemResult[] = []; + let index = 0; + let running = 0; + let passed = 0; + let failed = 0; + + if (queue.length > 0) { + await new Promise((resolve) => { + const next = () => { + while (running < concurrency && index < queue.length) { + const currentIndex = index++; + const task = queue[currentIndex]; + running++; + task() + .then(res => { + results.push(res); + if (res.valid) passed++; else failed++; + }) + .catch((error: any) => { + const message = error?.message || 'Unknown error'; + const res: BulkItemResult = { + id: getPlannedId(currentIndex, changeIds, specIds) ?? 'unknown', + type: getPlannedType(currentIndex, changeIds, specIds) ?? 'change', + valid: false, + issues: [{ level: 'ERROR', path: 'file', message }], + durationMs: 0 + }; + results.push(res); + failed++; + }) + .finally(() => { + running--; + if (index >= queue.length && running === 0) resolve(); + else next(); + }); + } + }; + next(); + }); + } + + results.sort((a, b) => a.id.localeCompare(b.id)); + + const summary = { + totals: { items: results.length, passed, failed }, + byType: { + ...(scope.changes ? { change: summarizeType(results, 'change') } : {}), + ...(scope.specs ? { spec: summarizeType(results, 'spec') } : {}), + }, + }; + + return { + items: results, + summary, + version: '1.0' + }; +} + +function summarizeType(results: BulkItemResult[], type: ItemType) { + const filtered = results.filter(r => r.type === type); + const items = filtered.length; + const passed = filtered.filter(r => r.valid).length; + const failed = items - passed; + return { items, passed, failed }; +} + +function normalizeConcurrency(value?: string): number | undefined { + if (!value) return undefined; + const n = parseInt(value, 10); + if (Number.isNaN(n) || n <= 0) return undefined; + return n; +} + +function getPlannedId(index: number, changeIds: string[], specIds: string[]): string | undefined { + const totalChanges = changeIds.length; + if (index < totalChanges) return changeIds[index]; + const specIndex = index - totalChanges; + return specIds[specIndex]; +} + +function getPlannedType(index: number, changeIds: string[], specIds: string[]): ItemType | undefined { + const totalChanges = changeIds.length; + if (index < totalChanges) return 'change'; + const specIndex = index - totalChanges; + if (specIndex >= 0 && specIndex < specIds.length) return 'spec'; + return undefined; +} diff --git a/src/core/view-logic.ts b/src/core/view-logic.ts new file mode 100644 index 000000000..decd8d00d --- /dev/null +++ b/src/core/view-logic.ts @@ -0,0 +1,101 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import { getTaskProgressForChange } from '../utils/task-progress.js'; +import { MarkdownParser } from './parsers/markdown-parser.js'; +import { resolveOpenSpecDir } from './path-resolver.js'; + +export interface DashboardData { + changes: { + draft: Array<{ name: string }>; + active: Array<{ name: string; progress: { total: number; completed: number } }>; + completed: Array<{ name: string }>; + }; + specs: Array<{ name: string; requirementCount: number }>; +} + +export async function getViewData(targetPath: string = '.'): Promise { + const openspecDir = await resolveOpenSpecDir(targetPath); + + if (!fs.existsSync(openspecDir)) { + throw new Error('No OpenSpec directory found'); + } + + const changesData = await getChangesData(openspecDir); + const specsData = await getSpecsData(openspecDir); + + return { + changes: changesData, + specs: specsData + }; +} + +async function getChangesData(openspecDir: string): Promise { + const changesDir = path.join(openspecDir, 'changes'); + + if (!fs.existsSync(changesDir)) { + return { draft: [], active: [], completed: [] }; + } + + const draft: Array<{ name: string }> = []; + const active: Array<{ name: string; progress: { total: number; completed: number } }> = []; + const completed: Array<{ name: string }> = []; + + const entries = fs.readdirSync(changesDir, { withFileTypes: true }); + + for (const entry of entries) { + if (entry.isDirectory() && entry.name !== 'archive') { + const progress = await getTaskProgressForChange(changesDir, entry.name); + + if (progress.total === 0) { + draft.push({ name: entry.name }); + } else if (progress.completed === progress.total) { + completed.push({ name: entry.name }); + } else { + active.push({ name: entry.name, progress }); + } + } + } + + draft.sort((a, b) => a.name.localeCompare(b.name)); + active.sort((a, b) => { + const percentageA = a.progress.total > 0 ? a.progress.completed / a.progress.total : 0; + const percentageB = b.progress.total > 0 ? b.progress.completed / b.progress.total : 0; + if (percentageA < percentageB) return -1; + if (percentageA > percentageB) return 1; + return a.name.localeCompare(b.name); + }); + completed.sort((a, b) => a.name.localeCompare(b.name)); + + return { draft, active, completed }; +} + +async function getSpecsData(openspecDir: string): Promise { + const specsDir = path.join(openspecDir, 'specs'); + + if (!fs.existsSync(specsDir)) { + return []; + } + + const specs: Array<{ name: string; requirementCount: number }> = []; + const entries = fs.readdirSync(specsDir, { withFileTypes: true }); + + for (const entry of entries) { + if (entry.isDirectory()) { + const specFile = path.join(specsDir, entry.name, 'spec.md'); + + if (fs.existsSync(specFile)) { + try { + const content = fs.readFileSync(specFile, 'utf-8'); + const parser = new MarkdownParser(content); + const spec = parser.parseSpec(entry.name); + const requirementCount = spec.requirements.length; + specs.push({ name: entry.name, requirementCount }); + } catch (error) { + specs.push({ name: entry.name, requirementCount: 0 }); + } + } + } + } + + return specs; +} diff --git a/src/core/view.ts b/src/core/view.ts deleted file mode 100644 index e67c35268..000000000 --- a/src/core/view.ts +++ /dev/null @@ -1,219 +0,0 @@ -import * as fs from 'fs'; -import * as path from 'path'; -import chalk from 'chalk'; -import { getTaskProgressForChange, formatTaskStatus } from '../utils/task-progress.js'; -import { MarkdownParser } from './parsers/markdown-parser.js'; - -export class ViewCommand { - async execute(targetPath: string = '.'): Promise { - const openspecDir = path.join(targetPath, 'openspec'); - - if (!fs.existsSync(openspecDir)) { - console.error(chalk.red('No openspec directory found')); - process.exit(1); - } - - console.log(chalk.bold('\nOpenSpec Dashboard\n')); - console.log('═'.repeat(60)); - - // Get changes and specs data - const changesData = await this.getChangesData(openspecDir); - const specsData = await this.getSpecsData(openspecDir); - - // Display summary metrics - this.displaySummary(changesData, specsData); - - // Display draft changes - if (changesData.draft.length > 0) { - console.log(chalk.bold.gray('\nDraft Changes')); - console.log('─'.repeat(60)); - changesData.draft.forEach((change) => { - console.log(` ${chalk.gray('β—‹')} ${change.name}`); - }); - } - - // Display active changes - if (changesData.active.length > 0) { - console.log(chalk.bold.cyan('\nActive Changes')); - console.log('─'.repeat(60)); - changesData.active.forEach((change) => { - const progressBar = this.createProgressBar(change.progress.completed, change.progress.total); - const percentage = - change.progress.total > 0 - ? Math.round((change.progress.completed / change.progress.total) * 100) - : 0; - - console.log( - ` ${chalk.yellow('β—‰')} ${chalk.bold(change.name.padEnd(30))} ${progressBar} ${chalk.dim(`${percentage}%`)}` - ); - }); - } - - // Display completed changes - if (changesData.completed.length > 0) { - console.log(chalk.bold.green('\nCompleted Changes')); - console.log('─'.repeat(60)); - changesData.completed.forEach((change) => { - console.log(` ${chalk.green('βœ“')} ${change.name}`); - }); - } - - // Display specifications - if (specsData.length > 0) { - console.log(chalk.bold.blue('\nSpecifications')); - console.log('─'.repeat(60)); - - // Sort specs by requirement count (descending) - specsData.sort((a, b) => b.requirementCount - a.requirementCount); - - specsData.forEach(spec => { - const reqLabel = spec.requirementCount === 1 ? 'requirement' : 'requirements'; - console.log( - ` ${chalk.blue('β–ͺ')} ${chalk.bold(spec.name.padEnd(30))} ${chalk.dim(`${spec.requirementCount} ${reqLabel}`)}` - ); - }); - } - - console.log('\n' + '═'.repeat(60)); - console.log(chalk.dim(`\nUse ${chalk.white('openspec list --changes')} or ${chalk.white('openspec list --specs')} for detailed views`)); - } - - private async getChangesData(openspecDir: string): Promise<{ - draft: Array<{ name: string }>; - active: Array<{ name: string; progress: { total: number; completed: number } }>; - completed: Array<{ name: string }>; - }> { - const changesDir = path.join(openspecDir, 'changes'); - - if (!fs.existsSync(changesDir)) { - return { draft: [], active: [], completed: [] }; - } - - const draft: Array<{ name: string }> = []; - const active: Array<{ name: string; progress: { total: number; completed: number } }> = []; - const completed: Array<{ name: string }> = []; - - const entries = fs.readdirSync(changesDir, { withFileTypes: true }); - - for (const entry of entries) { - if (entry.isDirectory() && entry.name !== 'archive') { - const progress = await getTaskProgressForChange(changesDir, entry.name); - - if (progress.total === 0) { - // No tasks defined yet - still in planning/draft phase - draft.push({ name: entry.name }); - } else if (progress.completed === progress.total) { - // All tasks complete - completed.push({ name: entry.name }); - } else { - // Has tasks but not all complete - active.push({ name: entry.name, progress }); - } - } - } - - // Sort all categories by name for deterministic ordering - draft.sort((a, b) => a.name.localeCompare(b.name)); - - // Sort active changes by completion percentage (ascending) and then by name - active.sort((a, b) => { - const percentageA = a.progress.total > 0 ? a.progress.completed / a.progress.total : 0; - const percentageB = b.progress.total > 0 ? b.progress.completed / b.progress.total : 0; - - if (percentageA < percentageB) return -1; - if (percentageA > percentageB) return 1; - return a.name.localeCompare(b.name); - }); - completed.sort((a, b) => a.name.localeCompare(b.name)); - - return { draft, active, completed }; - } - - private async getSpecsData(openspecDir: string): Promise> { - const specsDir = path.join(openspecDir, 'specs'); - - if (!fs.existsSync(specsDir)) { - return []; - } - - const specs: Array<{ name: string; requirementCount: number }> = []; - const entries = fs.readdirSync(specsDir, { withFileTypes: true }); - - for (const entry of entries) { - if (entry.isDirectory()) { - const specFile = path.join(specsDir, entry.name, 'spec.md'); - - if (fs.existsSync(specFile)) { - try { - const content = fs.readFileSync(specFile, 'utf-8'); - const parser = new MarkdownParser(content); - const spec = parser.parseSpec(entry.name); - const requirementCount = spec.requirements.length; - specs.push({ name: entry.name, requirementCount }); - } catch (error) { - // If spec cannot be parsed, include with 0 count - specs.push({ name: entry.name, requirementCount: 0 }); - } - } - } - } - - return specs; - } - - private displaySummary( - changesData: { draft: any[]; active: any[]; completed: any[] }, - specsData: any[] - ): void { - const totalChanges = - changesData.draft.length + changesData.active.length + changesData.completed.length; - const totalSpecs = specsData.length; - const totalRequirements = specsData.reduce((sum, spec) => sum + spec.requirementCount, 0); - - // Calculate total task progress - let totalTasks = 0; - let completedTasks = 0; - - changesData.active.forEach((change) => { - totalTasks += change.progress.total; - completedTasks += change.progress.completed; - }); - - changesData.completed.forEach(() => { - // Completed changes count as 100% done (we don't know exact task count) - // This is a simplification - }); - - console.log(chalk.bold('Summary:')); - console.log( - ` ${chalk.cyan('●')} Specifications: ${chalk.bold(totalSpecs)} specs, ${chalk.bold(totalRequirements)} requirements` - ); - if (changesData.draft.length > 0) { - console.log(` ${chalk.gray('●')} Draft Changes: ${chalk.bold(changesData.draft.length)}`); - } - console.log( - ` ${chalk.yellow('●')} Active Changes: ${chalk.bold(changesData.active.length)} in progress` - ); - console.log(` ${chalk.green('●')} Completed Changes: ${chalk.bold(changesData.completed.length)}`); - - if (totalTasks > 0) { - const overallProgress = Math.round((completedTasks / totalTasks) * 100); - console.log( - ` ${chalk.magenta('●')} Task Progress: ${chalk.bold(`${completedTasks}/${totalTasks}`)} (${overallProgress}% complete)` - ); - } - } - - private createProgressBar(completed: number, total: number, width: number = 20): string { - if (total === 0) return chalk.dim('─'.repeat(width)); - - const percentage = completed / total; - const filled = Math.round(percentage * width); - const empty = width - filled; - - const filledBar = chalk.green('β–ˆ'.repeat(filled)); - const emptyBar = chalk.dim('β–‘'.repeat(empty)); - - return `[${filledBar}${emptyBar}]`; - } -} \ No newline at end of file diff --git a/src/mcp/index.ts b/src/mcp/index.ts new file mode 100644 index 000000000..7e26d5197 --- /dev/null +++ b/src/mcp/index.ts @@ -0,0 +1,11 @@ +import { OpenSpecMCPServer } from './server.js'; + +async function main() { + const server = new OpenSpecMCPServer(); + await server.start(); +} + +main().catch((error) => { + console.error("Failed to start OpenSpec MCP Server:", error); + process.exit(1); +}); diff --git a/src/mcp/prompts.ts b/src/mcp/prompts.ts new file mode 100644 index 000000000..19dbadf67 --- /dev/null +++ b/src/mcp/prompts.ts @@ -0,0 +1,62 @@ +import { FastMCP } from 'fastmcp'; +import { + PROPOSAL_GUARDRAILS, PROPOSAL_STEPS, PROPOSAL_REFERENCES, + BASE_GUARDRAILS, APPLY_STEPS, APPLY_REFERENCES, + ARCHIVE_STEPS, ARCHIVE_REFERENCES +} from '../core/templates/prompts.js'; + +function toMcpInstructions(text: string): string { + return text + .replace(/openspec list --specs/g, 'openspec_list_specs') + .replace(/openspec list/g, 'openspec_list_changes') + .replace(/openspec show ([^ ]+) --type spec/g, 'openspec_show_spec(id: "$1")') + .replace(/openspec show ([^ ]+) --json --deltas-only/g, 'openspec_show_change(name: "$1")') + .replace(/openspec show ([^ ]+)/g, 'openspec_show_change(name: "$1")') + .replace(/openspec validate ([^ ]+) --strict/g, 'openspec_validate_change(name: "$1", strict: true)') + .replace(/openspec validate --strict/g, 'openspec_validate_change(strict: true)') + .replace(/openspec archive ([^ ]+) --yes/g, 'openspec_archive_change(name: "$1")'); +} + +export function registerPrompts(server: FastMCP) { + server.addPrompt({ + name: "openspec_proposal", + description: "Scaffold a new OpenSpec change proposal", + load: async () => ({ + messages: [{ + role: "user", + content: { + type: "text", + text: toMcpInstructions(`${PROPOSAL_GUARDRAILS}\n\n${PROPOSAL_STEPS}\n\n${PROPOSAL_REFERENCES}`) + } + }] + }) + }); + + server.addPrompt({ + name: "openspec_apply", + description: "Apply an OpenSpec change", + load: async () => ({ + messages: [{ + role: "user", + content: { + type: "text", + text: toMcpInstructions(`${BASE_GUARDRAILS}\n\n${APPLY_STEPS}\n\n${APPLY_REFERENCES}`) + } + }] + }) + }); + + server.addPrompt({ + name: "openspec_archive", + description: "Archive an OpenSpec change", + load: async () => ({ + messages: [{ + role: "user", + content: { + type: "text", + text: toMcpInstructions(`${BASE_GUARDRAILS}\n\n${ARCHIVE_STEPS}\n\n${ARCHIVE_REFERENCES}`) + } + }] + }) + }); +} diff --git a/src/mcp/resources.ts b/src/mcp/resources.ts new file mode 100644 index 000000000..d073ff24c --- /dev/null +++ b/src/mcp/resources.ts @@ -0,0 +1,54 @@ +import { FastMCP } from 'fastmcp'; +import { resolveOpenSpecDir } from '../core/path-resolver.js'; +import path from 'path'; +import fs from 'fs/promises'; + +export function registerResources(server: FastMCP) { + server.addResourceTemplate({ + uriTemplate: "openspec://changes/{name}/proposal", + name: "Change Proposal", + description: "The proposal.md file for a change", + arguments: [{ name: "name", description: "Name of the change", required: true }], + // @ts-expect-error - variables type mismatch in fastmcp + load: async (variables: any) => { + const openspecPath = await resolveOpenSpecDir(process.cwd()); + const filePath = path.join(openspecPath, 'changes', variables.name, 'proposal.md'); + const text = await fs.readFile(filePath, 'utf-8'); + return { + content: [{ uri: `openspec://changes/${variables.name}/proposal`, text }] + }; + } + }); + + server.addResourceTemplate({ + uriTemplate: "openspec://changes/{name}/tasks", + name: "Change Tasks", + description: "The tasks.md file for a change", + arguments: [{ name: "name", description: "Name of the change", required: true }], + // @ts-expect-error - variables type mismatch in fastmcp + load: async (variables: any) => { + const openspecPath = await resolveOpenSpecDir(process.cwd()); + const filePath = path.join(openspecPath, 'changes', variables.name, 'tasks.md'); + const text = await fs.readFile(filePath, 'utf-8'); + return { + content: [{ uri: `openspec://changes/${variables.name}/tasks`, text }] + }; + } + }); + + server.addResourceTemplate({ + uriTemplate: "openspec://specs/{id}", + name: "Specification", + description: "The spec.md file for a capability", + arguments: [{ name: "id", description: "ID of the spec", required: true }], + // @ts-expect-error - variables type mismatch in fastmcp + load: async (variables: any) => { + const openspecPath = await resolveOpenSpecDir(process.cwd()); + const filePath = path.join(openspecPath, 'specs', variables.id, 'spec.md'); + const text = await fs.readFile(filePath, 'utf-8'); + return { + content: [{ uri: `openspec://specs/${variables.id}`, text }] + }; + } + }); +} \ No newline at end of file diff --git a/src/mcp/server.ts b/src/mcp/server.ts new file mode 100644 index 000000000..e378d0864 --- /dev/null +++ b/src/mcp/server.ts @@ -0,0 +1,29 @@ +import { FastMCP } from 'fastmcp'; +import { registerTools } from './tools.js'; +import { registerResources } from './resources.js'; +import { registerPrompts } from './prompts.js'; +import { createRequire } from 'module'; + +const require = createRequire(import.meta.url); +const pkg = require('../../package.json'); + +export class OpenSpecMCPServer { + private server: FastMCP; + + constructor() { + this.server = new FastMCP({ + name: "OpenSpec", + version: pkg.version, + }); + } + + async start() { + registerTools(this.server); + registerResources(this.server); + registerPrompts(this.server); + + await this.server.start({ + transportType: 'stdio', + }); + } +} diff --git a/src/mcp/tools.ts b/src/mcp/tools.ts new file mode 100644 index 000000000..782262405 --- /dev/null +++ b/src/mcp/tools.ts @@ -0,0 +1,414 @@ +import { FastMCP } from 'fastmcp'; +import { z } from 'zod'; +import { listChanges, listSpecs } from '../core/list.js'; +import { Validator } from '../core/validation/validator.js'; +import { resolveOpenSpecDir } from '../core/path-resolver.js'; +import { runInit } from '../core/init-logic.js'; +import { runUpdate } from '../core/update-logic.js'; +import { runArchive } from '../core/archive-logic.js'; +import { runCreateChange, getChangeMarkdown, getChangeJson } from '../core/change-logic.js'; +import { getSpecMarkdown, getSpecJson } from '../core/spec-logic.js'; +import { getViewData } from '../core/view-logic.js'; +import { runBulkValidation } from '../core/validation-logic.js'; +import { getConfigValue, setConfigValue, getConfigList } from '../core/config-logic.js'; +import { getArtifactStatus, getArtifactInstructions, getApplyInstructions, getAvailableSchemas } from '../core/artifact-logic.js'; +import path from 'path'; + +export function registerTools(server: FastMCP) { + server.addTool({ + name: "openspec_init", + description: "Initialize OpenSpec in the current project.", + parameters: z.object({ + tools: z.array(z.string()).optional().describe("AI tools to configure"), + shouldMigrate: z.boolean().optional().default(true).describe("Whether to auto-migrate legacy openspec/ directory") + }), + execute: async (args) => { + try { + const result = await runInit(process.cwd(), args); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error initializing: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_update", + description: "Update OpenSpec instruction files and slash commands.", + parameters: z.object({}), + execute: async () => { + try { + const result = await runUpdate(process.cwd()); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error updating: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_view", + description: "Get dashboard data for specs and changes.", + parameters: z.object({}), + execute: async () => { + try { + const data = await getViewData(process.cwd()); + return { + content: [{ type: "text", text: JSON.stringify(data, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error getting view data: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_create_change", + description: "Scaffold a new OpenSpec change directory.", + parameters: z.object({ + name: z.string().describe("Kebab-case name of the change"), + schema: z.string().optional().default("spec-driven").describe("Workflow schema to use") + }), + execute: async (args) => { + try { + const result = await runCreateChange(process.cwd(), args.name, { schema: args.schema }); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error creating change: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_list_changes", + description: "List active OpenSpec changes.", + parameters: z.object({ + sort: z.enum(['recent', 'name']).optional().default('recent'), + }), + execute: async (args) => { + try { + const changes = await listChanges(process.cwd(), args.sort); + return { + content: [{ type: "text", text: JSON.stringify(changes, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error listing changes: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_list_specs", + description: "List OpenSpec specifications.", + parameters: z.object({}), + execute: async () => { + try { + const specs = await listSpecs(process.cwd()); + return { + content: [{ type: "text", text: JSON.stringify(specs, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error listing specs: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_show_change", + description: "Show details of a change proposal.", + parameters: z.object({ + name: z.string().describe("Name of the change"), + format: z.enum(['json', 'markdown']).optional().default('json') + }), + execute: async (args) => { + try { + if (args.format === 'markdown') { + const content = await getChangeMarkdown(process.cwd(), args.name); + return { content: [{ type: "text", text: content }] }; + } + const data = await getChangeJson(process.cwd(), args.name); + return { content: [{ type: "text", text: JSON.stringify(data, null, 2) }] }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error showing change: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_show_spec", + description: "Show details of a specification.", + parameters: z.object({ + id: z.string().describe("ID of the spec"), + format: z.enum(['json', 'markdown']).optional().default('json') + }), + execute: async (args) => { + try { + if (args.format === 'markdown') { + const content = await getSpecMarkdown(process.cwd(), args.id); + return { content: [{ type: "text", text: content }] }; + } + const data = await getSpecJson(process.cwd(), args.id); + return { content: [{ type: "text", text: JSON.stringify(data, null, 2) }] }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error showing spec: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_validate_change", + description: "Validate a change proposal.", + parameters: z.object({ + name: z.string().describe("Name of the change"), + strict: z.boolean().optional() + }), + execute: async (args) => { + try { + const openspecPath = await resolveOpenSpecDir(process.cwd()); + const changeDir = path.join(openspecPath, 'changes', args.name); + const validator = new Validator(args.strict); + const report = await validator.validateChangeDeltaSpecs(changeDir); + return { + content: [{ type: "text", text: JSON.stringify(report, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error validating change: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_validate_all", + description: "Bulk validate changes and/or specs.", + parameters: z.object({ + changes: z.boolean().optional().default(true).describe("Validate changes"), + specs: z.boolean().optional().default(true).describe("Validate specs"), + strict: z.boolean().optional().default(false).describe("Enable strict validation"), + concurrency: z.string().optional().describe("Concurrency limit") + }), + execute: async (args) => { + try { + const result = await runBulkValidation({ changes: args.changes, specs: args.specs }, { strict: args.strict, concurrency: args.concurrency }); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error running validation: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_archive_change", + description: "Archive a completed change and update main specs.", + parameters: z.object({ + name: z.string().describe("Name of the change"), + skipSpecs: z.boolean().optional().default(false), + noValidate: z.boolean().optional().default(false), + }), + execute: async (args) => { + try { + const result = await runArchive(args.name, { + skipSpecs: args.skipSpecs, + noValidate: args.noValidate + }); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error archiving change: ${error.message}` }] + }; + } + } + }); + + // Config Tools + server.addTool({ + name: "openspec_config_get", + description: "Get a configuration value.", + parameters: z.object({ + key: z.string().describe("Configuration key (dot notation)") + }), + execute: async (args) => { + try { + const value = getConfigValue(args.key); + return { + content: [{ type: "text", text: JSON.stringify(value) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error getting config: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_config_set", + description: "Set a configuration value.", + parameters: z.object({ + key: z.string().describe("Configuration key (dot notation)"), + value: z.string().describe("Value to set"), + forceString: z.boolean().optional().describe("Force value to be stored as string"), + allowUnknown: z.boolean().optional().describe("Allow setting unknown keys") + }), + execute: async (args) => { + try { + const result = setConfigValue(args.key, args.value, { forceString: args.forceString, allowUnknown: args.allowUnknown }); + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error setting config: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_config_list", + description: "List all configuration values.", + parameters: z.object({}), + execute: async () => { + try { + const config = getConfigList(); + return { + content: [{ type: "text", text: JSON.stringify(config, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error listing config: ${error.message}` }] + }; + } + } + }); + + // Artifact Workflow Tools + server.addTool({ + name: "openspec_artifact_status", + description: "Get status of artifacts in a change.", + parameters: z.object({ + changeName: z.string().describe("Name of the change"), + schemaName: z.string().optional().describe("Schema override") + }), + execute: async (args) => { + try { + const status = await getArtifactStatus(process.cwd(), args.changeName, args.schemaName); + return { + content: [{ type: "text", text: JSON.stringify(status, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error getting status: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_artifact_instructions", + description: "Get instructions for creating an artifact.", + parameters: z.object({ + artifactId: z.string().describe("ID of the artifact"), + changeName: z.string().describe("Name of the change"), + schemaName: z.string().optional().describe("Schema override") + }), + execute: async (args) => { + try { + const instructions = await getArtifactInstructions(process.cwd(), args.artifactId, args.changeName, args.schemaName); + return { + content: [{ type: "text", text: JSON.stringify(instructions, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error getting instructions: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_apply_instructions", + description: "Get instructions for applying tasks.", + parameters: z.object({ + changeName: z.string().describe("Name of the change"), + schemaName: z.string().optional().describe("Schema override") + }), + execute: async (args) => { + try { + const instructions = await getApplyInstructions(process.cwd(), args.changeName, args.schemaName); + return { + content: [{ type: "text", text: JSON.stringify(instructions, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error getting apply instructions: ${error.message}` }] + }; + } + } + }); + + server.addTool({ + name: "openspec_list_schemas", + description: "List available workflow schemas.", + parameters: z.object({}), + execute: async () => { + try { + const schemas = getAvailableSchemas(); + return { + content: [{ type: "text", text: JSON.stringify(schemas, null, 2) }] + }; + } catch (error: any) { + return { + isError: true, + content: [{ type: "text", text: `Error listing schemas: ${error.message}` }] + }; + } + } + }); +} diff --git a/src/utils/file-system.ts b/src/utils/file-system.ts index f086a4973..08409e9e3 100644 --- a/src/utils/file-system.ts +++ b/src/utils/file-system.ts @@ -81,6 +81,10 @@ export class FileSystemUtils { await fs.mkdir(dirPath, { recursive: true }); } + static async rename(oldPath: string, newPath: string): Promise { + await fs.rename(oldPath, newPath); + } + static async fileExists(filePath: string): Promise { try { await fs.access(filePath); diff --git a/test/core/archive.test.ts b/test/core/archive.test.ts index 597dbfb2f..4c35d8e7a 100644 --- a/test/core/archive.test.ts +++ b/test/core/archive.test.ts @@ -1,807 +1,46 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; -import { ArchiveCommand } from '../../src/core/archive.js'; -import { Validator } from '../../src/core/validation/validator.js'; +import { runArchive } from '../../src/core/archive-logic.js'; import { promises as fs } from 'fs'; import path from 'path'; import os from 'os'; -// Mock @inquirer/prompts -vi.mock('@inquirer/prompts', () => ({ - select: vi.fn(), - confirm: vi.fn() -})); - -describe('ArchiveCommand', () => { +describe('runArchive', () => { let tempDir: string; - let archiveCommand: ArchiveCommand; - const originalConsoleLog = console.log; beforeEach(async () => { - // Create temp directory tempDir = path.join(os.tmpdir(), `openspec-archive-test-${Date.now()}`); await fs.mkdir(tempDir, { recursive: true }); - // Change to temp directory - process.chdir(tempDir); - - // Create OpenSpec structure - const openspecDir = path.join(tempDir, 'openspec'); - await fs.mkdir(path.join(openspecDir, 'changes'), { recursive: true }); - await fs.mkdir(path.join(openspecDir, 'specs'), { recursive: true }); - await fs.mkdir(path.join(openspecDir, 'changes', 'archive'), { recursive: true }); - - // Suppress console.log during tests - console.log = vi.fn(); - - archiveCommand = new ArchiveCommand(); + // Create openspec structure + const openspecPath = path.join(tempDir, 'openspec'); + await fs.mkdir(path.join(openspecPath, 'changes'), { recursive: true }); + await fs.mkdir(path.join(openspecPath, 'specs'), { recursive: true }); }); afterEach(async () => { - // Restore console.log - console.log = originalConsoleLog; - - // Clear mocks - vi.clearAllMocks(); - - // Clean up temp directory - try { - await fs.rm(tempDir, { recursive: true, force: true }); - } catch (error) { - // Ignore cleanup errors - } - }); - - describe('execute', () => { - it('should archive a change successfully', async () => { - // Create a test change - const changeName = 'test-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Create tasks.md with completed tasks - const tasksContent = '- [x] Task 1\n- [x] Task 2'; - await fs.writeFile(path.join(changeDir, 'tasks.md'), tasksContent); - - // Execute archive with --yes flag - await archiveCommand.execute(changeName, { yes: true }); - - // Check that change was moved to archive - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - - expect(archives.length).toBe(1); - expect(archives[0]).toMatch(new RegExp(`\\d{4}-\\d{2}-\\d{2}-${changeName}`)); - - // Verify original change directory no longer exists - await expect(fs.access(changeDir)).rejects.toThrow(); - }); - - it('should warn about incomplete tasks', async () => { - const changeName = 'incomplete-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Create tasks.md with incomplete tasks - const tasksContent = '- [x] Task 1\n- [ ] Task 2\n- [ ] Task 3'; - await fs.writeFile(path.join(changeDir, 'tasks.md'), tasksContent); - - // Execute archive with --yes flag - await archiveCommand.execute(changeName, { yes: true }); - - // Verify warning was logged - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('Warning: 2 incomplete task(s) found') - ); - }); - - it('should update specs when archiving (delta-based ADDED) and include change name in skeleton', async () => { - const changeName = 'spec-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'test-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create delta-based change spec (ADDED requirement) - const specContent = `# Test Capability Spec - Changes - -## ADDED Requirements - -### Requirement: The system SHALL provide test capability - -#### Scenario: Basic test -Given a test condition -When an action occurs -Then expected result happens`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Execute archive with --yes flag and skip validation for speed - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Verify spec was created from skeleton and ADDED requirement applied - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'test-capability', 'spec.md'); - const updatedContent = await fs.readFile(mainSpecPath, 'utf-8'); - expect(updatedContent).toContain('# test-capability Specification'); - expect(updatedContent).toContain('## Purpose'); - expect(updatedContent).toContain(`created by archiving change ${changeName}`); - expect(updatedContent).toContain('## Requirements'); - expect(updatedContent).toContain('### Requirement: The system SHALL provide test capability'); - expect(updatedContent).toContain('#### Scenario: Basic test'); - }); - - it('should allow REMOVED requirements when creating new spec file (issue #403)', async () => { - const changeName = 'new-spec-with-removed'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'gift-card'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create delta spec with both ADDED and REMOVED requirements - // This simulates refactoring where old fields are removed and new ones are added - const specContent = `# Gift Card - Changes - -## ADDED Requirements - -### Requirement: Logo and Background Color -The system SHALL support logo and backgroundColor fields for gift cards. - -#### Scenario: Display gift card with logo -- **WHEN** a gift card is displayed -- **THEN** it shows the logo and backgroundColor - -## REMOVED Requirements - -### Requirement: Image Field -### Requirement: Thumbnail Field`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Execute archive - should succeed with warning about REMOVED requirements - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Verify warning was logged about REMOVED requirements being ignored - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('Warning: gift-card - 2 REMOVED requirement(s) ignored for new spec (nothing to remove).') - ); - - // Verify spec was created with only ADDED requirements - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'gift-card', 'spec.md'); - const updatedContent = await fs.readFile(mainSpecPath, 'utf-8'); - expect(updatedContent).toContain('# gift-card Specification'); - expect(updatedContent).toContain('### Requirement: Logo and Background Color'); - expect(updatedContent).toContain('#### Scenario: Display gift card with logo'); - // REMOVED requirements should not be in the final spec - expect(updatedContent).not.toContain('### Requirement: Image Field'); - expect(updatedContent).not.toContain('### Requirement: Thumbnail Field'); - - // Verify change was archived successfully - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBeGreaterThan(0); - expect(archives.some(a => a.includes(changeName))).toBe(true); - }); - - it('should still error on MODIFIED when creating new spec file', async () => { - const changeName = 'new-spec-with-modified'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'new-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create delta spec with MODIFIED requirement (should fail for new spec) - const specContent = `# New Capability - Changes - -## ADDED Requirements - -### Requirement: New Feature -New feature description. - -## MODIFIED Requirements - -### Requirement: Existing Feature -Modified content.`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Execute archive - should abort with error message (not throw, but log and return) - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Verify error message mentions MODIFIED not allowed for new specs - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('new-capability: target spec does not exist; only ADDED requirements are allowed for new specs. MODIFIED and RENAMED operations require an existing spec.') - ); - expect(console.log).toHaveBeenCalledWith('Aborted. No files were changed.'); - - // Verify spec was NOT created - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'new-capability', 'spec.md'); - await expect(fs.access(mainSpecPath)).rejects.toThrow(); - - // Verify change was NOT archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.some(a => a.includes(changeName))).toBe(false); - }); - - it('should still error on RENAMED when creating new spec file', async () => { - const changeName = 'new-spec-with-renamed'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'another-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create delta spec with RENAMED requirement (should fail for new spec) - const specContent = `# Another Capability - Changes - -## ADDED Requirements - -### Requirement: New Feature -New feature description. - -## RENAMED Requirements -- FROM: \`### Requirement: Old Name\` -- TO: \`### Requirement: New Name\``; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Execute archive - should abort with error message (not throw, but log and return) - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Verify error message mentions RENAMED not allowed for new specs - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('another-capability: target spec does not exist; only ADDED requirements are allowed for new specs. MODIFIED and RENAMED operations require an existing spec.') - ); - expect(console.log).toHaveBeenCalledWith('Aborted. No files were changed.'); - - // Verify spec was NOT created - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'another-capability', 'spec.md'); - await expect(fs.access(mainSpecPath)).rejects.toThrow(); - - // Verify change was NOT archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.some(a => a.includes(changeName))).toBe(false); - }); - - it('should throw error if change does not exist', async () => { - await expect( - archiveCommand.execute('non-existent-change', { yes: true }) - ).rejects.toThrow("Change 'non-existent-change' not found."); - }); - - it('should throw error if archive already exists', async () => { - const changeName = 'duplicate-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Create existing archive with same date - const date = new Date().toISOString().split('T')[0]; - const archivePath = path.join(tempDir, 'openspec', 'changes', 'archive', `${date}-${changeName}`); - await fs.mkdir(archivePath, { recursive: true }); - - // Try to archive - await expect( - archiveCommand.execute(changeName, { yes: true }) - ).rejects.toThrow(`Archive '${date}-${changeName}' already exists.`); - }); - - it('should handle changes without tasks.md', async () => { - const changeName = 'no-tasks-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Execute archive without tasks.md - await archiveCommand.execute(changeName, { yes: true }); - - // Should complete without warnings - expect(console.log).not.toHaveBeenCalledWith( - expect.stringContaining('incomplete task(s)') - ); - - // Verify change was archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBe(1); - }); - - it('should handle changes without specs', async () => { - const changeName = 'no-specs-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Execute archive without specs - await archiveCommand.execute(changeName, { yes: true }); - - // Should complete without spec updates - expect(console.log).not.toHaveBeenCalledWith( - expect.stringContaining('Specs to update') - ); - - // Verify change was archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBe(1); - }); - - it('should skip spec updates when --skip-specs flag is used', async () => { - const changeName = 'skip-specs-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'test-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create spec in change - const specContent = '# Test Capability Spec\n\nTest content'; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Execute archive with --skip-specs flag and noValidate to skip validation - await archiveCommand.execute(changeName, { yes: true, skipSpecs: true, noValidate: true }); - - // Verify skip message was logged - expect(console.log).toHaveBeenCalledWith( - 'Skipping spec updates (--skip-specs flag provided).' - ); - - // Verify spec was NOT copied to main specs - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'test-capability', 'spec.md'); - await expect(fs.access(mainSpecPath)).rejects.toThrow(); - - // Verify change was still archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBe(1); - expect(archives[0]).toMatch(new RegExp(`\\d{4}-\\d{2}-\\d{2}-${changeName}`)); - }); - - it('should skip validation when commander sets validate to false (--no-validate)', async () => { - const changeName = 'skip-validation-flag'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'unstable-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - const deltaSpec = `# Unstable Capability - -## ADDED Requirements - -### Requirement: Logging Feature -**ID**: REQ-LOG-001 - -The system will log all events. - -#### Scenario: Event recorded -- **WHEN** an event occurs -- **THEN** it is captured`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), deltaSpec); - await fs.writeFile(path.join(changeDir, 'tasks.md'), '- [x] Task 1\n'); - - const deltaSpy = vi.spyOn(Validator.prototype, 'validateChangeDeltaSpecs'); - const specContentSpy = vi.spyOn(Validator.prototype, 'validateSpecContent'); - - try { - await archiveCommand.execute(changeName, { yes: true, skipSpecs: true, validate: false }); - - expect(deltaSpy).not.toHaveBeenCalled(); - expect(specContentSpy).not.toHaveBeenCalled(); - - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBe(1); - expect(archives[0]).toMatch(new RegExp(`\\d{4}-\\d{2}-\\d{2}-${changeName}`)); - } finally { - deltaSpy.mockRestore(); - specContentSpy.mockRestore(); - } - }); - - it('should proceed with archive when user declines spec updates', async () => { - const { confirm } = await import('@inquirer/prompts'); - const mockConfirm = confirm as unknown as ReturnType; - - const changeName = 'decline-specs-feature'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'test-capability'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create valid spec in change - const specContent = `# Test Capability Spec - -## Purpose -This is a test capability specification. - -## Requirements - -### The system SHALL provide test capability - -#### Scenario: Basic test -Given a test condition -When an action occurs -Then expected result happens`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), specContent); - - // Mock confirm to return false (decline spec updates) - mockConfirm.mockResolvedValueOnce(false); - - // Execute archive without --yes flag - await archiveCommand.execute(changeName); - - // Verify user was prompted about specs - expect(mockConfirm).toHaveBeenCalledWith({ - message: 'Proceed with spec updates?', - default: true - }); - - // Verify skip message was logged - expect(console.log).toHaveBeenCalledWith( - 'Skipping spec updates. Proceeding with archive.' - ); - - // Verify spec was NOT copied to main specs - const mainSpecPath = path.join(tempDir, 'openspec', 'specs', 'test-capability', 'spec.md'); - await expect(fs.access(mainSpecPath)).rejects.toThrow(); - - // Verify change was still archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives.length).toBe(1); - expect(archives[0]).toMatch(new RegExp(`\\d{4}-\\d{2}-\\d{2}-${changeName}`)); - }); - - it('should support header trim-only normalization for matching', async () => { - const changeName = 'normalize-headers'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'alpha'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Create existing main spec with a requirement (no extra trailing spaces) - const mainSpecDir = path.join(tempDir, 'openspec', 'specs', 'alpha'); - await fs.mkdir(mainSpecDir, { recursive: true }); - const mainContent = `# alpha Specification - -## Purpose -Alpha purpose. - -## Requirements - -### Requirement: Important Rule -Some details.`; - await fs.writeFile(path.join(mainSpecDir, 'spec.md'), mainContent); - - // Change attempts to modify the same requirement but with trailing spaces after the name - const deltaContent = `# Alpha - Changes - -## MODIFIED Requirements - -### Requirement: Important Rule -Updated details.`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), deltaContent); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - const updated = await fs.readFile(path.join(mainSpecDir, 'spec.md'), 'utf-8'); - expect(updated).toContain('### Requirement: Important Rule'); - expect(updated).toContain('Updated details.'); - }); - - it('should apply operations in order: RENAMED β†’ REMOVED β†’ MODIFIED β†’ ADDED', async () => { - const changeName = 'apply-order'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'beta'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Main spec with two requirements A and B - const mainSpecDir = path.join(tempDir, 'openspec', 'specs', 'beta'); - await fs.mkdir(mainSpecDir, { recursive: true }); - const mainContent = `# beta Specification - -## Purpose -Beta purpose. - -## Requirements - -### Requirement: A -content A - -### Requirement: B -content B`; - await fs.writeFile(path.join(mainSpecDir, 'spec.md'), mainContent); - - // Rename A->C, Remove B, Modify C, Add D - const deltaContent = `# Beta - Changes - -## RENAMED Requirements -- FROM: \`### Requirement: A\` -- TO: \`### Requirement: C\` - -## REMOVED Requirements -### Requirement: B - -## MODIFIED Requirements -### Requirement: C -updated C - -## ADDED Requirements -### Requirement: D -content D`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), deltaContent); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - const updated = await fs.readFile(path.join(mainSpecDir, 'spec.md'), 'utf-8'); - expect(updated).toContain('### Requirement: C'); - expect(updated).toContain('updated C'); - expect(updated).toContain('### Requirement: D'); - expect(updated).not.toContain('### Requirement: A'); - expect(updated).not.toContain('### Requirement: B'); - }); - - it('should abort with error when MODIFIED/REMOVED reference non-existent requirements', async () => { - const changeName = 'validate-missing'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'gamma'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Main spec with no requirements - const mainSpecDir = path.join(tempDir, 'openspec', 'specs', 'gamma'); - await fs.mkdir(mainSpecDir, { recursive: true }); - const mainContent = `# gamma Specification - -## Purpose -Gamma purpose. - -## Requirements`; - await fs.writeFile(path.join(mainSpecDir, 'spec.md'), mainContent); - - // Delta tries to modify and remove non-existent requirement - const deltaContent = `# Gamma - Changes - -## MODIFIED Requirements -### Requirement: Missing -new text - -## REMOVED Requirements -### Requirement: Another Missing`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), deltaContent); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Should not change the main spec and should not archive the change dir - const still = await fs.readFile(path.join(mainSpecDir, 'spec.md'), 'utf-8'); - expect(still).toBe(mainContent); - // Change dir should still exist since operation aborted - await expect(fs.access(changeDir)).resolves.not.toThrow(); - }); - - it('should require MODIFIED to reference the NEW header when a rename exists (error format)', async () => { - const changeName = 'rename-modify-new-header'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const changeSpecDir = path.join(changeDir, 'specs', 'delta'); - await fs.mkdir(changeSpecDir, { recursive: true }); - - // Main spec with Old - const mainSpecDir = path.join(tempDir, 'openspec', 'specs', 'delta'); - await fs.mkdir(mainSpecDir, { recursive: true }); - const mainContent = `# delta Specification - -## Purpose -Delta purpose. - -## Requirements - -### Requirement: Old -old body`; - await fs.writeFile(path.join(mainSpecDir, 'spec.md'), mainContent); - - // Delta: rename Old->New, but MODIFIED references Old (should abort) - const badDelta = `# Delta - Changes - -## RENAMED Requirements -- FROM: \`### Requirement: Old\` -- TO: \`### Requirement: New\` - -## MODIFIED Requirements -### Requirement: Old -new body`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), badDelta); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - const unchanged = await fs.readFile(path.join(mainSpecDir, 'spec.md'), 'utf-8'); - expect(unchanged).toBe(mainContent); - // Assert error message format and abort notice - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('delta validation failed') - ); - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('Aborted. No files were changed.') - ); - - // Fix MODIFIED to reference New (should succeed) - const goodDelta = `# Delta - Changes - -## RENAMED Requirements -- FROM: \`### Requirement: Old\` -- TO: \`### Requirement: New\` - -## MODIFIED Requirements -### Requirement: New -new body`; - await fs.writeFile(path.join(changeSpecDir, 'spec.md'), goodDelta); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - const updated = await fs.readFile(path.join(mainSpecDir, 'spec.md'), 'utf-8'); - expect(updated).toContain('### Requirement: New'); - expect(updated).toContain('new body'); - expect(updated).not.toContain('### Requirement: Old'); - }); - - it('should process multiple specs atomically (any failure aborts all)', async () => { - const changeName = 'multi-spec-atomic'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const spec1Dir = path.join(changeDir, 'specs', 'epsilon'); - const spec2Dir = path.join(changeDir, 'specs', 'zeta'); - await fs.mkdir(spec1Dir, { recursive: true }); - await fs.mkdir(spec2Dir, { recursive: true }); - - // Existing main specs - const epsilonMain = path.join(tempDir, 'openspec', 'specs', 'epsilon', 'spec.md'); - await fs.mkdir(path.dirname(epsilonMain), { recursive: true }); - await fs.writeFile(epsilonMain, `# epsilon Specification - -## Purpose -Epsilon purpose. - -## Requirements - -### Requirement: E1 -e1`); - - const zetaMain = path.join(tempDir, 'openspec', 'specs', 'zeta', 'spec.md'); - await fs.mkdir(path.dirname(zetaMain), { recursive: true }); - await fs.writeFile(zetaMain, `# zeta Specification - -## Purpose -Zeta purpose. - -## Requirements - -### Requirement: Z1 -z1`); - - // Delta: epsilon is valid modification; zeta tries to remove non-existent -> should abort both - await fs.writeFile(path.join(spec1Dir, 'spec.md'), `# Epsilon - Changes - -## MODIFIED Requirements -### Requirement: E1 -E1 updated`); - - await fs.writeFile(path.join(spec2Dir, 'spec.md'), `# Zeta - Changes - -## REMOVED Requirements -### Requirement: Missing`); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - const e1 = await fs.readFile(epsilonMain, 'utf-8'); - const z1 = await fs.readFile(zetaMain, 'utf-8'); - expect(e1).toContain('### Requirement: E1'); - expect(e1).not.toContain('E1 updated'); - expect(z1).toContain('### Requirement: Z1'); - // changeDir should still exist - await expect(fs.access(changeDir)).resolves.not.toThrow(); - }); - - it('should display aggregated totals across multiple specs', async () => { - const changeName = 'multi-spec-totals'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - const spec1Dir = path.join(changeDir, 'specs', 'omega'); - const spec2Dir = path.join(changeDir, 'specs', 'psi'); - await fs.mkdir(spec1Dir, { recursive: true }); - await fs.mkdir(spec2Dir, { recursive: true }); - - // Existing main specs - const omegaMain = path.join(tempDir, 'openspec', 'specs', 'omega', 'spec.md'); - await fs.mkdir(path.dirname(omegaMain), { recursive: true }); - await fs.writeFile(omegaMain, `# omega Specification\n\n## Purpose\nOmega purpose.\n\n## Requirements\n\n### Requirement: O1\no1`); - - const psiMain = path.join(tempDir, 'openspec', 'specs', 'psi', 'spec.md'); - await fs.mkdir(path.dirname(psiMain), { recursive: true }); - await fs.writeFile(psiMain, `# psi Specification\n\n## Purpose\nPsi purpose.\n\n## Requirements\n\n### Requirement: P1\np1`); - - // Deltas: omega add one, psi rename and modify -> totals: +1, ~1, -0, β†’1 - await fs.writeFile(path.join(spec1Dir, 'spec.md'), `# Omega - Changes\n\n## ADDED Requirements\n\n### Requirement: O2\nnew`); - await fs.writeFile(path.join(spec2Dir, 'spec.md'), `# Psi - Changes\n\n## RENAMED Requirements\n- FROM: \`### Requirement: P1\`\n- TO: \`### Requirement: P2\`\n\n## MODIFIED Requirements\n### Requirement: P2\nupdated`); - - await archiveCommand.execute(changeName, { yes: true, noValidate: true }); - - // Verify aggregated totals line was printed - expect(console.log).toHaveBeenCalledWith( - expect.stringContaining('Totals: + 1, ~ 1, - 0, β†’ 1') - ); - }); + await fs.rm(tempDir, { recursive: true, force: true }); }); - describe('error handling', () => { - it('should throw error when openspec directory does not exist', async () => { - // Remove openspec directory - await fs.rm(path.join(tempDir, 'openspec'), { recursive: true }); - - await expect( - archiveCommand.execute('any-change', { yes: true }) - ).rejects.toThrow("No OpenSpec changes directory found. Run 'openspec init' first."); - }); + it('should fail if change does not exist', async () => { + process.chdir(tempDir); + await expect(runArchive('nonexistent')).rejects.toThrow(/Change 'nonexistent' not found/); }); - describe('interactive mode', () => { - it('should use select prompt for change selection', async () => { - const { select } = await import('@inquirer/prompts'); - const mockSelect = select as unknown as ReturnType; - - // Create test changes - const change1 = 'feature-a'; - const change2 = 'feature-b'; - await fs.mkdir(path.join(tempDir, 'openspec', 'changes', change1), { recursive: true }); - await fs.mkdir(path.join(tempDir, 'openspec', 'changes', change2), { recursive: true }); - - // Mock select to return first change - mockSelect.mockResolvedValueOnce(change1); - - // Execute without change name - await archiveCommand.execute(undefined, { yes: true }); - - // Verify select was called with correct options (values matter, names may include progress) - expect(mockSelect).toHaveBeenCalledWith(expect.objectContaining({ - message: 'Select a change to archive', - choices: expect.arrayContaining([ - expect.objectContaining({ value: change1 }), - expect.objectContaining({ value: change2 }) - ]) - })); - - // Verify the selected change was archived - const archiveDir = path.join(tempDir, 'openspec', 'changes', 'archive'); - const archives = await fs.readdir(archiveDir); - expect(archives[0]).toContain(change1); - }); + it('should archive a completed change', async () => { + const changesDir = path.join(tempDir, 'openspec', 'changes'); + const changePath = path.join(changesDir, 'my-change'); + await fs.mkdir(changePath, { recursive: true }); + await fs.writeFile(path.join(changePath, 'tasks.md'), '- [x] task 1'); + await fs.writeFile(path.join(changePath, 'proposal.md'), '# Proposal'); - it('should use confirm prompt for task warnings', async () => { - const { confirm } = await import('@inquirer/prompts'); - const mockConfirm = confirm as unknown as ReturnType; - - const changeName = 'incomplete-interactive'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Create tasks.md with incomplete tasks - const tasksContent = '- [ ] Task 1'; - await fs.writeFile(path.join(changeDir, 'tasks.md'), tasksContent); - - // Mock confirm to return true (proceed) - mockConfirm.mockResolvedValueOnce(true); - - // Execute without --yes flag - await archiveCommand.execute(changeName); - - // Verify confirm was called - expect(mockConfirm).toHaveBeenCalledWith({ - message: 'Warning: 1 incomplete task(s) found. Continue?', - default: false - }); - }); + process.chdir(tempDir); + const result = await runArchive('my-change', { noValidate: true }); - it('should cancel when user declines task warning', async () => { - const { confirm } = await import('@inquirer/prompts'); - const mockConfirm = confirm as unknown as ReturnType; - - const changeName = 'cancel-test'; - const changeDir = path.join(tempDir, 'openspec', 'changes', changeName); - await fs.mkdir(changeDir, { recursive: true }); - - // Create tasks.md with incomplete tasks - const tasksContent = '- [ ] Task 1'; - await fs.writeFile(path.join(changeDir, 'tasks.md'), tasksContent); - - // Mock confirm to return false (cancel) for validation skip - mockConfirm.mockResolvedValueOnce(false); - // Mock another false for task warning - mockConfirm.mockResolvedValueOnce(false); - - // Execute without --yes flag but skip validation to test task warning - await archiveCommand.execute(changeName, { noValidate: true }); - - // Verify archive was cancelled - expect(console.log).toHaveBeenCalledWith('Archive cancelled.'); - - // Verify change was not archived - await expect(fs.access(changeDir)).resolves.not.toThrow(); - }); + expect(result.changeName).toBe('my-change'); + expect(result.archiveName).toMatch(/\d{4}-\d{2}-\d{2}-my-change/); + + const archivePath = path.join(changesDir, 'archive', result.archiveName); + expect(await fs.stat(archivePath)).toBeDefined(); + expect(await fs.stat(path.join(archivePath, 'tasks.md'))).toBeDefined(); }); -}); +}); \ No newline at end of file diff --git a/test/core/change-logic.test.ts b/test/core/change-logic.test.ts new file mode 100644 index 000000000..f3407c6fc --- /dev/null +++ b/test/core/change-logic.test.ts @@ -0,0 +1,91 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import path from 'path'; +import { promises as fs } from 'fs'; +import os from 'os'; +import { + getActiveChanges, + getChangeMarkdown, + getChangeJson, + getChangeDetails, + validateChange, + runCreateChange +} from '../../src/core/change-logic.js'; + +describe('Core Change Logic', () => { + let tempRoot: string; + let originalCwd: string; + const changeName = 'demo-change'; + + beforeAll(async () => { + originalCwd = process.cwd(); + tempRoot = path.join(os.tmpdir(), `openspec-core-change-logic-${Date.now()}`); + // Simulate project structure + await fs.mkdir(path.join(tempRoot, 'openspec', 'changes'), { recursive: true }); + // Write a dummy project config to ensure resolveOpenSpecDir works if it checks for project root markers + await fs.writeFile(path.join(tempRoot, 'package.json'), '{}', 'utf-8'); + process.chdir(tempRoot); + + // Create a demo change manually to test retrieval + const changeDir = path.join(tempRoot, 'openspec', 'changes', changeName); + await fs.mkdir(changeDir, { recursive: true }); + const proposal = `# Change: Demo Change\n\n## Why\nTest core logic.\n\n## What Changes\n- **auth:** Add requirement`; + await fs.writeFile(path.join(changeDir, 'proposal.md'), proposal, 'utf-8'); + await fs.writeFile(path.join(changeDir, 'tasks.md'), '- [x] Task 1\n- [ ] Task 2\n', 'utf-8'); + }); + + afterAll(async () => { + process.chdir(originalCwd); + await fs.rm(tempRoot, { recursive: true, force: true }); + }); + + it('getActiveChanges returns list of change IDs', async () => { + const changes = await getActiveChanges(tempRoot); + expect(changes).toContain(changeName); + }); + + it('getChangeMarkdown returns content of proposal.md', async () => { + const content = await getChangeMarkdown(tempRoot, changeName); + expect(content).toContain('# Change: Demo Change'); + }); + + it('getChangeJson returns parsed JSON object', async () => { + const json = await getChangeJson(tempRoot, changeName); + expect(json.id).toBe(changeName); + expect(json.title).toBe('Demo Change'); + expect(json.deltas).toBeDefined(); + // Verify one delta (requirement addition) is parsed if the parser logic works on that markdown + // The dummy markdown: "- **auth:** Add requirement" might be parsed as a delta depending on parser logic. + // The parser usually looks for headers like "## ADDED Requirements" or "## What Changes" mapping. + // Existing parser logic is complex, but we at least check structure. + }); + + it('getChangeDetails returns details with task counts', async () => { + const details = await getChangeDetails(tempRoot, changeName); + expect(details.id).toBe(changeName); + expect(details.title).toBe('Demo Change'); + expect(details.taskStatus).toEqual({ total: 2, completed: 1 }); + }); + + it('validateChange returns a validation report', async () => { + const report = await validateChange(tempRoot, changeName, false); + // It might be invalid because it doesn't strictly follow spec-driven structure (scenarios etc) + // But we just check we got a report object. + expect(report).toHaveProperty('valid'); + expect(report).toHaveProperty('issues'); + }); + + it('runCreateChange scaffolds a new change', async () => { + const newChangeName = 'new-test-change'; + const result = await runCreateChange(tempRoot, newChangeName); + + expect(result.name).toBe(newChangeName); + expect(result.changeDir).toContain(newChangeName); + + // Manually create proposal.md as getActiveChanges requires it + await fs.writeFile(path.join(result.changeDir, 'proposal.md'), '# Change', 'utf-8'); + + // Verify file creation + const changes = await getActiveChanges(tempRoot); + expect(changes).toContain(newChangeName); + }); +}); diff --git a/test/core/init.test.ts b/test/core/init.test.ts index 09f357ea3..4a928c6da 100644 --- a/test/core/init.test.ts +++ b/test/core/init.test.ts @@ -2,883 +2,89 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import { promises as fs } from 'fs'; import path from 'path'; import os from 'os'; -import { InitCommand } from '../../src/core/init.js'; +import { runInit } from '../../src/core/init-logic.js'; +import { InitCommand } from '../../src/commands/init.js'; +import { FileSystemUtils } from '../../src/utils/file-system.js'; +import { ToolRegistry } from '../../src/core/configurators/registry.js'; -const DONE = '__done__'; +const DONE = '__DONE__'; -type SelectionQueue = string[][]; - -let selectionQueue: SelectionQueue = []; - -const mockPrompt = vi.fn(async () => { - if (selectionQueue.length === 0) { - throw new Error('No queued selections provided to init prompt.'); - } - return selectionQueue.shift() ?? []; -}); - -function queueSelections(...values: string[]) { - let current: string[] = []; - values.forEach((value) => { - if (value === DONE) { - selectionQueue.push(current); - current = []; - } else { - current.push(value); - } - }); - - if (current.length > 0) { - selectionQueue.push(current); - } -} - -describe('InitCommand', () => { - let testDir: string; +describe('runInit', () => { + let tempDir: string; + let mockPrompt: any; let initCommand: InitCommand; - let prevCodexHome: string | undefined; + + // Helper for upstream tests + const queueSelections = (...selections: any[]) => { + let callCount = 0; + mockPrompt.mockImplementation(async (config: any) => { + const result = selections[callCount]; + callCount++; + if (result === DONE) { + return []; + } + if (Array.isArray(result)) { + return result; + } + // If it's a string, wrap in array (single selection) + if (typeof result === 'string') { + return [result]; + } + return result || []; + }); + }; + + // Helper to check file existence (from upstream utils) + const fileExists = async (p: string) => { + try { + await fs.stat(p); + return true; + } catch { + return false; + } + }; beforeEach(async () => { - testDir = path.join(os.tmpdir(), `openspec-init-test-${Date.now()}`); - await fs.mkdir(testDir, { recursive: true }); - selectionQueue = []; - mockPrompt.mockReset(); + tempDir = path.join(os.tmpdir(), `openspec-init-test-${Date.now()}`); + await fs.mkdir(tempDir, { recursive: true }); + + // Setup for InitCommand tests + mockPrompt = vi.fn(); initCommand = new InitCommand({ prompt: mockPrompt }); - - // Route Codex global directory into the test sandbox - prevCodexHome = process.env.CODEX_HOME; - process.env.CODEX_HOME = path.join(testDir, '.codex'); - - // Mock console.log to suppress output during tests - vi.spyOn(console, 'log').mockImplementation(() => { }); }); afterEach(async () => { - await fs.rm(testDir, { recursive: true, force: true }); + await fs.rm(tempDir, { recursive: true, force: true }); vi.restoreAllMocks(); - if (prevCodexHome === undefined) delete process.env.CODEX_HOME; - else process.env.CODEX_HOME = prevCodexHome; }); - describe('execute', () => { - it('should create OpenSpec directory structure', async () => { - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const openspecPath = path.join(testDir, 'openspec'); - expect(await directoryExists(openspecPath)).toBe(true); - expect(await directoryExists(path.join(openspecPath, 'specs'))).toBe( - true - ); - expect(await directoryExists(path.join(openspecPath, 'changes'))).toBe( - true - ); - expect( - await directoryExists(path.join(openspecPath, 'changes', 'archive')) - ).toBe(true); - }); - - it('should create AGENTS.md and project.md', async () => { - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const openspecPath = path.join(testDir, 'openspec'); - expect(await fileExists(path.join(openspecPath, 'AGENTS.md'))).toBe(true); - expect(await fileExists(path.join(openspecPath, 'project.md'))).toBe( - true - ); - - const agentsContent = await fs.readFile( - path.join(openspecPath, 'AGENTS.md'), - 'utf-8' - ); - expect(agentsContent).toContain('OpenSpec Instructions'); - - const projectContent = await fs.readFile( - path.join(openspecPath, 'project.md'), - 'utf-8' - ); - expect(projectContent).toContain('Project Context'); - }); - - it('should create CLAUDE.md when Claude Code is selected', async () => { - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const claudePath = path.join(testDir, 'CLAUDE.md'); - expect(await fileExists(claudePath)).toBe(true); - - const content = await fs.readFile(claudePath, 'utf-8'); - expect(content).toContain(''); - expect(content).toContain("@/openspec/AGENTS.md"); - expect(content).toContain('openspec update'); - expect(content).toContain(''); - }); - - it('should update existing CLAUDE.md with markers', async () => { - queueSelections('claude', DONE); - - const claudePath = path.join(testDir, 'CLAUDE.md'); - const existingContent = - '# My Project Instructions\nCustom instructions here'; - await fs.writeFile(claudePath, existingContent); - - await initCommand.execute(testDir); - - const updatedContent = await fs.readFile(claudePath, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain("@/openspec/AGENTS.md"); - expect(updatedContent).toContain('openspec update'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain('Custom instructions here'); - }); - - it('should create CLINE.md when Cline is selected', async () => { - queueSelections('cline', DONE); - - await initCommand.execute(testDir); - - const clinePath = path.join(testDir, 'CLINE.md'); - expect(await fileExists(clinePath)).toBe(true); - - const content = await fs.readFile(clinePath, 'utf-8'); - expect(content).toContain(''); - expect(content).toContain("@/openspec/AGENTS.md"); - expect(content).toContain('openspec update'); - expect(content).toContain(''); - }); - - it('should update existing CLINE.md with markers', async () => { - queueSelections('cline', DONE); - - const clinePath = path.join(testDir, 'CLINE.md'); - const existingContent = - '# My Cline Rules\nCustom Cline instructions here'; - await fs.writeFile(clinePath, existingContent); - - await initCommand.execute(testDir); - - const updatedContent = await fs.readFile(clinePath, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain("@/openspec/AGENTS.md"); - expect(updatedContent).toContain('openspec update'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain('Custom Cline instructions here'); - }); - - it('should create Windsurf workflows when Windsurf is selected', async () => { - queueSelections('windsurf', DONE); - - await initCommand.execute(testDir); - - const wsProposal = path.join( - testDir, - '.windsurf/workflows/openspec-proposal.md' - ); - const wsApply = path.join( - testDir, - '.windsurf/workflows/openspec-apply.md' - ); - const wsArchive = path.join( - testDir, - '.windsurf/workflows/openspec-archive.md' - ); - - expect(await fileExists(wsProposal)).toBe(true); - expect(await fileExists(wsApply)).toBe(true); - expect(await fileExists(wsArchive)).toBe(true); - - const proposalContent = await fs.readFile(wsProposal, 'utf-8'); - expect(proposalContent).toContain('---'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain('auto_execution_mode: 3'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - - const applyContent = await fs.readFile(wsApply, 'utf-8'); - expect(applyContent).toContain('---'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('auto_execution_mode: 3'); - expect(applyContent).toContain(''); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(wsArchive, 'utf-8'); - expect(archiveContent).toContain('---'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('auto_execution_mode: 3'); - expect(archiveContent).toContain(''); - expect(archiveContent).toContain('Run `openspec archive --yes`'); - }); - - it('should create Antigravity workflows when Antigravity is selected', async () => { - queueSelections('antigravity', DONE); - - await initCommand.execute(testDir); - - const agProposal = path.join( - testDir, - '.agent/workflows/openspec-proposal.md' - ); - const agApply = path.join( - testDir, - '.agent/workflows/openspec-apply.md' - ); - const agArchive = path.join( - testDir, - '.agent/workflows/openspec-archive.md' - ); - - expect(await fileExists(agProposal)).toBe(true); - expect(await fileExists(agApply)).toBe(true); - expect(await fileExists(agArchive)).toBe(true); - - const proposalContent = await fs.readFile(agProposal, 'utf-8'); - expect(proposalContent).toContain('---'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - expect(proposalContent).not.toContain('auto_execution_mode'); - - const applyContent = await fs.readFile(agApply, 'utf-8'); - expect(applyContent).toContain('---'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain(''); - expect(applyContent).toContain('Work through tasks sequentially'); - expect(applyContent).not.toContain('auto_execution_mode'); - - const archiveContent = await fs.readFile(agArchive, 'utf-8'); - expect(archiveContent).toContain('---'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain(''); - expect(archiveContent).toContain('Run `openspec archive --yes`'); - expect(archiveContent).not.toContain('auto_execution_mode'); - }); - - it('should always create AGENTS.md in project root', async () => { - queueSelections(DONE); - - await initCommand.execute(testDir); - - const rootAgentsPath = path.join(testDir, 'AGENTS.md'); - expect(await fileExists(rootAgentsPath)).toBe(true); - - const content = await fs.readFile(rootAgentsPath, 'utf-8'); - expect(content).toContain(''); - expect(content).toContain("@/openspec/AGENTS.md"); - expect(content).toContain('openspec update'); - expect(content).toContain(''); - - const claudeExists = await fileExists(path.join(testDir, 'CLAUDE.md')); - expect(claudeExists).toBe(false); - }); - - it('should create Claude slash command files with templates', async () => { - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const claudeProposal = path.join( - testDir, - '.claude/commands/openspec/proposal.md' - ); - const claudeApply = path.join( - testDir, - '.claude/commands/openspec/apply.md' - ); - const claudeArchive = path.join( - testDir, - '.claude/commands/openspec/archive.md' - ); - - expect(await fileExists(claudeProposal)).toBe(true); - expect(await fileExists(claudeApply)).toBe(true); - expect(await fileExists(claudeArchive)).toBe(true); - - const proposalContent = await fs.readFile(claudeProposal, 'utf-8'); - expect(proposalContent).toContain('name: OpenSpec: Proposal'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - - const applyContent = await fs.readFile(claudeApply, 'utf-8'); - expect(applyContent).toContain('name: OpenSpec: Apply'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(claudeArchive, 'utf-8'); - expect(archiveContent).toContain('name: OpenSpec: Archive'); - expect(archiveContent).toContain('openspec archive '); - expect(archiveContent).toContain( - '`--skip-specs` only for tooling-only work' - ); - }); - - it('should create Cursor slash command files with templates', async () => { - queueSelections('cursor', DONE); - - await initCommand.execute(testDir); - - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - const cursorApply = path.join( - testDir, - '.cursor/commands/openspec-apply.md' - ); - const cursorArchive = path.join( - testDir, - '.cursor/commands/openspec-archive.md' - ); - - expect(await fileExists(cursorProposal)).toBe(true); - expect(await fileExists(cursorApply)).toBe(true); - expect(await fileExists(cursorArchive)).toBe(true); - - const proposalContent = await fs.readFile(cursorProposal, 'utf-8'); - expect(proposalContent).toContain('name: /openspec-proposal'); - expect(proposalContent).toContain(''); - - const applyContent = await fs.readFile(cursorApply, 'utf-8'); - expect(applyContent).toContain('id: openspec-apply'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(cursorArchive, 'utf-8'); - expect(archiveContent).toContain('name: /openspec-archive'); - expect(archiveContent).toContain('openspec list --specs'); - }); - - it('should create Gemini CLI TOML files when selected', async () => { - queueSelections('gemini', DONE); - - await initCommand.execute(testDir); - - const geminiProposal = path.join( - testDir, - '.gemini/commands/openspec/proposal.toml' - ); - const geminiApply = path.join( - testDir, - '.gemini/commands/openspec/apply.toml' - ); - const geminiArchive = path.join( - testDir, - '.gemini/commands/openspec/archive.toml' - ); - - expect(await fileExists(geminiProposal)).toBe(true); - expect(await fileExists(geminiApply)).toBe(true); - expect(await fileExists(geminiArchive)).toBe(true); - - const proposalContent = await fs.readFile(geminiProposal, 'utf-8'); - expect(proposalContent).toContain('description = "Scaffold a new OpenSpec change and validate strictly."'); - expect(proposalContent).toContain('prompt = """'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - expect(proposalContent).toContain(''); - - const applyContent = await fs.readFile(geminiApply, 'utf-8'); - expect(applyContent).toContain('description = "Implement an approved OpenSpec change and keep tasks in sync."'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(geminiArchive, 'utf-8'); - expect(archiveContent).toContain('description = "Archive a deployed OpenSpec change and update specs."'); - expect(archiveContent).toContain('openspec archive '); - }); - - it('should update existing Gemini CLI TOML files with refreshed content', async () => { - queueSelections('gemini', DONE); - - await initCommand.execute(testDir); - - const geminiProposal = path.join( - testDir, - '.gemini/commands/openspec/proposal.toml' - ); - - // Modify the file to simulate user customization - const originalContent = await fs.readFile(geminiProposal, 'utf-8'); - const modifiedContent = originalContent.replace( - '', - '\nCustom instruction added by user\n' - ); - await fs.writeFile(geminiProposal, modifiedContent); - - // Run init again to test update/refresh path - queueSelections('gemini', DONE); - await initCommand.execute(testDir); - - const updatedContent = await fs.readFile(geminiProposal, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain('**Guardrails**'); - expect(updatedContent).toContain(''); - expect(updatedContent).not.toContain('Custom instruction added by user'); - }); - - it('should create IFlow CLI slash command files with templates', async () => { - queueSelections('iflow', DONE); - await initCommand.execute(testDir); - - const iflowProposal = path.join( - testDir, - '.iflow/commands/openspec-proposal.md' - ); - const iflowApply = path.join( - testDir, - '.iflow/commands/openspec-apply.md' - ); - const iflowArchive = path.join( - testDir, - '.iflow/commands/openspec-archive.md' - ); - - expect(await fileExists(iflowProposal)).toBe(true); - expect(await fileExists(iflowApply)).toBe(true); - expect(await fileExists(iflowArchive)).toBe(true); - - const proposalContent = await fs.readFile(iflowProposal, 'utf-8'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - expect(proposalContent).toContain(''); - - const applyContent = await fs.readFile(iflowApply, 'utf-8'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(iflowArchive, 'utf-8'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('openspec archive '); - }); - - it('should update existing IFLOW.md with markers', async () => { - queueSelections('iflow', DONE); - - const iflowPath = path.join(testDir, 'IFLOW.md'); - const existingContent = '# My IFLOW Instructions\nCustom instructions here'; - await fs.writeFile(iflowPath, existingContent); - - await initCommand.execute(testDir); - - const updatedContent = await fs.readFile(iflowPath, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain("@/openspec/AGENTS.md"); - expect(updatedContent).toContain('openspec update'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain('Custom instructions here'); - }); - - it('should create OpenCode slash command files with templates', async () => { - queueSelections('opencode', DONE); - - await initCommand.execute(testDir); - - const openCodeProposal = path.join( - testDir, - '.opencode/command/openspec-proposal.md' - ); - const openCodeApply = path.join( - testDir, - '.opencode/command/openspec-apply.md' - ); - const openCodeArchive = path.join( - testDir, - '.opencode/command/openspec-archive.md' - ); - - expect(await fileExists(openCodeProposal)).toBe(true); - expect(await fileExists(openCodeApply)).toBe(true); - expect(await fileExists(openCodeArchive)).toBe(true); - - const proposalContent = await fs.readFile(openCodeProposal, 'utf-8'); - expect(proposalContent).not.toContain('agent:'); - expect(proposalContent).toContain( - 'description: Scaffold a new OpenSpec change and validate strictly.' - ); - expect(proposalContent).toContain(''); - - const applyContent = await fs.readFile(openCodeApply, 'utf-8'); - expect(applyContent).not.toContain('agent:'); - expect(applyContent).toContain( - 'description: Implement an approved OpenSpec change and keep tasks in sync.' - ); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(openCodeArchive, 'utf-8'); - expect(archiveContent).not.toContain('agent:'); - expect(archiveContent).toContain( - 'description: Archive a deployed OpenSpec change and update specs.' - ); - expect(archiveContent).toContain('openspec list --specs'); - }); - - it('should create Qwen configuration and slash command files with templates', async () => { - queueSelections('qwen', DONE); - - await initCommand.execute(testDir); - - const qwenConfigPath = path.join(testDir, 'QWEN.md'); - const proposalPath = path.join( - testDir, - '.qwen/commands/openspec-proposal.toml' - ); - const applyPath = path.join( - testDir, - '.qwen/commands/openspec-apply.toml' - ); - const archivePath = path.join( - testDir, - '.qwen/commands/openspec-archive.toml' - ); - - expect(await fileExists(qwenConfigPath)).toBe(true); - expect(await fileExists(proposalPath)).toBe(true); - expect(await fileExists(applyPath)).toBe(true); - expect(await fileExists(archivePath)).toBe(true); - - const qwenConfigContent = await fs.readFile(qwenConfigPath, 'utf-8'); - expect(qwenConfigContent).toContain(''); - expect(qwenConfigContent).toContain("@/openspec/AGENTS.md"); - expect(qwenConfigContent).toContain(''); - - const proposalContent = await fs.readFile(proposalPath, 'utf-8'); - expect(proposalContent).toContain('description = "Scaffold a new OpenSpec change and validate strictly."'); - expect(proposalContent).toContain('prompt = """'); - expect(proposalContent).toContain(''); - - const applyContent = await fs.readFile(applyPath, 'utf-8'); - expect(applyContent).toContain('description = "Implement an approved OpenSpec change and keep tasks in sync."'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(archivePath, 'utf-8'); - expect(archiveContent).toContain('description = "Archive a deployed OpenSpec change and update specs."'); - expect(archiveContent).toContain('openspec archive '); - }); - - it('should update existing QWEN.md with markers', async () => { - queueSelections('qwen', DONE); - - const qwenPath = path.join(testDir, 'QWEN.md'); - const existingContent = '# My Qwen Instructions\nCustom instructions here'; - await fs.writeFile(qwenPath, existingContent); - - await initCommand.execute(testDir); - - const updatedContent = await fs.readFile(qwenPath, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain("@/openspec/AGENTS.md"); - expect(updatedContent).toContain('openspec update'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain('Custom instructions here'); - }); - - it('should create Cline workflow files with templates', async () => { - queueSelections('cline', DONE); - - await initCommand.execute(testDir); - - const clineProposal = path.join( - testDir, - '.clinerules/workflows/openspec-proposal.md' - ); - const clineApply = path.join( - testDir, - '.clinerules/workflows/openspec-apply.md' - ); - const clineArchive = path.join( - testDir, - '.clinerules/workflows/openspec-archive.md' - ); - - expect(await fileExists(clineProposal)).toBe(true); - expect(await fileExists(clineApply)).toBe(true); - expect(await fileExists(clineArchive)).toBe(true); - - const proposalContent = await fs.readFile(clineProposal, 'utf-8'); - expect(proposalContent).toContain('# OpenSpec: Proposal'); - expect(proposalContent).toContain('Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - - const applyContent = await fs.readFile(clineApply, 'utf-8'); - expect(applyContent).toContain('# OpenSpec: Apply'); - expect(applyContent).toContain('Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(clineArchive, 'utf-8'); - expect(archiveContent).toContain('# OpenSpec: Archive'); - expect(archiveContent).toContain('Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('openspec archive '); - }); - - it('should create Factory slash command files with templates', async () => { - queueSelections('factory', DONE); - - await initCommand.execute(testDir); - - const factoryProposal = path.join( - testDir, - '.factory/commands/openspec-proposal.md' - ); - const factoryApply = path.join( - testDir, - '.factory/commands/openspec-apply.md' - ); - const factoryArchive = path.join( - testDir, - '.factory/commands/openspec-archive.md' - ); - - expect(await fileExists(factoryProposal)).toBe(true); - expect(await fileExists(factoryApply)).toBe(true); - expect(await fileExists(factoryArchive)).toBe(true); - - const proposalContent = await fs.readFile(factoryProposal, 'utf-8'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain('argument-hint: request or feature description'); - expect(proposalContent).toContain(''); - expect( - /([\s\S]*?)/u.exec( - proposalContent - )?.[1] - ).toContain('$ARGUMENTS'); - - const applyContent = await fs.readFile(factoryApply, 'utf-8'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('argument-hint: change-id'); - expect(applyContent).toContain('Work through tasks sequentially'); - expect( - /([\s\S]*?)/u.exec( - applyContent - )?.[1] - ).toContain('$ARGUMENTS'); - - const archiveContent = await fs.readFile(factoryArchive, 'utf-8'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('argument-hint: change-id'); - expect(archiveContent).toContain('openspec archive --yes'); - expect( - /([\s\S]*?)/u.exec( - archiveContent - )?.[1] - ).toContain('$ARGUMENTS'); - }); - - it('should create Codex prompts with templates and placeholders', async () => { - queueSelections('codex', DONE); - - await initCommand.execute(testDir); - - const proposalPath = path.join( - testDir, - '.codex/prompts/openspec-proposal.md' - ); - const applyPath = path.join( - testDir, - '.codex/prompts/openspec-apply.md' - ); - const archivePath = path.join( - testDir, - '.codex/prompts/openspec-archive.md' - ); - - expect(await fileExists(proposalPath)).toBe(true); - expect(await fileExists(applyPath)).toBe(true); - expect(await fileExists(archivePath)).toBe(true); - - const proposalContent = await fs.readFile(proposalPath, 'utf-8'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain('argument-hint: request or feature description'); - expect(proposalContent).toContain('$ARGUMENTS'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - - const applyContent = await fs.readFile(applyPath, 'utf-8'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('argument-hint: change-id'); - expect(applyContent).toContain('$ARGUMENTS'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(archivePath, 'utf-8'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('argument-hint: change-id'); - expect(archiveContent).toContain('$ARGUMENTS'); - expect(archiveContent).toContain('openspec archive --yes'); - }); - - it('should create Kilo Code workflows with templates', async () => { - queueSelections('kilocode', DONE); - - await initCommand.execute(testDir); - - const proposalPath = path.join( - testDir, - '.kilocode/workflows/openspec-proposal.md' - ); - const applyPath = path.join( - testDir, - '.kilocode/workflows/openspec-apply.md' - ); - const archivePath = path.join( - testDir, - '.kilocode/workflows/openspec-archive.md' - ); - - expect(await fileExists(proposalPath)).toBe(true); - expect(await fileExists(applyPath)).toBe(true); - expect(await fileExists(archivePath)).toBe(true); - - const proposalContent = await fs.readFile(proposalPath, 'utf-8'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - expect(proposalContent).not.toContain('---\n'); - - const applyContent = await fs.readFile(applyPath, 'utf-8'); - expect(applyContent).toContain('Work through tasks sequentially'); - expect(applyContent).not.toContain('---\n'); - - const archiveContent = await fs.readFile(archivePath, 'utf-8'); - expect(archiveContent).toContain('openspec list --specs'); - expect(archiveContent).not.toContain('---\n'); - }); - - it('should create GitHub Copilot prompt files with templates', async () => { - queueSelections('github-copilot', DONE); - - await initCommand.execute(testDir); - - const proposalPath = path.join( - testDir, - '.github/prompts/openspec-proposal.prompt.md' - ); - const applyPath = path.join( - testDir, - '.github/prompts/openspec-apply.prompt.md' - ); - const archivePath = path.join( - testDir, - '.github/prompts/openspec-archive.prompt.md' - ); - - expect(await fileExists(proposalPath)).toBe(true); - expect(await fileExists(applyPath)).toBe(true); - expect(await fileExists(archivePath)).toBe(true); - - const proposalContent = await fs.readFile(proposalPath, 'utf-8'); - expect(proposalContent).toContain('---'); - expect(proposalContent).toContain('description: Scaffold a new OpenSpec change and validate strictly.'); - expect(proposalContent).toContain('$ARGUMENTS'); - expect(proposalContent).toContain(''); - expect(proposalContent).toContain('**Guardrails**'); - - const applyContent = await fs.readFile(applyPath, 'utf-8'); - expect(applyContent).toContain('---'); - expect(applyContent).toContain('description: Implement an approved OpenSpec change and keep tasks in sync.'); - expect(applyContent).toContain('$ARGUMENTS'); - expect(applyContent).toContain('Work through tasks sequentially'); - - const archiveContent = await fs.readFile(archivePath, 'utf-8'); - expect(archiveContent).toContain('---'); - expect(archiveContent).toContain('description: Archive a deployed OpenSpec change and update specs.'); - expect(archiveContent).toContain('$ARGUMENTS'); - expect(archiveContent).toContain('openspec archive --yes'); - }); - - it('should add new tool when OpenSpec already exists', async () => { - queueSelections('claude', DONE, 'cursor', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); - - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - expect(await fileExists(cursorProposal)).toBe(true); - }); - - it('should allow extend mode with no additional native tools', async () => { - queueSelections('claude', DONE, DONE); - await initCommand.execute(testDir); - await expect(initCommand.execute(testDir)).resolves.toBeUndefined(); - }); - - it('should recreate deleted openspec/AGENTS.md in extend mode', async () => { - await testFileRecreationInExtendMode( - testDir, - initCommand, - 'openspec/AGENTS.md', - 'OpenSpec Instructions' - ); - }); - - it('should recreate deleted openspec/project.md in extend mode', async () => { - await testFileRecreationInExtendMode( - testDir, - initCommand, - 'openspec/project.md', - 'Project Context' - ); - }); - - it('should preserve existing template files in extend mode', async () => { - queueSelections('claude', DONE, DONE); - - // First init - await initCommand.execute(testDir); - - const agentsPath = path.join(testDir, 'openspec', 'AGENTS.md'); - const customContent = '# My Custom AGENTS Content\nDo not overwrite this!'; - - // Modify the file with custom content - await fs.writeFile(agentsPath, customContent); - - // Run init again - should NOT overwrite - await initCommand.execute(testDir); - - const content = await fs.readFile(agentsPath, 'utf-8'); - expect(content).toBe(customContent); - expect(content).not.toContain('OpenSpec Instructions'); - }); - - it('should handle non-existent target directory', async () => { - queueSelections('claude', DONE); - - const newDir = path.join(testDir, 'new-project'); - await initCommand.execute(newDir); - - const openspecPath = path.join(newDir, 'openspec'); - expect(await directoryExists(openspecPath)).toBe(true); - }); - - it('should display success message with selected tool name', async () => { - queueSelections('claude', DONE); - const logSpy = vi.spyOn(console, 'log'); - - await initCommand.execute(testDir); - - const calls = logSpy.mock.calls.flat().join('\n'); - expect(calls).toContain('Copy these prompts to Claude Code'); - }); - - it('should reference AGENTS compatible assistants in success message', async () => { - queueSelections(DONE); - const logSpy = vi.spyOn(console, 'log'); - - await initCommand.execute(testDir); + it('should initialize OpenSpec in a directory', async () => { + const result = await runInit(tempDir, { tools: [] }); + + expect(result.projectPath).toBe(path.resolve(tempDir)); + expect(result.openspecDir).toBe('.openspec'); + expect(result.extendMode).toBe(false); + + const openspecPath = path.join(tempDir, '.openspec'); + expect(await fs.stat(openspecPath)).toBeDefined(); + expect(await fs.stat(path.join(openspecPath, 'specs'))).toBeDefined(); + expect(await fs.stat(path.join(openspecPath, 'changes'))).toBeDefined(); + expect(await fs.stat(path.join(openspecPath, 'project.md'))).toBeDefined(); + }); - const calls = logSpy.mock.calls.flat().join('\n'); - expect(calls).toContain( - 'Copy these prompts to your AGENTS.md-compatible assistant' - ); - }); + it('should handle extend mode if openspec directory exists', async () => { + const openspecPath = path.join(tempDir, '.openspec'); + await fs.mkdir(openspecPath, { recursive: true }); + + const result = await runInit(tempDir, { tools: [] }); + expect(result.extendMode).toBe(true); }); describe('AI tool selection', () => { it('should prompt for AI tool selection', async () => { queueSelections('claude', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); expect(mockPrompt).toHaveBeenCalledWith( expect.objectContaining({ @@ -893,17 +99,17 @@ describe('InitCommand', () => { // For now, only Claude is available, but test the structure queueSelections('claude', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); // When other tools are added, we'd test their specific configurations here - const claudePath = path.join(testDir, 'CLAUDE.md'); + const claudePath = path.join(tempDir, 'CLAUDE.md'); expect(await fileExists(claudePath)).toBe(true); }); it('should mark existing tools as already configured during extend mode', async () => { queueSelections('claude', DONE, 'cursor', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const claudeChoice = secondRunArgs.choices.find( @@ -914,8 +120,8 @@ describe('InitCommand', () => { it('should mark Qwen as already configured during extend mode', async () => { queueSelections('qwen', DONE, 'qwen', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const qwenChoice = secondRunArgs.choices.find( @@ -926,8 +132,8 @@ describe('InitCommand', () => { it('should preselect Kilo Code when workflows already exist', async () => { queueSelections('kilocode', DONE, 'kilocode', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const preselected = secondRunArgs.initialSelected ?? []; @@ -936,8 +142,8 @@ describe('InitCommand', () => { it('should mark Windsurf as already configured during extend mode', async () => { queueSelections('windsurf', DONE, 'windsurf', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const wsChoice = secondRunArgs.choices.find( @@ -948,8 +154,8 @@ describe('InitCommand', () => { it('should mark Antigravity as already configured during extend mode', async () => { queueSelections('antigravity', DONE, 'antigravity', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const antigravityChoice = secondRunArgs.choices.find( @@ -960,8 +166,8 @@ describe('InitCommand', () => { it('should mark Codex as already configured during extend mode', async () => { queueSelections('codex', DONE, 'codex', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const codexChoice = secondRunArgs.choices.find( @@ -972,8 +178,8 @@ describe('InitCommand', () => { it('should mark Factory Droid as already configured during extend mode', async () => { queueSelections('factory', DONE, 'factory', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const factoryChoice = secondRunArgs.choices.find( @@ -984,8 +190,8 @@ describe('InitCommand', () => { it('should mark GitHub Copilot as already configured during extend mode', async () => { queueSelections('github-copilot', DONE, 'github-copilot', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const githubCopilotChoice = secondRunArgs.choices.find( @@ -997,18 +203,18 @@ describe('InitCommand', () => { it('should create Amazon Q Developer prompt files with templates', async () => { queueSelections('amazon-q', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const proposalPath = path.join( - testDir, + tempDir, '.amazonq/prompts/openspec-proposal.md' ); const applyPath = path.join( - testDir, + tempDir, '.amazonq/prompts/openspec-apply.md' ); const archivePath = path.join( - testDir, + tempDir, '.amazonq/prompts/openspec-archive.md' ); @@ -1032,8 +238,8 @@ describe('InitCommand', () => { it('should mark Amazon Q Developer as already configured during extend mode', async () => { queueSelections('amazon-q', DONE, 'amazon-q', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const amazonQChoice = secondRunArgs.choices.find( @@ -1045,18 +251,18 @@ describe('InitCommand', () => { it('should create Auggie slash command files with templates', async () => { queueSelections('auggie', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const auggieProposal = path.join( - testDir, + tempDir, '.augment/commands/openspec-proposal.md' ); const auggieApply = path.join( - testDir, + tempDir, '.augment/commands/openspec-apply.md' ); const auggieArchive = path.join( - testDir, + tempDir, '.augment/commands/openspec-archive.md' ); @@ -1086,8 +292,8 @@ describe('InitCommand', () => { it('should mark Auggie as already configured during extend mode', async () => { queueSelections('auggie', DONE, 'auggie', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const auggieChoice = secondRunArgs.choices.find( @@ -1099,18 +305,18 @@ describe('InitCommand', () => { it('should create CodeBuddy slash command files with templates', async () => { queueSelections('codebuddy', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const codeBuddyProposal = path.join( - testDir, + tempDir, '.codebuddy/commands/openspec/proposal.md' ); const codeBuddyApply = path.join( - testDir, + tempDir, '.codebuddy/commands/openspec/apply.md' ); const codeBuddyArchive = path.join( - testDir, + tempDir, '.codebuddy/commands/openspec/archive.md' ); @@ -1141,8 +347,8 @@ describe('InitCommand', () => { it('should mark CodeBuddy as already configured during extend mode', async () => { queueSelections('codebuddy', DONE, 'codebuddy', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const codeBuddyChoice = secondRunArgs.choices.find( @@ -1154,18 +360,18 @@ describe('InitCommand', () => { it('should create Continue slash command files with templates', async () => { queueSelections('continue', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const continueProposal = path.join( - testDir, + tempDir, '.continue/prompts/openspec-proposal.prompt' ); const continueApply = path.join( - testDir, + tempDir, '.continue/prompts/openspec-apply.prompt' ); const continueArchive = path.join( - testDir, + tempDir, '.continue/prompts/openspec-archive.prompt' ); @@ -1196,8 +402,8 @@ describe('InitCommand', () => { it('should mark Continue as already configured during extend mode', async () => { queueSelections('continue', DONE, 'continue', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const continueChoice = secondRunArgs.choices.find( @@ -1209,9 +415,9 @@ describe('InitCommand', () => { it('should create CODEBUDDY.md when CodeBuddy is selected', async () => { queueSelections('codebuddy', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); - const codeBuddyPath = path.join(testDir, 'CODEBUDDY.md'); + const codeBuddyPath = path.join(tempDir, 'CODEBUDDY.md'); expect(await fileExists(codeBuddyPath)).toBe(true); const content = await fs.readFile(codeBuddyPath, 'utf-8'); @@ -1224,12 +430,12 @@ describe('InitCommand', () => { it('should update existing CODEBUDDY.md with markers', async () => { queueSelections('codebuddy', DONE); - const codeBuddyPath = path.join(testDir, 'CODEBUDDY.md'); + const codeBuddyPath = path.join(tempDir, 'CODEBUDDY.md'); const existingContent = '# My CodeBuddy Instructions\nCustom instructions here'; await fs.writeFile(codeBuddyPath, existingContent); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const updatedContent = await fs.readFile(codeBuddyPath, 'utf-8'); expect(updatedContent).toContain(''); @@ -1242,18 +448,18 @@ describe('InitCommand', () => { it('should create Crush slash command files with templates', async () => { queueSelections('crush', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const crushProposal = path.join( - testDir, + tempDir, '.crush/commands/openspec/proposal.md' ); const crushApply = path.join( - testDir, + tempDir, '.crush/commands/openspec/apply.md' ); const crushArchive = path.join( - testDir, + tempDir, '.crush/commands/openspec/archive.md' ); @@ -1289,8 +495,8 @@ describe('InitCommand', () => { it('should mark Crush as already configured during extend mode', async () => { queueSelections('crush', DONE, 'crush', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const crushChoice = secondRunArgs.choices.find( @@ -1302,18 +508,18 @@ describe('InitCommand', () => { it('should create CoStrict slash command files with templates', async () => { queueSelections('costrict', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const costrictProposal = path.join( - testDir, + tempDir, '.cospec/openspec/commands/openspec-proposal.md' ); const costrictApply = path.join( - testDir, + tempDir, '.cospec/openspec/commands/openspec-apply.md' ); const costrictArchive = path.join( - testDir, + tempDir, '.cospec/openspec/commands/openspec-archive.md' ); @@ -1343,8 +549,8 @@ describe('InitCommand', () => { it('should mark CoStrict as already configured during extend mode', async () => { queueSelections('costrict', DONE, 'costrict', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const costrictChoice = secondRunArgs.choices.find( @@ -1356,18 +562,18 @@ describe('InitCommand', () => { it('should create RooCode slash command files with templates', async () => { queueSelections('roocode', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const rooProposal = path.join( - testDir, + tempDir, '.roo/commands/openspec-proposal.md' ); const rooApply = path.join( - testDir, + tempDir, '.roo/commands/openspec-apply.md' ); const rooArchive = path.join( - testDir, + tempDir, '.roo/commands/openspec-archive.md' ); @@ -1390,8 +596,8 @@ describe('InitCommand', () => { it('should mark RooCode as already configured during extend mode', async () => { queueSelections('roocode', DONE, 'roocode', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const rooChoice = secondRunArgs.choices.find( @@ -1403,18 +609,18 @@ describe('InitCommand', () => { it('should create Qoder slash command files with templates', async () => { queueSelections('qoder', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const qoderProposal = path.join( - testDir, + tempDir, '.qoder/commands/openspec/proposal.md' ); const qoderApply = path.join( - testDir, + tempDir, '.qoder/commands/openspec/apply.md' ); const qoderArchive = path.join( - testDir, + tempDir, '.qoder/commands/openspec/archive.md' ); @@ -1445,8 +651,8 @@ describe('InitCommand', () => { it('should mark Qoder as already configured during extend mode', async () => { queueSelections('qoder', DONE, 'qoder', DONE); - await initCommand.execute(testDir); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); + await initCommand.execute(tempDir); const secondRunArgs = mockPrompt.mock.calls[1][0]; const qoderChoice = secondRunArgs.choices.find( @@ -1458,9 +664,9 @@ describe('InitCommand', () => { it('should create COSTRICT.md when CoStrict is selected', async () => { queueSelections('costrict', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); - const costrictPath = path.join(testDir, 'COSTRICT.md'); + const costrictPath = path.join(tempDir, 'COSTRICT.md'); expect(await fileExists(costrictPath)).toBe(true); const content = await fs.readFile(costrictPath, 'utf-8'); @@ -1473,9 +679,9 @@ describe('InitCommand', () => { it('should create QODER.md when Qoder is selected', async () => { queueSelections('qoder', DONE); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); - const qoderPath = path.join(testDir, 'QODER.md'); + const qoderPath = path.join(tempDir, 'QODER.md'); expect(await fileExists(qoderPath)).toBe(true); const content = await fs.readFile(qoderPath, 'utf-8'); @@ -1487,12 +693,12 @@ describe('InitCommand', () => { it('should update existing COSTRICT.md with markers', async () => { queueSelections('costrict', DONE); - const costrictPath = path.join(testDir, 'COSTRICT.md'); + const costrictPath = path.join(tempDir, 'COSTRICT.md'); const existingContent = '# My CoStrict Instructions\nCustom instructions here'; await fs.writeFile(costrictPath, existingContent); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const updatedContent = await fs.readFile(costrictPath, 'utf-8'); expect(updatedContent).toContain(''); @@ -1503,12 +709,12 @@ describe('InitCommand', () => { it('should update existing QODER.md with markers', async () => { queueSelections('qoder', DONE); - const qoderPath = path.join(testDir, 'QODER.md'); + const qoderPath = path.join(tempDir, 'QODER.md'); const existingContent = '# My Qoder Instructions\nCustom instructions here'; await fs.writeFile(qoderPath, existingContent); - await initCommand.execute(testDir); + await initCommand.execute(tempDir); const updatedContent = await fs.readFile(qoderPath, 'utf-8'); expect(updatedContent).toContain(''); @@ -1519,247 +725,15 @@ describe('InitCommand', () => { }); }); - describe('non-interactive mode', () => { - it('should select all available tools with --tools all option', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'all' }); - - await nonInteractiveCommand.execute(testDir); - - // Should create configurations for all available tools - const claudePath = path.join(testDir, 'CLAUDE.md'); - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - const windsurfProposal = path.join( - testDir, - '.windsurf/workflows/openspec-proposal.md' - ); - - expect(await fileExists(claudePath)).toBe(true); - expect(await fileExists(cursorProposal)).toBe(true); - expect(await fileExists(windsurfProposal)).toBe(true); - }); - - it('should select specific tools with --tools option', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'claude,cursor' }); - - await nonInteractiveCommand.execute(testDir); - - const claudePath = path.join(testDir, 'CLAUDE.md'); - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - const windsurfProposal = path.join( - testDir, - '.windsurf/workflows/openspec-proposal.md' - ); - - expect(await fileExists(claudePath)).toBe(true); - expect(await fileExists(cursorProposal)).toBe(true); - expect(await fileExists(windsurfProposal)).toBe(false); // Not selected - }); - - it('should skip tool configuration with --tools none option', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'none' }); - - await nonInteractiveCommand.execute(testDir); - - const claudePath = path.join(testDir, 'CLAUDE.md'); - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - - // Should still create AGENTS.md but no tool-specific files - const rootAgentsPath = path.join(testDir, 'AGENTS.md'); - expect(await fileExists(rootAgentsPath)).toBe(true); - expect(await fileExists(claudePath)).toBe(false); - expect(await fileExists(cursorProposal)).toBe(false); - }); - - it('should throw error for invalid tool names', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'invalid-tool' }); - - await expect(nonInteractiveCommand.execute(testDir)).rejects.toThrow( - /Invalid tool\(s\): invalid-tool\. Available values: / - ); - }); - - it('should handle comma-separated tool names with spaces', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'claude, cursor' }); - - await nonInteractiveCommand.execute(testDir); - - const claudePath = path.join(testDir, 'CLAUDE.md'); - const cursorProposal = path.join( - testDir, - '.cursor/commands/openspec-proposal.md' - ); - - expect(await fileExists(claudePath)).toBe(true); - expect(await fileExists(cursorProposal)).toBe(true); - }); - - it('should reject combining reserved keywords with explicit tool ids', async () => { - const nonInteractiveCommand = new InitCommand({ tools: 'all,claude' }); - - await expect(nonInteractiveCommand.execute(testDir)).rejects.toThrow( - /Cannot combine reserved values "all" or "none" with specific tool IDs/ - ); - }); - }); - - describe('already configured detection', () => { - it('should NOT show tools as already configured in fresh project with existing CLAUDE.md', async () => { - // Simulate user having their own CLAUDE.md before running openspec init - const claudePath = path.join(testDir, 'CLAUDE.md'); - await fs.writeFile(claudePath, '# My Custom Claude Instructions\n'); - - queueSelections('claude', DONE); - - await initCommand.execute(testDir); + it('should migrate legacy directory if requested', async () => { + const legacyPath = path.join(tempDir, 'openspec'); // This is the LEGACY name + await fs.mkdir(legacyPath, { recursive: true }); - // In the first run (non-interactive mode via queueSelections), - // the prompt is called with configured: false for claude - const firstCallArgs = mockPrompt.mock.calls[0][0]; - const claudeChoice = firstCallArgs.choices.find( - (choice: any) => choice.value === 'claude' - ); - - expect(claudeChoice.configured).toBe(false); - }); - - it('should NOT show tools as already configured in fresh project with existing slash commands', async () => { - // Simulate user having their own custom slash commands - const customCommandDir = path.join(testDir, '.claude/commands/custom'); - await fs.mkdir(customCommandDir, { recursive: true }); - await fs.writeFile( - path.join(customCommandDir, 'mycommand.md'), - '# My Custom Command\n' - ); - - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const firstCallArgs = mockPrompt.mock.calls[0][0]; - const claudeChoice = firstCallArgs.choices.find( - (choice: any) => choice.value === 'claude' - ); - - expect(claudeChoice.configured).toBe(false); - }); - - it('should show tools as already configured in extend mode', async () => { - // First initialization - queueSelections('claude', DONE); - await initCommand.execute(testDir); - - // Second initialization (extend mode) - queueSelections('cursor', DONE); - await initCommand.execute(testDir); - - const secondCallArgs = mockPrompt.mock.calls[1][0]; - const claudeChoice = secondCallArgs.choices.find( - (choice: any) => choice.value === 'claude' - ); - - expect(claudeChoice.configured).toBe(true); - }); - - it('should NOT show already configured for Codex in fresh init even with global prompts', async () => { - // Create global Codex prompts (simulating previous installation) - const codexPromptsDir = path.join(testDir, '.codex/prompts'); - await fs.mkdir(codexPromptsDir, { recursive: true }); - await fs.writeFile( - path.join(codexPromptsDir, 'openspec-proposal.md'), - '# Existing prompt\n' - ); - - queueSelections('claude', DONE); - - await initCommand.execute(testDir); - - const firstCallArgs = mockPrompt.mock.calls[0][0]; - const codexChoice = firstCallArgs.choices.find( - (choice: any) => choice.value === 'codex' - ); - - // In fresh init, even global tools should not show as configured - expect(codexChoice.configured).toBe(false); - }); - }); - - describe('error handling', () => { - it('should provide helpful error for insufficient permissions', async () => { - // This is tricky to test cross-platform, but we can test the error message - const readOnlyDir = path.join(testDir, 'readonly'); - await fs.mkdir(readOnlyDir); - - // Mock the permission check to fail - const originalCheck = fs.writeFile; - vi.spyOn(fs, 'writeFile').mockImplementation( - async (filePath: any, ...args: any[]) => { - if ( - typeof filePath === 'string' && - filePath.includes('.openspec-test-') - ) { - throw new Error('EACCES: permission denied'); - } - return originalCheck.call(fs, filePath, ...args); - } - ); - - queueSelections('claude', DONE); - await expect(initCommand.execute(readOnlyDir)).rejects.toThrow( - /Insufficient permissions/ - ); - }); + const result = await runInit(tempDir, { tools: [], shouldMigrate: true }); + + expect(result.migrated).toBe(true); + expect(result.openspecDir).toBe('.openspec'); + expect(await fs.stat(path.join(tempDir, '.openspec'))).toBeDefined(); }); }); -async function testFileRecreationInExtendMode( - testDir: string, - initCommand: InitCommand, - relativePath: string, - expectedContent: string -): Promise { - queueSelections('claude', DONE, DONE); - - // First init - await initCommand.execute(testDir); - - const filePath = path.join(testDir, relativePath); - expect(await fileExists(filePath)).toBe(true); - - // Delete the file - await fs.unlink(filePath); - expect(await fileExists(filePath)).toBe(false); - - // Run init again - should recreate the file - await initCommand.execute(testDir); - expect(await fileExists(filePath)).toBe(true); - - const content = await fs.readFile(filePath, 'utf-8'); - expect(content).toContain(expectedContent); -} - -async function fileExists(filePath: string): Promise { - try { - await fs.access(filePath); - return true; - } catch { - return false; - } -} - -async function directoryExists(dirPath: string): Promise { - try { - const stats = await fs.stat(dirPath); - return stats.isDirectory(); - } catch { - return false; - } -} diff --git a/test/core/list.test.ts b/test/core/list.test.ts index 5a678919a..df29dd568 100644 --- a/test/core/list.test.ts +++ b/test/core/list.test.ts @@ -2,39 +2,25 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { promises as fs } from 'fs'; import path from 'path'; import os from 'os'; -import { ListCommand } from '../../src/core/list.js'; +import { listChanges } from '../../src/core/list.js'; -describe('ListCommand', () => { +describe('listChanges', () => { let tempDir: string; - let originalLog: typeof console.log; - let logOutput: string[] = []; beforeEach(async () => { // Create temp directory tempDir = path.join(os.tmpdir(), `openspec-list-test-${Date.now()}`); await fs.mkdir(tempDir, { recursive: true }); - - // Mock console.log to capture output - originalLog = console.log; - console.log = (...args: any[]) => { - logOutput.push(args.join(' ')); - }; - logOutput = []; }); afterEach(async () => { - // Restore console.log - console.log = originalLog; - // Clean up temp directory await fs.rm(tempDir, { recursive: true, force: true }); }); describe('execute', () => { it('should handle missing openspec/changes directory', async () => { - const listCommand = new ListCommand(); - - await expect(listCommand.execute(tempDir, 'changes')).rejects.toThrow( + await expect(listChanges(tempDir)).rejects.toThrow( "No OpenSpec changes directory found. Run 'openspec init' first." ); }); @@ -43,123 +29,94 @@ describe('ListCommand', () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); await fs.mkdir(changesDir, { recursive: true }); - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes'); - - expect(logOutput).toEqual(['No active changes found.']); + const changes = await listChanges(tempDir); + expect(changes).toEqual([]); }); it('should exclude archive directory', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); + await fs.mkdir(changesDir, { recursive: true }); + await fs.mkdir(path.join(changesDir, 'active-change'), { recursive: true }); await fs.mkdir(path.join(changesDir, 'archive'), { recursive: true }); - await fs.mkdir(path.join(changesDir, 'my-change'), { recursive: true }); - - // Create tasks.md with some tasks - await fs.writeFile( - path.join(changesDir, 'my-change', 'tasks.md'), - '- [x] Task 1\n- [ ] Task 2\n' - ); - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes'); - - expect(logOutput).toContain('Changes:'); - expect(logOutput.some(line => line.includes('my-change'))).toBe(true); - expect(logOutput.some(line => line.includes('archive'))).toBe(false); + const changes = await listChanges(tempDir); + expect(changes.length).toBe(1); + expect(changes[0].name).toBe('active-change'); }); it('should count tasks correctly', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); - await fs.mkdir(path.join(changesDir, 'test-change'), { recursive: true }); + const changePath = path.join(changesDir, 'my-change'); + await fs.mkdir(changePath, { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'test-change', 'tasks.md'), - `# Tasks -- [x] Completed task 1 -- [x] Completed task 2 -- [ ] Incomplete task 1 -- [ ] Incomplete task 2 -- [ ] Incomplete task 3 -Regular text that should be ignored -` - ); - - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes'); - - expect(logOutput.some(line => line.includes('2/5 tasks'))).toBe(true); + const tasksContent = ` +- [x] task 1 +- [ ] task 2 +- [ ] task 3 +`; + await fs.writeFile(path.join(changePath, 'tasks.md'), tasksContent); + + const changes = await listChanges(tempDir); + expect(changes[0].completedTasks).toBe(1); + expect(changes[0].totalTasks).toBe(3); }); it('should show complete status for fully completed changes', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); - await fs.mkdir(path.join(changesDir, 'completed-change'), { recursive: true }); + const changePath = path.join(changesDir, 'done-change'); + await fs.mkdir(changePath, { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'completed-change', 'tasks.md'), - '- [x] Task 1\n- [x] Task 2\n- [x] Task 3\n' - ); - - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes'); - - expect(logOutput.some(line => line.includes('βœ“ Complete'))).toBe(true); + const tasksContent = ` +- [x] task 1 +- [x] task 2 +`; + await fs.writeFile(path.join(changePath, 'tasks.md'), tasksContent); + + const changes = await listChanges(tempDir); + expect(changes[0].completedTasks).toBe(2); + expect(changes[0].totalTasks).toBe(2); }); it('should handle changes without tasks.md', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); await fs.mkdir(path.join(changesDir, 'no-tasks'), { recursive: true }); - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes'); - - expect(logOutput.some(line => line.includes('no-tasks') && line.includes('No tasks'))).toBe(true); + const changes = await listChanges(tempDir); + expect(changes[0].completedTasks).toBe(0); + expect(changes[0].totalTasks).toBe(0); }); it('should sort changes alphabetically when sort=name', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); await fs.mkdir(path.join(changesDir, 'zebra'), { recursive: true }); - await fs.mkdir(path.join(changesDir, 'alpha'), { recursive: true }); + await fs.mkdir(path.join(changesDir, 'apple'), { recursive: true }); await fs.mkdir(path.join(changesDir, 'middle'), { recursive: true }); - const listCommand = new ListCommand(); - await listCommand.execute(tempDir, 'changes', { sort: 'name' }); - - const changeLines = logOutput.filter(line => - line.includes('alpha') || line.includes('middle') || line.includes('zebra') - ); - - expect(changeLines[0]).toContain('alpha'); - expect(changeLines[1]).toContain('middle'); - expect(changeLines[2]).toContain('zebra'); + const changes = await listChanges(tempDir, 'name'); + expect(changes.map(c => c.name)).toEqual(['apple', 'middle', 'zebra']); }); it('should handle multiple changes with various states', async () => { const changesDir = path.join(tempDir, 'openspec', 'changes'); - // Complete change - await fs.mkdir(path.join(changesDir, 'completed'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'completed', 'tasks.md'), - '- [x] Task 1\n- [x] Task 2\n' - ); - - // Partial change - await fs.mkdir(path.join(changesDir, 'partial'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'partial', 'tasks.md'), - '- [x] Done\n- [ ] Not done\n- [ ] Also not done\n' - ); - - // No tasks + // Change 1: In progress + const c1 = path.join(changesDir, 'active'); + await fs.mkdir(c1, { recursive: true }); + await fs.writeFile(path.join(c1, 'tasks.md'), '- [x] t1\n- [ ] t2'); + + // Change 2: Done + const c2 = path.join(changesDir, 'done'); + await fs.mkdir(c2, { recursive: true }); + await fs.writeFile(path.join(c2, 'tasks.md'), '- [x] t1'); + + // Change 3: No tasks await fs.mkdir(path.join(changesDir, 'no-tasks'), { recursive: true }); - const listCommand = new ListCommand(); - await listCommand.execute(tempDir); - - expect(logOutput).toContain('Changes:'); - expect(logOutput.some(line => line.includes('completed') && line.includes('βœ“ Complete'))).toBe(true); - expect(logOutput.some(line => line.includes('partial') && line.includes('1/3 tasks'))).toBe(true); - expect(logOutput.some(line => line.includes('no-tasks') && line.includes('No tasks'))).toBe(true); + const changes = await listChanges(tempDir, 'name'); + expect(changes.length).toBe(3); + expect(changes.find(c => c.name === 'active')?.completedTasks).toBe(1); + expect(changes.find(c => c.name === 'done')?.completedTasks).toBe(1); + expect(changes.find(c => c.name === 'no-tasks')?.completedTasks).toBe(0); }); }); -}); \ No newline at end of file +}); diff --git a/test/core/update.test.ts b/test/core/update.test.ts index 83994b882..1b80f5985 100644 --- a/test/core/update.test.ts +++ b/test/core/update.test.ts @@ -1,5 +1,6 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; -import { UpdateCommand } from '../../src/core/update.js'; +import { runUpdate } from '../../src/core/update-logic.js'; +import { UpdateCommand } from '../../src/commands/update.js'; import { FileSystemUtils } from '../../src/utils/file-system.js'; import { ToolRegistry } from '../../src/core/configurators/registry.js'; import path from 'path'; @@ -7,13 +8,12 @@ import fs from 'fs/promises'; import os from 'os'; import { randomUUID } from 'crypto'; -describe('UpdateCommand', () => { +describe('runUpdate', () => { let testDir: string; let updateCommand: UpdateCommand; let prevCodexHome: string | undefined; beforeEach(async () => { - // Create a temporary test directory testDir = path.join(os.tmpdir(), `openspec-test-${randomUUID()}`); await fs.mkdir(testDir, { recursive: true }); @@ -29,48 +29,30 @@ describe('UpdateCommand', () => { }); afterEach(async () => { - // Clean up test directory await fs.rm(testDir, { recursive: true, force: true }); if (prevCodexHome === undefined) delete process.env.CODEX_HOME; else process.env.CODEX_HOME = prevCodexHome; + vi.restoreAllMocks(); }); - it('should update only existing CLAUDE.md file', async () => { - // Create CLAUDE.md file with initial content - const claudePath = path.join(testDir, 'CLAUDE.md'); - const initialContent = `# Project Instructions - -Some existing content here. - - -Old OpenSpec content - - -More content after.`; - await fs.writeFile(claudePath, initialContent); - - const consoleSpy = vi.spyOn(console, 'log'); - - // Execute update command - await updateCommand.execute(testDir); - - // Check that CLAUDE.md was updated - const updatedContent = await fs.readFile(claudePath, 'utf-8'); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain(''); - expect(updatedContent).toContain("@/openspec/AGENTS.md"); - expect(updatedContent).toContain('openspec update'); - expect(updatedContent).toContain('Some existing content here'); - expect(updatedContent).toContain('More content after'); + it('should fail if OpenSpec is not initialized', async () => { + // Remove openspec directory from beforeEach + await fs.rm(path.join(testDir, 'openspec'), { recursive: true, force: true }); + await expect(runUpdate(testDir)).rejects.toThrow(/No OpenSpec directory found/); + }); - // Check console output - const [logMessage] = consoleSpy.mock.calls[0]; - expect(logMessage).toContain( - 'Updated OpenSpec instructions (openspec/AGENTS.md' - ); - expect(logMessage).toContain('AGENTS.md (created)'); - expect(logMessage).toContain('Updated AI tool files: CLAUDE.md'); - consoleSpy.mockRestore(); + it('should update AGENTS.md', async () => { + // Remove openspec directory from beforeEach + await fs.rm(path.join(testDir, 'openspec'), { recursive: true, force: true }); + + const openspecPath = path.join(testDir, '.openspec'); + await fs.mkdir(openspecPath, { recursive: true }); + + const result = await runUpdate(testDir); + + expect(result.updatedFiles).toContain('AGENTS.md'); + const agentsContent = await fs.readFile(path.join(openspecPath, 'AGENTS.md'), 'utf-8'); + expect(agentsContent).toContain('# OpenSpec Instructions'); }); it('should update only existing QWEN.md file', async () => { diff --git a/test/core/view.test.ts b/test/core/view.test.ts index b8b56df1e..ab71820a6 100644 --- a/test/core/view.test.ts +++ b/test/core/view.test.ts @@ -2,128 +2,58 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { promises as fs } from 'fs'; import path from 'path'; import os from 'os'; -import { ViewCommand } from '../../src/core/view.js'; +import { getViewData } from '../../src/core/view-logic.js'; -const stripAnsi = (input: string): string => input.replace(/\u001b\[[0-9;]*m/g, ''); - -describe('ViewCommand', () => { +describe('getViewData', () => { let tempDir: string; - let originalLog: typeof console.log; - let logOutput: string[] = []; beforeEach(async () => { tempDir = path.join(os.tmpdir(), `openspec-view-test-${Date.now()}`); await fs.mkdir(tempDir, { recursive: true }); - - originalLog = console.log; - console.log = (...args: any[]) => { - logOutput.push(args.join(' ')); - }; - - logOutput = []; }); afterEach(async () => { - console.log = originalLog; await fs.rm(tempDir, { recursive: true, force: true }); }); - it('shows changes with no tasks in Draft section, not Completed', async () => { - const changesDir = path.join(tempDir, 'openspec', 'changes'); - await fs.mkdir(changesDir, { recursive: true }); - - // Empty change (no tasks.md) - should show in Draft - await fs.mkdir(path.join(changesDir, 'empty-change'), { recursive: true }); - - // Change with tasks.md but no tasks - should show in Draft - await fs.mkdir(path.join(changesDir, 'no-tasks-change'), { recursive: true }); - await fs.writeFile(path.join(changesDir, 'no-tasks-change', 'tasks.md'), '# Tasks\n\nNo tasks yet.'); - - // Change with all tasks complete - should show in Completed - await fs.mkdir(path.join(changesDir, 'completed-change'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'completed-change', 'tasks.md'), - '- [x] Done task\n' - ); - - const viewCommand = new ViewCommand(); - await viewCommand.execute(tempDir); - - const output = logOutput.map(stripAnsi).join('\n'); - - // Draft section should contain empty and no-tasks changes - expect(output).toContain('Draft Changes'); - expect(output).toContain('empty-change'); - expect(output).toContain('no-tasks-change'); - - // Completed section should only contain changes with all tasks done - expect(output).toContain('Completed Changes'); - expect(output).toContain('completed-change'); - - // Verify empty-change and no-tasks-change are in Draft section (marked with β—‹) - const draftLines = logOutput - .map(stripAnsi) - .filter((line) => line.includes('β—‹')); - const draftNames = draftLines.map((line) => line.trim().replace('β—‹ ', '')); - expect(draftNames).toContain('empty-change'); - expect(draftNames).toContain('no-tasks-change'); + it('should fail if OpenSpec is not initialized', async () => { + await expect(getViewData(tempDir)).rejects.toThrow(/No OpenSpec directory found/); + }); - // Verify completed-change is in Completed section (marked with βœ“) - const completedLines = logOutput - .map(stripAnsi) - .filter((line) => line.includes('βœ“')); - const completedNames = completedLines.map((line) => line.trim().replace('βœ“ ', '')); - expect(completedNames).toContain('completed-change'); - expect(completedNames).not.toContain('empty-change'); - expect(completedNames).not.toContain('no-tasks-change'); + it('should return empty dashboard data for new project', async () => { + const openspecPath = path.join(tempDir, 'openspec'); + await fs.mkdir(openspecPath, { recursive: true }); + await fs.mkdir(path.join(openspecPath, 'changes'), { recursive: true }); + await fs.mkdir(path.join(openspecPath, 'specs'), { recursive: true }); + + const data = await getViewData(tempDir); + expect(data.changes.draft).toEqual([]); + expect(data.changes.active).toEqual([]); + expect(data.changes.completed).toEqual([]); + expect(data.specs).toEqual([]); }); - it('sorts active changes by completion percentage ascending with deterministic tie-breakers', async () => { - const changesDir = path.join(tempDir, 'openspec', 'changes'); + it('should categorize changes correctly', async () => { + const openspecPath = path.join(tempDir, 'openspec'); + const changesDir = path.join(openspecPath, 'changes'); await fs.mkdir(changesDir, { recursive: true }); - await fs.mkdir(path.join(changesDir, 'gamma-change'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'gamma-change', 'tasks.md'), - '- [x] Done\n- [x] Also done\n- [ ] Not done\n' - ); - - await fs.mkdir(path.join(changesDir, 'beta-change'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'beta-change', 'tasks.md'), - '- [x] Task 1\n- [ ] Task 2\n' - ); + // Draft (no tasks) + await fs.mkdir(path.join(changesDir, 'draft-change'), { recursive: true }); - await fs.mkdir(path.join(changesDir, 'delta-change'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'delta-change', 'tasks.md'), - '- [x] Task 1\n- [ ] Task 2\n' - ); + // Active (partially complete) + const activeDir = path.join(changesDir, 'active-change'); + await fs.mkdir(activeDir, { recursive: true }); + await fs.writeFile(path.join(activeDir, 'tasks.md'), '- [x] done\n- [ ] pending'); - await fs.mkdir(path.join(changesDir, 'alpha-change'), { recursive: true }); - await fs.writeFile( - path.join(changesDir, 'alpha-change', 'tasks.md'), - '- [ ] Task 1\n- [ ] Task 2\n' - ); + // Completed + const doneDir = path.join(changesDir, 'done-change'); + await fs.mkdir(doneDir, { recursive: true }); + await fs.writeFile(path.join(doneDir, 'tasks.md'), '- [x] all done'); - const viewCommand = new ViewCommand(); - await viewCommand.execute(tempDir); - - const activeLines = logOutput - .map(stripAnsi) - .filter(line => line.includes('β—‰')); - - const activeOrder = activeLines.map(line => { - const afterBullet = line.split('β—‰')[1] ?? ''; - return afterBullet.split('[')[0]?.trim(); - }); - - expect(activeOrder).toEqual([ - 'alpha-change', - 'beta-change', - 'delta-change', - 'gamma-change' - ]); + const data = await getViewData(tempDir); + expect(data.changes.draft.map(c => c.name)).toContain('draft-change'); + expect(data.changes.active.map(c => c.name)).toContain('active-change'); + expect(data.changes.completed.map(c => c.name)).toContain('done-change'); }); -}); - +}); \ No newline at end of file diff --git a/test/mcp/prompts.test.ts b/test/mcp/prompts.test.ts new file mode 100644 index 000000000..14d268d06 --- /dev/null +++ b/test/mcp/prompts.test.ts @@ -0,0 +1,46 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { registerPrompts } from '../../src/mcp/prompts.js'; +import { FastMCP } from 'fastmcp'; + +class MockFastMCP { + prompts: any[] = []; + addPrompt(prompt: any) { + this.prompts.push(prompt); + } +} + +describe('MCP Prompts', () => { + let server: MockFastMCP; + + beforeEach(() => { + server = new MockFastMCP(); + }); + + it('registers expected prompts', () => { + registerPrompts(server as unknown as FastMCP); + + const names = server.prompts.map(p => p.name); + expect(names).toContain('openspec_proposal'); + expect(names).toContain('openspec_apply'); + expect(names).toContain('openspec_archive'); + }); + + it('prompts load function returns messages with MCP tool instructions', async () => { + registerPrompts(server as unknown as FastMCP); + + const proposalPrompt = server.prompts.find(p => p.name === 'openspec_proposal'); + const result = await proposalPrompt.load(); + + expect(result.messages).toHaveLength(1); + const text = result.messages[0].content.text; + + // Check for replacement of CLI commands with MCP tools + expect(text).toContain('openspec_list_changes'); + expect(text).not.toContain('openspec list'); // Should be replaced/not present as primary instruction ideally, + // but regex replacement might leave some if strictly looking for full command lines. + // The toMcpInstructions function replaces specific patterns. + + // Check for specific replacements + expect(text).toContain('openspec_validate_change(name: "'); + }); +}); diff --git a/test/mcp/resources.test.ts b/test/mcp/resources.test.ts new file mode 100644 index 000000000..02b2950d9 --- /dev/null +++ b/test/mcp/resources.test.ts @@ -0,0 +1,34 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { registerResources } from '../../src/mcp/resources.js'; +import { FastMCP } from 'fastmcp'; + +class MockFastMCP { + resources: any[] = []; + addResourceTemplate(resource: any) { + this.resources.push(resource); + } +} + +describe('MCP Resources', () => { + let server: MockFastMCP; + + beforeEach(() => { + server = new MockFastMCP(); + }); + + it('registers expected resource templates', () => { + registerResources(server as unknown as FastMCP); + + const templates = server.resources.map(r => r.uriTemplate); + expect(templates).toContain('openspec://changes/{name}/proposal'); + expect(templates).toContain('openspec://changes/{name}/tasks'); + expect(templates).toContain('openspec://specs/{id}'); + }); + + it('resource templates have load functions', () => { + registerResources(server as unknown as FastMCP); + server.resources.forEach(r => { + expect(r.load).toBeInstanceOf(Function); + }); + }); +}); diff --git a/test/mcp/server.test.ts b/test/mcp/server.test.ts new file mode 100644 index 000000000..9a98e9502 --- /dev/null +++ b/test/mcp/server.test.ts @@ -0,0 +1,11 @@ +import { describe, it, expect } from 'vitest'; +import { OpenSpecMCPServer } from '../../src/mcp/server.js'; + +describe('OpenSpecMCPServer', () => { + it('can be instantiated', () => { + const server = new OpenSpecMCPServer(); + expect(server).toBeDefined(); + // accessing private 'server' property is not easy in TS without casting + expect((server as any).server).toBeDefined(); + }); +}); diff --git a/test/mcp/tools.test.ts b/test/mcp/tools.test.ts new file mode 100644 index 000000000..bde2e159e --- /dev/null +++ b/test/mcp/tools.test.ts @@ -0,0 +1,55 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { registerTools } from '../../src/mcp/tools.js'; +import { FastMCP } from 'fastmcp'; + +// Mock FastMCP since we only need the addTool method +class MockFastMCP { + tools: any[] = []; + addTool(tool: any) { + this.tools.push(tool); + } +} + +describe('MCP Tools', () => { + let server: MockFastMCP; + + beforeEach(() => { + server = new MockFastMCP(); + }); + + it('registers all expected tools', () => { + registerTools(server as unknown as FastMCP); + + const toolNames = server.tools.map(t => t.name); + expect(toolNames).toContain('openspec_init'); + expect(toolNames).toContain('openspec_update'); + expect(toolNames).toContain('openspec_view'); + expect(toolNames).toContain('openspec_create_change'); + expect(toolNames).toContain('openspec_list_changes'); + expect(toolNames).toContain('openspec_list_specs'); + expect(toolNames).toContain('openspec_show_change'); + expect(toolNames).toContain('openspec_show_spec'); + expect(toolNames).toContain('openspec_validate_change'); + expect(toolNames).toContain('openspec_validate_all'); + expect(toolNames).toContain('openspec_archive_change'); + expect(toolNames).toContain('openspec_config_get'); + expect(toolNames).toContain('openspec_config_set'); + expect(toolNames).toContain('openspec_config_list'); + expect(toolNames).toContain('openspec_artifact_status'); + expect(toolNames).toContain('openspec_artifact_instructions'); + expect(toolNames).toContain('openspec_apply_instructions'); + expect(toolNames).toContain('openspec_list_schemas'); + }); + + it('openspec_create_change has correct schema', () => { + registerTools(server as unknown as FastMCP); + const tool = server.tools.find(t => t.name === 'openspec_create_change'); + expect(tool).toBeDefined(); + expect(tool.parameters).toBeDefined(); + // Zod schema parsing is internal, but we can check if it exists + }); + + // We can add integration tests here by invoking tool.execute(args) + // but that would duplicate core logic tests. + // The main value here is verifying the mapping exists. +});