diff --git a/.github/workflows/individual-tool-integration-tests.yml b/.github/workflows/individual-tool-integration-tests.yml index 5171458ae..7d452022c 100644 --- a/.github/workflows/individual-tool-integration-tests.yml +++ b/.github/workflows/individual-tool-integration-tests.yml @@ -615,9 +615,9 @@ jobs: path: verify-deploy-test-results.json retention-days: 7 - # Test build-image with multi-language scenarios - test-build-image: - name: build-image Multi-Language Tests (${{ matrix.os }}) + # Test build-image-context with multi-language scenarios + test-build-image-context: + name: build-image-context Multi-Language Tests (${{ matrix.os }}) runs-on: ${{ matrix.os }} timeout-minutes: 20 strategy: @@ -670,7 +670,7 @@ jobs: ls -la test/fixtures/build-scenarios/dotnet/ echo "βœ… All test fixtures found" - - name: Run build-image integration test (Linux only) + - name: Run build-image-context integration test (Linux only) if: runner.os == 'Linux' id: build-tests run: | @@ -692,7 +692,7 @@ jobs: echo "# πŸ”¨ Multi-Language Build Integration Tests" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "## Tool Tested" >> $GITHUB_STEP_SUMMARY - echo "- \`build-image\` - Builds Docker images from Dockerfiles" >> $GITHUB_STEP_SUMMARY + echo "- \`build-image-context\` - Returns Docker build commands for agents to execute" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "**Platform:** ${{ matrix.os }}" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -704,10 +704,10 @@ jobs: echo "**Expected:** \`//./pipe/docker_engine\` (Windows named pipe)" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "**Note:** Windows runners only validate socket path. Full build tests run on Linux." 
>> $GITHUB_STEP_SUMMARY - elif [ -f "build-image-test-results.json" ]; then - TOTAL=$(jq -r '.total' build-image-test-results.json) - PASSED=$(jq -r '.passed' build-image-test-results.json) - FAILED=$(jq -r '.failed' build-image-test-results.json) + elif [ -f "build-image-context-test-results.json" ]; then + TOTAL=$(jq -r '.total' build-image-context-test-results.json) + PASSED=$(jq -r '.passed' build-image-context-test-results.json) + FAILED=$(jq -r '.failed' build-image-context-test-results.json) echo "## Results" >> $GITHUB_STEP_SUMMARY echo "- **Total Tests**: $TOTAL" >> $GITHUB_STEP_SUMMARY @@ -716,7 +716,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY echo "## Test Cases" >> $GITHUB_STEP_SUMMARY - jq -r '.results[] | "- " + (if .passed then "βœ…" else "❌" end) + " **" + .name + "**" + (if .imageSize then " (" + ((.imageSize / 1000000) | floor | tostring) + "MB)" else "" end)' build-image-test-results.json >> $GITHUB_STEP_SUMMARY + jq -r '.results[] | "- " + (if .passed then "βœ…" else "❌" end) + " **" + .name + "**" + (if .imageSize then " (" + ((.imageSize / 1000000) | floor | tostring) + "MB)" else "" end)' build-image-context-test-results.json >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY if [ "${{ steps.build-tests.outcome }}" = "success" ]; then @@ -725,7 +725,7 @@ jobs: echo "### ❌ Some Tests Failed" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "**Failed test details:**" >> $GITHUB_STEP_SUMMARY - jq -r '.results[] | select(.passed == false) | "- **" + .name + "**: " + .message' build-image-test-results.json >> $GITHUB_STEP_SUMMARY + jq -r '.results[] | select(.passed == false) | "- **" + .name + "**: " + .message' build-image-context-test-results.json >> $GITHUB_STEP_SUMMARY fi echo "" >> $GITHUB_STEP_SUMMARY @@ -746,8 +746,8 @@ jobs: if: always() && runner.os == 'Linux' uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v4 with: - name: build-image-test-results-${{ matrix.os }} - path: 
build-image-test-results.json + name: build-image-context-test-results-${{ matrix.os }} + path: build-image-context-test-results.json retention-days: 7 # Complete E2E Workflow Test - Tests entire containerization pipeline @@ -831,7 +831,7 @@ jobs: echo "" >> $GITHUB_STEP_SUMMARY echo "## Workflow Pipeline Tested" >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - echo "analyze-repo β†’ generate-dockerfile β†’ build-image β†’ scan-image" >> $GITHUB_STEP_SUMMARY + echo "analyze-repo β†’ generate-dockerfile β†’ build-image-context β†’ scan-image" >> $GITHUB_STEP_SUMMARY echo " β†’ tag-image β†’ prepare-cluster β†’ push-image β†’ deploy β†’ verify-deploy" >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -873,7 +873,7 @@ jobs: echo "## Tools Tested" >> $GITHUB_STEP_SUMMARY echo "1. **analyze-repo** - Repository analysis" >> $GITHUB_STEP_SUMMARY echo "2. **generate-dockerfile** - Dockerfile generation" >> $GITHUB_STEP_SUMMARY - echo "3. **build-image** - Docker image building" >> $GITHUB_STEP_SUMMARY + echo "3. **build-image-context** - Docker build command generation" >> $GITHUB_STEP_SUMMARY echo "4. **scan-image** - Security vulnerability scanning" >> $GITHUB_STEP_SUMMARY echo "5. **prepare-cluster** - Kubernetes cluster setup" >> $GITHUB_STEP_SUMMARY echo "6. **tag-image** - Image tagging" >> $GITHUB_STEP_SUMMARY diff --git a/README.md b/README.md index 9a721b416..35bd302be 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ Restart VS Code to enable the MCP server in GitHub Copilot. 
For direct tool usage without MCP protocol (e.g., VS Code extensions, programmatic access): ```typescript -import { analyzeRepo, buildImage, scanImage } from 'containerization-assist-mcp/sdk'; +import { analyzeRepo, buildImageContext, scanImage } from 'containerization-assist-mcp/sdk'; // Simple function calls - no MCP server needed const analysis = await analyzeRepo({ repositoryPath: './myapp' }); @@ -85,7 +85,8 @@ if (analysis.ok) { console.log('Detected:', analysis.value.modules); } -const build = await buildImage({ path: './myapp', imageName: 'myapp:v1' }); +const buildContext = await buildImageContext({ path: './myapp', imageName: 'myapp:v1', platform: 'linux/amd64' }); +// buildContext returns build commands for the agent to execute const scan = await scanImage({ imageId: 'myapp:v1' }); ``` @@ -212,7 +213,7 @@ The server provides 13 MCP tools organized by functionality: ### Image Operations | Tool | Description | |------|-------------| -| `build-image` | Build Docker images from Dockerfiles with security analysis | +| `build-image-context` | Prepare Docker build context with security analysis and return build commands | | `scan-image` | Scan Docker images for security vulnerabilities with remediation guidance (uses Trivy CLI) | | `tag-image` | Tag Docker images with version and registry information | | `push-image` | Push Docker images to a registry | diff --git a/docs/adr/001-result-pattern.md b/docs/adr/001-result-pattern.md index e5a09c230..4760ea841 100644 --- a/docs/adr/001-result-pattern.md +++ b/docs/adr/001-result-pattern.md @@ -17,14 +17,14 @@ type Result = | { ok: false; error: E }; // Usage example -async function buildImage(config: BuildConfig): Promise> { +async function buildImageContext(config: BuildConfig): Promise> { const docker = await createDockerClient(); if (!docker.ok) { return Failure('Docker client unavailable', docker.error); } - // ... build logic - return Success(image); + // ... 
prepare build context + return Success(buildContext); } ``` diff --git a/docs/adr/002-tool-interface.md b/docs/adr/002-tool-interface.md index b5835bd07..9c7ac6258 100644 --- a/docs/adr/002-tool-interface.md +++ b/docs/adr/002-tool-interface.md @@ -24,12 +24,12 @@ interface Tool, TOut> { ) => Promise>; } -// src/tools/build-image/tool.ts -const tool: Tool = { - name: 'build-image', - description: 'Build Docker image with progress tracking', +// src/tools/build-image-context/tool.ts +const tool: Tool = { + name: 'build-image-context', + description: 'Prepare Docker build context with build command', version: '2.0.0', - schema: buildImageSchema, + schema: buildImageContextSchema, run: async (input, ctx) => { // Implementation return Success(result); @@ -43,7 +43,7 @@ export default tool; ``` src/tools/ -β”œβ”€β”€ build-image/ +β”œβ”€β”€ build-image-context/ β”‚ β”œβ”€β”€ tool.ts # Tool implementation β”‚ β”œβ”€β”€ schema.ts # Zod schema β”‚ └── index.ts # Re-export diff --git a/docs/adr/005-mcp-integration.md b/docs/adr/005-mcp-integration.md index 9a12b9a83..2d6aa4db5 100644 --- a/docs/adr/005-mcp-integration.md +++ b/docs/adr/005-mcp-integration.md @@ -95,7 +95,7 @@ export function createMCPServer( 2. **Developer Experience** - Natural language interface: "Build and scan my Java application" - - AI automatically routes to correct tools (analyze-repo β†’ generate-dockerfile β†’ build-image β†’ scan-image) + - AI automatically routes to correct tools (analyze-repo β†’ generate-dockerfile β†’ build-image-context β†’ scan-image) - Real-time progress updates during long operations 3. 
**Discoverability** diff --git a/docs/adr/006-infrastructure-organization.md b/docs/adr/006-infrastructure-organization.md index d7ffdeb61..0d3d02e44 100644 --- a/docs/adr/006-infrastructure-organization.md +++ b/docs/adr/006-infrastructure-organization.md @@ -202,7 +202,7 @@ src/ β”œβ”€β”€ image-building/ β”‚ β”œβ”€β”€ docker-client.ts β”‚ β”œβ”€β”€ generate-dockerfile.ts -β”‚ β”œβ”€β”€ build-image.ts +β”‚ β”œβ”€β”€ build-image-context.ts β”‚ └── scan-image.ts └── deployment/ β”œβ”€β”€ kubernetes-client.ts @@ -282,7 +282,7 @@ src/ β”‚ β”œβ”€β”€ tool-helpers.ts # Tool execution helpers β”‚ └── platform.ts # Platform detection β”œβ”€β”€ tools/ # MCP tools -β”‚ β”œβ”€β”€ build-image/ +β”‚ β”œβ”€β”€ build-image-context/ β”‚ β”‚ β”œβ”€β”€ tool.ts # Uses @infra/docker + @lib/* β”‚ β”‚ β”œβ”€β”€ schema.ts β”‚ β”‚ └── index.ts @@ -311,9 +311,9 @@ src/ } ``` -### Example: Build Image Tool +### Example: Build Image Context Tool -**src/tools/build-image/tool.ts:** +**src/tools/build-image-context/tool.ts:** ```typescript // Infrastructure imports import { createDockerClient, type DockerBuildOptions } from '@infra/docker/client'; @@ -329,12 +329,12 @@ import type { ToolContext } from '@mcp/context'; import { type Result, Success, Failure } from '@types'; // Local imports -import { type BuildImageParams, buildImageSchema } from './schema'; +import { type BuildImageContextParams, buildImageContextSchema } from './schema'; async function run( - input: BuildImageParams, + input: BuildImageContextParams, ctx: ToolContext -): Promise> { +): Promise> { // 1. Use @lib utilities for validation const pathValidation = await validatePathOrFail(input.path); if (!pathValidation.ok) return pathValidation; @@ -346,7 +346,7 @@ async function run( const docker = dockerResult.value; // 3. Orchestrate infrastructure and utilities - const buildResult = await docker.buildImage({ ... }); + const buildContext = await docker.prepareBuildContext({ ... }); return Success({ ... 
}); } @@ -393,7 +393,7 @@ The architecture is enforced through: - [CLAUDE.md](../../CLAUDE.md) - Path alias conventions and import guidelines - [src/infra/docker/client.ts](../../src/infra/docker/client.ts) - Example infrastructure client - [src/lib/validation-helpers.ts](../../src/lib/validation-helpers.ts) - Example pure utility -- [src/tools/build-image/tool.ts](../../src/tools/build-image/tool.ts) - Example tool orchestration +- [src/tools/build-image-context/tool.ts](../../src/tools/build-image-context/tool.ts) - Example tool orchestration - [ADR-002: Unified Tool Interface](./002-tool-interface.md) - Related decision on tool structure ## Related Decisions diff --git a/docs/adr/007-sdk-decoupling.md b/docs/adr/007-sdk-decoupling.md index 128472d80..391a96f98 100644 --- a/docs/adr/007-sdk-decoupling.md +++ b/docs/adr/007-sdk-decoupling.md @@ -98,7 +98,7 @@ This decision complements, not replaces, ADR-005 (MCP Protocol Integration): - Only pull in what you need 4. **Simpler Mental Model** - - SDK: `import { buildImage } from 'pkg/sdk'; await buildImage({...})` + - SDK: `import { buildImageContext } from 'pkg/sdk'; await buildImageContext({...})` - MCP: Full server with protocol, transport, notifications - Clear separation of concerns @@ -220,14 +220,14 @@ See `docs/implementation-plans/sdk-decoupling-from-mcp-detailed.md` for step-by- import { analyzeRepo, generateDockerfile, - buildImage, + buildImageContext, scanImage, // ... 
all 11 tools } from 'containerization-assist-mcp/sdk'; // Simple function calls const analysis = await analyzeRepo({ repositoryPath: './myapp' }); -const build = await buildImage({ path: './myapp', imageName: 'myapp:v1' }); +const buildCtx = await buildImageContext({ path: './myapp', imageName: 'myapp:v1' }); // With options const result = await scanImage( @@ -240,7 +240,7 @@ const result = await scanImage( // Advanced: direct tool access import { tools, executeTool } from 'containerization-assist-mcp/sdk'; -const result = await executeTool(tools.buildImage, params, options); +const result = await executeTool(tools.buildImageContext, params, options); ``` ## Related Decisions diff --git a/docs/examples/README.md b/docs/examples/README.md index 20abfbb2d..a11314aa8 100644 --- a/docs/examples/README.md +++ b/docs/examples/README.md @@ -54,28 +54,27 @@ import { createToolHandler } from 'containerization-assist'; // Using a literal tool name gives you fully-typed callbacks! server.tool( - 'build-image', - buildImageTool.description, - buildImageTool.inputSchema, - createToolHandler(app, 'build-image', { + 'build-image-context', + buildImageContextTool.description, + buildImageContextTool.inputSchema, + createToolHandler(app, 'build-image-context', { onSuccess: (result, toolName, params) => { - // βœ… result is typed as BuildImageResult - console.log(`Built: ${result.imageId}, size: ${result.size} bytes`); + // βœ… result is typed as BuildImageContextResult + console.log(`Build command: ${result.nextAction.buildCommand.command}`); - // βœ… params is typed as BuildImageInput + // βœ… params is typed as BuildImageContextInput console.log(`Image name: ${params.imageName}`); // βœ… Full IntelliSense support! 
telemetry.track({ tool: toolName, - imageSize: result.size, - buildTime: result.buildTime, - tags: result.tags + imageName: params.imageName, + contextPath: params.path }); }, onError: (error, toolName, params) => { // βœ… params is also typed in error handler - console.error(`Failed to build ${params.imageName}`); + console.error(`Failed to prepare build for ${params.imageName}`); } }) ); @@ -196,7 +195,7 @@ import { analyzeRepoTool, // Repository analysis and framework detection generateDockerfileTool, // AI-powered Dockerfile generation fixDockerfileTool, // Fix and optimize existing Dockerfiles - buildImageTool, // Docker image building with progress + buildImageContextTool, // Docker build context provider (returns command) scanImageTool, // Security vulnerability scanning tagImageTool, // Docker image tagging pushImageTool, // Push images to registry @@ -214,7 +213,7 @@ Use these string names when defining aliases: - `'analyze-repo'` - Repository analysis - `'generate-dockerfile'` - Dockerfile generation - `'fix-dockerfile'` - Dockerfile fixes -- `'build-image'` - Docker build +- `'build-image-context'` - Docker build context provider - `'scan-image'` - Security scanning - `'tag-image'` - Image tagging - `'push-image'` - Registry push diff --git a/docs/guides/policy-example/PLATFORM_AND_TAG_POLICY_USAGE.md b/docs/guides/policy-example/PLATFORM_AND_TAG_POLICY_USAGE.md index 39b47cfc6..40300cc0f 100644 --- a/docs/guides/policy-example/PLATFORM_AND_TAG_POLICY_USAGE.md +++ b/docs/guides/policy-example/PLATFORM_AND_TAG_POLICY_USAGE.md @@ -169,10 +169,10 @@ All 11 tests pass: 2. Tags are applied when running `docker build -t myimage:demo` 3. 
The LABEL serves as documentation and policy enforcement -When using the `build-image` tool, make sure to specify the tag: +When using the `build-image-context` tool, make sure to specify the tag: ```bash -# The build-image tool should tag the image as "demo" +# The build-image-context tool should tag the image as "demo" # Policy ensures the Dockerfile has LABEL tag="demo" for consistency ``` diff --git a/docs/guides/vscode-extension-integration.md b/docs/guides/vscode-extension-integration.md index a13ad8dc7..1b9211d79 100644 --- a/docs/guides/vscode-extension-integration.md +++ b/docs/guides/vscode-extension-integration.md @@ -22,7 +22,7 @@ The SDK provides everything needed to create VS Code Language Model Tools: | Export | Purpose | |--------|---------| -| `analyzeRepo()`, `buildImage()`, etc. | Core tool functions | +| `analyzeRepo()`, `buildImageContext()`, etc. | Core tool functions | | `jsonSchemas` | JSON Schemas for package.json `inputSchema` | | `toolMetadata` | Descriptions, icons, confirmations for tool config | | `resultFormatters` | Convert results to LLM-friendly text | @@ -196,7 +196,7 @@ Register all tools in your extension's `activate` function: // src/extension.ts import * as vscode from 'vscode'; import { AnalyzeRepoTool } from './tools/analyze-repo'; -import { BuildImageTool } from './tools/build-image'; +import { BuildImageContextTool } from './tools/build-image-context'; import { GenerateDockerfileTool } from './tools/generate-dockerfile'; import { FixDockerfileTool } from './tools/fix-dockerfile'; import { ScanImageTool } from './tools/scan-image'; @@ -211,7 +211,7 @@ export function activate(context: vscode.ExtensionContext) { // Register all containerization tools const tools = [ ['analyze_repo', new AnalyzeRepoTool()], - ['build_image', new BuildImageTool()], + ['build_image_context', new BuildImageContextTool()], ['generate_dockerfile', new GenerateDockerfileTool()], ['fix_dockerfile', new FixDockerfileTool()], ['scan_image', new 
ScanImageTool()], @@ -242,20 +242,20 @@ export function deactivate() {} ```typescript import * as vscode from 'vscode'; import { - buildImage, + buildImageContext, toolMetadata, resultFormatters, createAbortSignalFromToken, formatErrorForLLM, resolveWorkspacePath, - type BuildImageInput, + type BuildImageContextInput, } from 'containerization-assist-mcp/sdk'; -export class BuildImageTool - implements vscode.LanguageModelTool +export class BuildImageContextTool + implements vscode.LanguageModelTool { async invoke( - options: vscode.LanguageModelToolInvocationOptions, + options: vscode.LanguageModelToolInvocationOptions, token: vscode.CancellationToken ): Promise { const { signal, dispose } = createAbortSignalFromToken(token); @@ -269,12 +269,12 @@ export class BuildImageTool input.path = resolveWorkspacePath(input.path, workspaceRoot); } - // Show progress (VS Code doesn't have built-in progress for tools) - const result = await buildImage(input, { + // Get build context (returns command to execute, doesn't build directly) + const result = await buildImageContext(input, { signal, onProgress: (message, progress, total) => { // Could show in output channel if needed - console.log(`Build: ${message} (${progress}/${total})`); + console.log(`Build context: ${message} (${progress}/${total})`); }, }); @@ -284,7 +284,7 @@ export class BuildImageTool return new vscode.LanguageModelToolResult([ new vscode.LanguageModelTextPart( - resultFormatters.buildImage(result.value) + resultFormatters.buildImageContext(result.value) ), ]); } finally { @@ -293,10 +293,10 @@ export class BuildImageTool } async prepareInvocation( - options: vscode.LanguageModelToolInvocationPrepareOptions, + options: vscode.LanguageModelToolInvocationPrepareOptions, _token: vscode.CancellationToken ): Promise { - const meta = toolMetadata.buildImage; + const meta = toolMetadata.buildImageContext; const { path, imageName } = options.input; return { @@ -469,7 +469,7 @@ describe('AnalyzeRepoTool', () => { | 
`analyzeRepo(input, options?)` | Analyze repository structure | | `generateDockerfile(input, options?)` | Generate Dockerfile plan | | `fixDockerfile(input, options?)` | Fix existing Dockerfile | -| `buildImage(input, options?)` | Build Docker image | +| `buildImageContext(input, options?)` | Get build context/command | | `scanImage(input, options?)` | Scan for vulnerabilities | | `tagImage(input, options?)` | Tag Docker image | | `pushImage(input, options?)` | Push to registry | @@ -485,7 +485,7 @@ import { jsonSchemas } from 'containerization-assist-mcp/sdk'; // Access individual schemas jsonSchemas.analyzeRepo // JSON Schema for analyze-repo input -jsonSchemas.buildImage // JSON Schema for build-image input +jsonSchemas.buildImageContext // JSON Schema for build-image-context input // ... etc ``` @@ -556,7 +556,7 @@ import { standardWorkflow, toolMetadata } from 'containerization-assist-mcp/sdk' // standardWorkflow = [ // 'analyzeRepo', // 'generateDockerfile', -// 'buildImage', +// 'buildImageContext', // 'scanImage', // 'tagImage', // 'pushImage', @@ -567,7 +567,7 @@ import { standardWorkflow, toolMetadata } from 'containerization-assist-mcp/sdk' // Each tool's suggestedNextTools points to the next step toolMetadata.analyzeRepo.suggestedNextTools // ['generate_dockerfile'] -toolMetadata.buildImage.suggestedNextTools // ['scan_image', 'tag_image'] +toolMetadata.buildImageContext.suggestedNextTools // ['scan_image', 'tag_image'] ``` --- @@ -609,7 +609,7 @@ Ensure proper cleanup when operations are cancelled: const { signal, dispose } = createAbortSignalFromToken(token); try { - const result = await buildImage(input, { signal }); + const result = await buildImageContext(input, { signal }); // ... 
} catch (error) { if (signal.aborted) { diff --git a/scripts/integration-test-build-image.ts b/scripts/integration-test-build-image.ts index ac6329330..8cbb376be 100644 --- a/scripts/integration-test-build-image.ts +++ b/scripts/integration-test-build-image.ts @@ -4,9 +4,9 @@ * Tests the complete flow of: * 1. Building Java application with multi-stage Dockerfile * 2. Building .NET application with multi-stage Dockerfile - * 3. Verifying build success, image metadata, and layer counts - * 4. Testing build arguments injection - * 5. Validating image sizes are reasonable + * 3. Verifying build context output, security analysis, and build command generation + * 4. Validating security warnings and BuildKit recommendations + * 5. Ensuring generated docker build command passes validation and includes expected flags * * Prerequisites: * - Docker installed and running @@ -16,13 +16,13 @@ * tsx scripts/integration-test-build-image.ts */ +import { join } from 'node:path'; +import { existsSync, writeFileSync } from 'node:fs'; +import { execSync } from 'node:child_process'; + import { createToolContext } from '../dist/src/mcp/context.js'; -import buildImageTool from '../dist/src/tools/build-image/tool.js'; -import { execSync } from 'child_process'; +import buildImageContextTool from '../dist/src/tools/build-image-context/tool.js'; import { createLogger } from '../dist/src/lib/logger.js'; -import { existsSync } from 'fs'; -import { join } from 'path'; -import { writeFileSync } from 'fs'; const logger = createLogger({ name: 'build-image-test', level: 'error' }); @@ -33,11 +33,12 @@ interface BuildTestCase { name: string; dockerContext: string; dockerfile?: string; + imageName?: string; tags: string[]; buildArgs?: Record; - expectedSize?: { min: number; max: number }; // bytes - expectedLayers?: { min: number; max?: number }; - shouldSucceed: boolean; + expectedWarnings?: string[]; + expectBuildKit?: boolean; + expectedCommandFlags?: string[]; description: string; } @@ -48,23 
+49,30 @@ interface TestResult { name: string; passed: boolean; message: string; - imageSize?: number; - layers?: number; - buildTime?: number; + warnings?: number; + buildKit?: boolean; + command?: string; } /** - * Test cases for multi-language builds + * Test cases for context-only builds + * + * These tests validate the build-image tool's context preparation output: + * - Security analysis (warnings) + * - BuildKit feature detection + * - Generated docker build command + * + * No actual Docker builds are performed - only output validation. */ const TEST_CASES: BuildTestCase[] = [ { - name: 'Java Multi-Stage Build', + name: 'Java Multi-Stage Context', dockerContext: 'test/fixtures/build-scenarios/java', tags: ['test-build:java-app'], - expectedSize: { min: 100_000_000, max: 400_000_000 }, // 100MB - 400MB for Alpine - expectedLayers: { min: 5, max: 30 }, - shouldSucceed: true, - description: 'Tests Java build with Eclipse Temurin JRE and multi-stage', + expectBuildKit: true, + // Java fixture uses pinned images and non-root user, so no security warnings expected + expectedCommandFlags: ['docker build', '-t test-build:java-app'], + description: 'Validates Java multi-stage Dockerfile analysis and BuildKit recommendation', }, { name: 'Java with Build Args', @@ -73,175 +81,132 @@ const TEST_CASES: BuildTestCase[] = [ buildArgs: { VERSION: '2.0.0', }, - expectedSize: { min: 100_000_000, max: 400_000_000 }, // 100MB - 400MB for Alpine - shouldSucceed: true, - description: 'Tests Java build argument injection', + expectedCommandFlags: ['--build-arg VERSION=2.0.0'], + description: 'Validates build args are included in generated build command', }, { - name: '.NET Multi-Stage Build', + name: '.NET Multi-Stage Context', dockerContext: 'test/fixtures/build-scenarios/dotnet', tags: ['test-build:dotnet-app'], - expectedSize: { min: 80_000_000, max: 300_000_000 }, // 80MB - 300MB for Alpine - expectedLayers: { min: 5, max: 30 }, - shouldSucceed: true, - description: 'Tests 
.NET 8 build with ASP.NET runtime and multi-stage', + expectBuildKit: true, + // .NET fixture uses pinned images and non-root user, so no security warnings expected + expectedCommandFlags: ['docker build', '-t test-build:dotnet-app'], + description: 'Validates .NET multi-stage Dockerfile analysis and BuildKit recommendation', }, { - name: '.NET with Build Args', + name: '.NET Custom Image Name', dockerContext: 'test/fixtures/build-scenarios/dotnet', - tags: ['test-build:dotnet-args'], - buildArgs: { - VERSION: '3.0.0', - }, - expectedSize: { min: 80_000_000, max: 300_000_000 }, // 80MB - 300MB for Alpine - shouldSucceed: true, - description: 'Tests .NET build with version argument', + imageName: 'custom/dotnet-app', + tags: ['v3.1.0'], + expectedCommandFlags: ['-t custom/dotnet-app:v3.1.0'], + description: 'Validates imageName + tag composition in final build command', }, ]; /** - * Verify Docker is installed and running + * Check if Docker is available (optional - not required for context-only tests) */ -function verifyDockerInstalled(): boolean { - console.log(' Checking Docker...'); +function checkDockerAvailable(): boolean { try { const output = execSync('docker --version', { encoding: 'utf-8', stdio: 'pipe' }); - console.log(` βœ… Docker: ${output.trim()}`); - - // Verify Docker daemon is running - execSync('docker info', { stdio: 'pipe' }); - console.log(' βœ… Docker daemon is running'); + console.log(` ℹ️ Docker available: ${output.trim()}`); return true; } catch { - console.log(' ❌ Docker not found or not running'); + console.log(' ℹ️ Docker not available (not required for context tests)'); return false; } } /** - * Get image metadata (size, layers) + * Validate security warnings include expected IDs */ -function getImageMetadata(imageTag: string): { size: number; layers: number } | null { - try { - // Get image size - const sizeOutput = execSync( - `docker inspect --format='{{.Size}}' ${imageTag}`, - { encoding: 'utf-8', stdio: 'pipe' }, - ); - const size 
= parseInt(sizeOutput.trim(), 10); - - // Get layer count (count lines in JavaScript instead of shell pipeline) - const layersOutput = execSync( - `docker history ${imageTag} --format='{{.ID}}'`, - { encoding: 'utf-8', stdio: 'pipe' }, - ); - const layers = layersOutput - .split('\n') - .map((line) => line.trim()) - .filter((line) => line.length > 0).length; - - return { size, layers }; - } catch { - return null; +function validateWarnings( + testCase: BuildTestCase, + warnings: { id: string }[], +): { + passed: boolean; + messages: string[]; +} { + if (!testCase.expectedWarnings || testCase.expectedWarnings.length === 0) { + return { passed: true, messages: [] }; } -} -/** - * Format bytes to human-readable size - */ -function formatSize(bytes: number): string { - if (bytes >= 1_000_000_000) { - return `${(bytes / 1_000_000_000).toFixed(2)}GB`; - } - if (bytes >= 1_000_000) { - return `${(bytes / 1_000_000).toFixed(2)}MB`; - } - if (bytes >= 1_000) { - return `${(bytes / 1_000).toFixed(2)}KB`; + const warningIds = warnings.map((w) => w.id); + const missing = testCase.expectedWarnings.filter((id) => !warningIds.includes(id)); + if (missing.length === 0) { + return { passed: true, messages: [] }; } - return `${bytes}B`; + return { + passed: false, + messages: missing.map((id) => `Missing expected warning: ${id}`), + }; } /** - * Cleanup test images + * Validate build command contains expected flags */ -function cleanupTestImages(tags: string[]): void { - console.log(' Removing test images...'); - for (const tag of tags) { - try { - execSync(`docker rmi -f ${tag}`, { stdio: 'pipe' }); - } catch { - // Ignore cleanup errors - } +function validateCommandFlags( + testCase: BuildTestCase, + command: string, +): { + passed: boolean; + messages: string[]; +} { + if (!testCase.expectedCommandFlags || testCase.expectedCommandFlags.length === 0) { + return { passed: true, messages: [] }; } - console.log(` βœ… Cleaned up ${tags.length} test images`); + const missing = 
testCase.expectedCommandFlags.filter((flag) => !command.includes(flag)); + if (missing.length === 0) { + return { passed: true, messages: [] }; + } + return { + passed: false, + messages: missing.map((flag) => `Build command missing flag: ${flag}`), + }; } /** - * Validate image against expected constraints + * Create JSON summary payload for CI upload */ -function validateImage( - testCase: BuildTestCase, - metadata: { size: number; layers: number }, -): { passed: boolean; messages: string[] } { - const messages: string[] = []; - let passed = true; - - // Validate size - if (testCase.expectedSize) { - const { min, max } = testCase.expectedSize; - if (metadata.size < min) { - messages.push(`Image size ${formatSize(metadata.size)} is smaller than expected min ${formatSize(min)}`); - passed = false; - } - if (metadata.size > max) { - messages.push(`Image size ${formatSize(metadata.size)} exceeds expected max ${formatSize(max)}`); - passed = false; - } - } - - // Validate layers - if (testCase.expectedLayers) { - const { min, max } = testCase.expectedLayers; - if (metadata.layers < min) { - messages.push(`Layer count ${metadata.layers} is less than expected min ${min}`); - passed = false; - } - if (max !== undefined && metadata.layers > max) { - messages.push(`Layer count ${metadata.layers} exceeds expected max ${max}`); - passed = false; - } - } +function writeResultsSummary(results: TestResult[]) { + const summary = { + total: results.length, + passed: results.filter((r) => r.passed).length, + failed: results.filter((r) => !r.passed).length, + timestamp: new Date().toISOString(), + results, + }; - return { passed, messages }; + writeFileSync('build-image-context-test-results.json', JSON.stringify(summary, null, 2)); } /** * Main test execution */ async function main() { - console.log('πŸ”¨ Testing build-image with Multi-Language Scenarios\n'); + console.log('πŸ”¨ Testing build-image context generation scenarios\n'); console.log('='.repeat(60)); const results: 
TestResult[] = []; let passCount = 0; let failCount = 0; - const builtTags: string[] = []; // ───────────────────────────────────────────────────────────── // Step 1: Verify Prerequisites // ───────────────────────────────────────────────────────────── console.log('\nπŸ“‹ Step 1: Verifying prerequisites...\n'); - if (!verifyDockerInstalled()) { - console.error('\n❌ Docker is required but not installed or running.'); - process.exit(1); - } + // Docker check is informational only - not required for context tests + checkDockerAvailable(); // Verify test fixtures exist console.log('\n Checking test fixtures...'); for (const testCase of TEST_CASES) { - const dockerfilePath = join(process.cwd(), testCase.dockerContext, 'Dockerfile'); + const dockerfilePath = join( + process.cwd(), + testCase.dockerContext, + testCase.dockerfile || 'Dockerfile', + ); if (!existsSync(dockerfilePath)) { console.error(` ❌ Missing Dockerfile: ${dockerfilePath}`); process.exit(1); @@ -249,15 +214,15 @@ async function main() { console.log(` βœ… ${testCase.name}: Dockerfile found`); } + const ctx = createToolContext(logger); + // ───────────────────────────────────────────────────────────── - // Step 2: Run Build Tests + // Step 2: Run Context Tests // ───────────────────────────────────────────────────────────── - console.log('\nπŸ”¨ Step 2: Running build tests...\n'); - - const ctx = createToolContext(logger); + console.log('\n🧱 Step 2: Running build context tests...\n'); for (const testCase of TEST_CASES) { - console.log(`\n πŸ“¦ Building: ${testCase.name}`); + console.log(`\n πŸ“¦ Testing: ${testCase.name}`); console.log(` Description: ${testCase.description}`); console.log(` Context: ${testCase.dockerContext}`); console.log(` Tags: ${testCase.tags.join(', ')}`); @@ -265,145 +230,106 @@ async function main() { console.log(` Build Args: ${JSON.stringify(testCase.buildArgs)}`); } - const startTime = Date.now(); - - try { - const contextPath = join(process.cwd(), testCase.dockerContext); - - 
let platform: 'linux/amd64' | 'linux/arm64' = 'linux/amd64'; - const dockerfile = testCase.dockerfile || 'Dockerfile'; - - try { - const arch = process.platform === 'win32' - ? 'x86_64' // Windows Docker Desktop defaults to linux/amd64 - : execSync('uname -m', { encoding: 'utf-8' }).trim(); + const contextPath = join(process.cwd(), testCase.dockerContext); + const dockerfile = testCase.dockerfile || 'Dockerfile'; - if (arch === 'arm64' || arch === 'aarch64') { - platform = 'linux/arm64'; - } - - console.log(` Platform: ${platform}`); - } catch { - // Default to linux/amd64 - } + // Detect platform from system architecture + let platform: 'linux/amd64' | 'linux/arm64' = 'linux/amd64'; + if (process.arch === 'arm64') { + platform = 'linux/arm64'; + } - const result = await buildImageTool.handler( + try { + const result = await buildImageContextTool.handler( { path: contextPath, dockerfilePath: join(contextPath, dockerfile), + imageName: testCase.imageName, tags: testCase.tags, buildArgs: testCase.buildArgs, platform, - strictPlatformValidation: false, }, ctx, ); - const buildTime = Date.now() - startTime; - if (!result.ok) { - if (testCase.shouldSucceed) { - console.log(` ❌ Build failed: ${result.error}`); - results.push({ - name: testCase.name, - passed: false, - message: `Build error: ${result.error}`, - buildTime, - }); - failCount++; - } else { - console.log(` βœ… Build failed as expected`); - results.push({ - name: testCase.name, - passed: true, - message: 'Build failed as expected', - buildTime, - }); - passCount++; - } - continue; - } - - if (!testCase.shouldSucceed) { - console.log(` ❌ Build should have failed but succeeded`); + console.log(` ❌ Tool failed: ${result.error}`); + failCount++; results.push({ name: testCase.name, passed: false, - message: 'Expected build to fail', - buildTime, + message: result.error, }); - failCount++; continue; } - // Track built images for cleanup - builtTags.push(...testCase.tags); - const buildResult = result.value; - 
console.log(` Build completed in ${(buildTime / 1000).toFixed(1)}s`); - console.log(` Image ID: ${buildResult.imageId.substring(0, 20)}...`); - console.log(` Image size: ${formatSize(buildResult.size)}`); - console.log(` Layers: ${buildResult.layers || 'N/A'}`); - - // Get additional metadata from Docker - const metadata = getImageMetadata(testCase.tags[0]); - if (metadata) { - console.log(` Docker size: ${formatSize(metadata.size)}`); - console.log(` Docker layers: ${metadata.layers}`); + const warningValidation = validateWarnings(testCase, buildResult.securityAnalysis.warnings); + const commandValidation = validateCommandFlags( + testCase, + buildResult.nextAction.buildCommand.command, + ); + const buildKitFlagPassed = + testCase.expectBuildKit === undefined + ? true + : buildResult.buildKitAnalysis.recommended === testCase.expectBuildKit; + + const failureMessages: string[] = []; + if (!warningValidation.passed) { + failureMessages.push(...warningValidation.messages); + } + if (!commandValidation.passed) { + failureMessages.push(...commandValidation.messages); + } + if (!buildKitFlagPassed) { + failureMessages.push( + `BuildKit recommendation mismatch. 
Expected: ${testCase.expectBuildKit}`, + ); } - // Validate image constraints - const validation = validateImage(testCase, metadata || { size: buildResult.size, layers: buildResult.layers || 0 }); - - if (validation.passed) { - console.log(` βœ… PASSED`); + if (failureMessages.length === 0) { + console.log(' βœ… PASSED'); + passCount++; results.push({ name: testCase.name, passed: true, - message: 'Build successful, all validations passed', - imageSize: metadata?.size || buildResult.size, - layers: metadata?.layers || buildResult.layers, - buildTime, + message: 'Context prepared successfully', + warnings: buildResult.securityAnalysis.warnings.length, + buildKit: buildResult.buildKitAnalysis.recommended, + command: buildResult.nextAction.buildCommand.command, }); - passCount++; } else { - console.log(` ❌ FAILED`); - for (const msg of validation.messages) { + console.log(' ❌ FAILED'); + for (const msg of failureMessages) { console.log(` - ${msg}`); } + failCount++; results.push({ name: testCase.name, passed: false, - message: validation.messages.join('; '), - imageSize: metadata?.size || buildResult.size, - layers: metadata?.layers || buildResult.layers, - buildTime, + message: failureMessages.join('; '), + warnings: buildResult.securityAnalysis.warnings.length, + buildKit: buildResult.buildKitAnalysis.recommended, + command: buildResult.nextAction.buildCommand.command, }); - failCount++; } } catch (error) { - const buildTime = Date.now() - startTime; - console.log(` ❌ Error: ${error instanceof Error ? error.message : 'Unknown error'}`); + const message = error instanceof Error ? error.message : 'Unknown error'; + console.log(` ❌ Error: ${message}`); + failCount++; results.push({ name: testCase.name, passed: false, - message: `Exception: ${error instanceof Error ? 
error.message : 'Unknown error'}`, - buildTime, + message, }); - failCount++; } } // ───────────────────────────────────────────────────────────── - // Step 3: Cleanup + // Step 3: Summary // ───────────────────────────────────────────────────────────── - console.log('\n🧹 Step 3: Cleaning up...\n'); - cleanupTestImages(builtTags); - - // ───────────────────────────────────────────────────────────── - // Step 4: Generate Summary - // ───────────────────────────────────────────────────────────── - console.log('\n' + '='.repeat(60)); + console.log('\n'.repeat(2)); + console.log('='.repeat(60)); console.log('πŸ“Š TEST SUMMARY'); console.log('='.repeat(60)); console.log(`\n Total: ${results.length}`); @@ -413,41 +339,21 @@ async function main() { for (const result of results) { const status = result.passed ? 'βœ… PASS' : '❌ FAIL'; - const time = result.buildTime ? ` (${(result.buildTime / 1000).toFixed(1)}s)` : ''; - const size = result.imageSize ? ` ${formatSize(result.imageSize)}` : ''; - console.log(` ${status} ${result.name}${time}${size}`); + console.log(` ${status} ${result.name}`); if (!result.passed) { console.log(` ${result.message}`); } } - // Write results to JSON for CI/CD reporting - const resultsJson = { - total: results.length, - passed: passCount, - failed: failCount, - timestamp: new Date().toISOString(), - results: results.map((r) => ({ - name: r.name, - passed: r.passed, - message: r.message, - imageSize: r.imageSize, - layers: r.layers, - buildTimeMs: r.buildTime, - })), - }; - - writeFileSync('build-image-test-results.json', JSON.stringify(resultsJson, null, 2)); - console.log('\n Results written to build-image-test-results.json'); - - console.log('\n' + '='.repeat(60)); + writeResultsSummary(results); + console.log('\n Results written to build-image-context-test-results.json'); if (failCount > 0) { - console.log('❌ Some tests failed. See above for details.'); + console.log('\n❌ Some tests failed. 
See above for details.'); process.exit(1); } - console.log('βœ… All tests passed!'); + console.log('\nβœ… All context tests passed!'); } main().catch((error) => { diff --git a/scripts/integration-test-complete-workflow.ts b/scripts/integration-test-complete-workflow.ts index cd30abffa..735cdaf64 100644 --- a/scripts/integration-test-complete-workflow.ts +++ b/scripts/integration-test-complete-workflow.ts @@ -26,7 +26,7 @@ import { createToolContext } from '../dist/src/mcp/context.js'; import analyzeRepoTool from '../dist/src/tools/analyze-repo/tool.js'; import generateDockerfileTool from '../dist/src/tools/generate-dockerfile/tool.js'; -import buildImageTool from '../dist/src/tools/build-image/tool.js'; +import buildImageContextTool from '../dist/src/tools/build-image-context/tool.js'; import scanImageTool from '../dist/src/tools/scan-image/tool.js'; import tagImageTool from '../dist/src/tools/tag-image/tool.js'; import prepareClusterTool from '../dist/src/tools/prepare-cluster/tool.js'; @@ -100,7 +100,7 @@ async function waitForCondition( ): Promise { const startTime = Date.now(); let attempts = 0; - + while (Date.now() - startTime < timeoutMs) { attempts++; try { @@ -114,15 +114,15 @@ async function waitForCondition( } // Transient error, continue waiting } - + if (attempts % 5 === 0) { const elapsed = Math.floor((Date.now() - startTime) / 1000); console.log(` Still waiting for ${description}... 
(${elapsed}s elapsed)`); } - + await new Promise((resolve) => setTimeout(resolve, intervalMs)); } - + return false; } @@ -142,33 +142,41 @@ function detectPlatform(): DockerPlatform { */ async function cleanup(registryPort?: string): Promise { console.log('\n🧹 Cleaning up resources...\n'); - + try { - execSync('kubectl delete deployment sample-workflow-app --ignore-not-found=true', { stdio: 'pipe' }); - execSync('kubectl delete service sample-workflow-app --ignore-not-found=true', { stdio: 'pipe' }); + execSync('kubectl delete deployment sample-workflow-app --ignore-not-found=true', { + stdio: 'pipe', + }); + execSync('kubectl delete service sample-workflow-app --ignore-not-found=true', { + stdio: 'pipe', + }); console.log(' βœ… Kubernetes resources deleted'); } catch { console.log(' ⚠️ Kubernetes cleanup (may not exist)'); } - + try { execSync('kind delete cluster --name containerization-assist', { stdio: 'pipe' }); console.log(' βœ… Kind cluster deleted'); } catch { console.log(' ⚠️ Kind cluster cleanup (may not exist)'); } - + try { execSync('docker rm -f ca-registry', { stdio: 'pipe' }); console.log(' βœ… Registry container deleted'); } catch { console.log(' ⚠️ Registry cleanup (may not exist)'); } - + try { if (registryPort) { - execSync(`docker rmi -f localhost:${registryPort}/sample-workflow-app:v1.0.0`, { stdio: 'pipe' }); - execSync(`docker rmi -f localhost:${registryPort}/sample-workflow-app:latest`, { stdio: 'pipe' }); + execSync(`docker rmi -f localhost:${registryPort}/sample-workflow-app:v1.0.0`, { + stdio: 'pipe', + }); + execSync(`docker rmi -f localhost:${registryPort}/sample-workflow-app:latest`, { + stdio: 'pipe', + }); } execSync('docker rmi -f sample-workflow-app:local', { stdio: 'pipe' }); execSync('docker rmi -f sample-workflow-app:v1.0.0', { stdio: 'pipe' }); @@ -189,25 +197,25 @@ async function main() { const results: StepResult[] = []; let registryPort: string | undefined; - let currentImageId: string | undefined; const workflowStartTime = 
Date.now(); - + // Paths const fixturesPath = resolve('test/fixtures/complete-workflow'); const sampleAppPath = join(fixturesPath, 'sample-java-app'); const tempWorkDir = join(os.tmpdir(), 'e2e-workflow-test-' + Date.now()); - + // Determine platform const platform = detectPlatform(); console.log(`\nπŸ–₯️ Host Platform: ${platform}\n`); - + // Verify prerequisites console.log('πŸ“‹ Checking prerequisites...\n'); const hasDocker = verifyToolInstalled('Docker', 'docker --version'); const hasKind = verifyToolInstalled('kind', 'kind --version'); // Try short version first, fall back to long version (avoids shell-specific operators) - const hasKubectl = verifyToolInstalled('kubectl', 'kubectl version --client --short') - || verifyToolInstalled('kubectl', 'kubectl version --client'); + const hasKubectl = + verifyToolInstalled('kubectl', 'kubectl version --client --short') || + verifyToolInstalled('kubectl', 'kubectl version --client'); const hasTrivy = verifyToolInstalled('Trivy', 'trivy --version'); if (!hasDocker || !hasKind || !hasKubectl) { @@ -229,11 +237,11 @@ async function main() { // Create temp work directory and copy sample app console.log(`\nπŸ“ Setting up work directory: ${tempWorkDir}\n`); mkdirSync(tempWorkDir, { recursive: true }); - + // Copy sample Java app files copyFileSync(join(sampleAppPath, 'App.java'), join(tempWorkDir, 'App.java')); copyFileSync(join(sampleAppPath, 'pom.xml'), join(tempWorkDir, 'pom.xml')); - + // Create context const ctx = createToolContext(logger); @@ -244,11 +252,14 @@ async function main() { console.log('\n' + '─'.repeat(70)); console.log('πŸ“Š Step 1: Analyzing repository with analyze-repo'); console.log('─'.repeat(70)); - + const step1Start = Date.now(); - const analyzeResult = await analyzeRepoTool.handler({ - repositoryPath: tempWorkDir, - }, ctx); + const analyzeResult = await analyzeRepoTool.handler( + { + repositoryPath: tempWorkDir, + }, + ctx, + ); const step1Duration = Date.now() - step1Start; if 
(!analyzeResult.ok) { @@ -266,7 +277,7 @@ async function main() { const analysis = analyzeResult.value; const detectedLanguage = analysis.modules?.[0]?.language || analysis.language; const detectedFramework = analysis.modules?.[0]?.frameworks?.[0]?.name || analysis.framework; - + if (!detectedLanguage) { results.push({ step: 1, @@ -278,11 +289,11 @@ async function main() { }); throw new Error('analyze-repo failed: no language detected'); } - + console.log(' βœ… Repository analyzed'); console.log(` Language: ${detectedLanguage}`); console.log(` Framework: ${detectedFramework || 'none'}`); - + results.push({ step: 1, name: 'Analyze Repository', @@ -302,18 +313,22 @@ async function main() { console.log('\n' + '─'.repeat(70)); console.log('πŸ“ Step 2: Generating Dockerfile with generate-dockerfile'); console.log('─'.repeat(70)); - - const detectedVersion = analysis.modules?.[0]?.buildSystems?.[0]?.languageVersion || analysis.languageVersion || '21'; - + + const detectedVersion = + analysis.modules?.[0]?.buildSystems?.[0]?.languageVersion || analysis.languageVersion || '21'; + const step2Start = Date.now(); - const dockerfileResult = await generateDockerfileTool.handler({ - repositoryPath: tempWorkDir, - language: detectedLanguage, - languageVersion: detectedVersion, - framework: detectedFramework, - environment: 'production', - targetPlatform: platform, - }, ctx); + const dockerfileResult = await generateDockerfileTool.handler( + { + repositoryPath: tempWorkDir, + language: detectedLanguage, + languageVersion: detectedVersion, + framework: detectedFramework, + environment: 'production', + targetPlatform: platform, + }, + ctx, + ); const step2Duration = Date.now() - step2Start; if (!dockerfileResult.ok) { @@ -359,11 +374,11 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \\ CMD ["java", "-jar", "app.jar"] `; writeFileSync(join(tempWorkDir, 'Dockerfile'), generatedDockerfile); - + console.log(' βœ… Dockerfile generated'); console.log(` Base 
image: ${baseImage}`); console.log(` Multi-stage: Yes`); - + results.push({ step: 2, name: 'Generate Dockerfile', @@ -384,21 +399,23 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('πŸ”¨ Step 3: Building image with build-image'); console.log('─'.repeat(70)); - + const step3Start = Date.now(); - const buildResult = await buildImageTool.handler({ - path: tempWorkDir, - tags: ['sample-workflow-app:v1.0.0'], - platform, - strictPlatformValidation: false, - }, ctx); + const buildResult = await buildImageContextTool.handler( + { + path: tempWorkDir, + tags: ['sample-workflow-app:v1.0.0'], + platform, + }, + ctx, + ); const step3Duration = Date.now() - step3Start; if (!buildResult.ok) { results.push({ step: 3, name: 'Build Image', - tool: 'build-image', + tool: 'build-image-context', passed: false, message: `Build failed: ${buildResult.error}`, duration: step3Duration, @@ -406,21 +423,64 @@ CMD ["java", "-jar", "app.jar"] throw new Error('build-image failed'); } - currentImageId = buildResult.value.imageId; + // Execute the build command returned by build-image + const buildCommand = buildResult.value.nextAction.buildCommand; + console.log(` Executing: ${buildCommand.command}`); + + try { + const envVars = Object.entries(buildCommand.environment) + .map(([k, v]) => `${k}=${v}`) + .join(' '); + const fullCommand = envVars ? `${envVars} ${buildCommand.command}` : buildCommand.command; + execSync(fullCommand, { + stdio: 'inherit', + cwd: tempWorkDir, + env: { ...process.env, ...buildCommand.environment }, + }); + } catch (buildError) { + results.push({ + step: 3, + name: 'Build Image', + tool: 'build-image-context', + passed: false, + message: `Docker build execution failed: ${buildError instanceof Error ? 
buildError.message : 'Unknown error'}`, + duration: Date.now() - step3Start, + }); + throw new Error('build-image execution failed'); + } + + // Get the image ID after successful build + const imageTag = buildResult.value.buildConfig.finalTags[0] || 'sample-workflow-app:v1.0.0'; + let currentImageId: string | undefined; + let imageSize: number | undefined; + try { + currentImageId = execSync(`docker inspect --format='{{.Id}}' ${imageTag}`, { + encoding: 'utf-8', + }).trim(); + const sizeStr = execSync(`docker inspect --format='{{.Size}}' ${imageTag}`, { + encoding: 'utf-8', + }).trim(); + imageSize = parseInt(sizeStr, 10); + } catch { + // Image ID lookup failed, continue anyway + } + console.log(' βœ… Image built'); - console.log(` Image ID: ${currentImageId?.substring(0, 20)}...`); - console.log(` Size: ${buildResult.value.size ? Math.round(buildResult.value.size / 1024 / 1024) + 'MB' : 'unknown'}`); - + console.log(` Image ID: ${currentImageId?.substring(0, 20) || 'unknown'}...`); + console.log( + ` Size: ${imageSize ? 
Math.round(imageSize / 1024 / 1024) + 'MB' : 'unknown'}`, + ); + results.push({ step: 3, name: 'Build Image', - tool: 'build-image', + tool: 'build-image-context', passed: true, message: 'Image built successfully', - duration: step3Duration, + duration: Date.now() - step3Start, details: { imageId: currentImageId, - size: buildResult.value.size, + size: imageSize, }, }); @@ -430,15 +490,18 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('πŸ”’ Step 4: Scanning image with scan-image'); console.log('─'.repeat(70)); - + const step4Start = Date.now(); - const scanResult = await scanImageTool.handler({ - imageId: 'sample-workflow-app:v1.0.0', - scanner: 'trivy', - severity: 'HIGH', - scanType: 'vulnerability', - enableAISuggestions: false, - }, ctx); + const scanResult = await scanImageTool.handler( + { + imageId: 'sample-workflow-app:v1.0.0', + scanner: 'trivy', + severity: 'HIGH', + scanType: 'vulnerability', + enableAISuggestions: false, + }, + ctx, + ); const step4Duration = Date.now() - step4Start; if (!scanResult.ok) { @@ -453,9 +516,9 @@ CMD ["java", "-jar", "app.jar"] }); throw new Error('scan-image failed'); } - + const vulnCounts = scanResult.value.vulnerabilities; - + // Validate scan result structure if (vulnCounts === undefined || vulnCounts === null) { console.log(` ❌ Scan result missing vulnerability counts`); @@ -469,13 +532,13 @@ CMD ["java", "-jar", "app.jar"] }); throw new Error('scan-image failed: invalid result structure'); } - + console.log(' βœ… Image scanned'); console.log(` Critical: ${vulnCounts.critical}`); console.log(` High: ${vulnCounts.high}`); console.log(` Medium: ${vulnCounts.medium}`); console.log(` Low: ${vulnCounts.low}`); - + results.push({ step: 4, name: 'Scan Image', @@ -494,14 +557,17 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('☸️ Step 5: Preparing cluster with prepare-cluster'); console.log('─'.repeat(70)); - + const step5Start = Date.now(); - const 
clusterResult = await prepareClusterTool.handler({ - targetPlatform: platform, - environment: 'development', - namespace: 'default', - strictPlatformValidation: false, - }, ctx); + const clusterResult = await prepareClusterTool.handler( + { + targetPlatform: platform, + environment: 'development', + namespace: 'default', + strictPlatformValidation: false, + }, + ctx, + ); const step5Duration = Date.now() - step5Start; if (!clusterResult.ok) { @@ -521,7 +587,7 @@ CMD ["java", "-jar", "app.jar"] console.log(' βœ… Cluster prepared'); console.log(` Cluster: ${clusterResult.value.cluster}`); console.log(` Registry: ${registryUrl}`); - + results.push({ step: 5, name: 'Prepare Cluster', @@ -541,20 +607,26 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('🏷️ Step 6: Tagging image with tag-image'); console.log('─'.repeat(70)); - + const step6Start = Date.now(); // Tag for registry - apply first tag - const tagResult = await tagImageTool.handler({ - imageId: 'sample-workflow-app:v1.0.0', - tag: `localhost:${registryPort}/sample-workflow-app:v1.0.0`, - }, ctx); - + const tagResult = await tagImageTool.handler( + { + imageId: 'sample-workflow-app:v1.0.0', + tag: `localhost:${registryPort}/sample-workflow-app:v1.0.0`, + }, + ctx, + ); + // Apply second tag (latest) if (tagResult.ok) { - await tagImageTool.handler({ - imageId: 'sample-workflow-app:v1.0.0', - tag: `localhost:${registryPort}/sample-workflow-app:latest`, - }, ctx); + await tagImageTool.handler( + { + imageId: 'sample-workflow-app:v1.0.0', + tag: `localhost:${registryPort}/sample-workflow-app:latest`, + }, + ctx, + ); } const step6Duration = Date.now() - step6Start; @@ -572,7 +644,7 @@ CMD ["java", "-jar", "app.jar"] console.log(' βœ… Image tagged'); console.log(` Tags applied: 2`); - + results.push({ step: 6, name: 'Tag Image', @@ -591,21 +663,27 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('πŸ“€ Step 7: Pushing image with push-image'); 
console.log('─'.repeat(70)); - + const step7Start = Date.now(); - const pushResult = await pushImageTool.handler({ - imageId: `localhost:${registryPort}/sample-workflow-app:v1.0.0`, - registry: `localhost:${registryPort}`, - platform, - }, ctx); - - // Also push the latest tag - if (pushResult.ok) { - await pushImageTool.handler({ - imageId: `localhost:${registryPort}/sample-workflow-app:latest`, + const pushResult = await pushImageTool.handler( + { + imageId: `localhost:${registryPort}/sample-workflow-app:v1.0.0`, registry: `localhost:${registryPort}`, platform, - }, ctx); + }, + ctx, + ); + + // Also push the latest tag + if (pushResult.ok) { + await pushImageTool.handler( + { + imageId: `localhost:${registryPort}/sample-workflow-app:latest`, + registry: `localhost:${registryPort}`, + platform, + }, + ctx, + ); } const step7Duration = Date.now() - step7Start; @@ -623,7 +701,7 @@ CMD ["java", "-jar", "app.jar"] console.log(' βœ… Image pushed'); console.log(` Registry: localhost:${registryPort}`); - + results.push({ step: 7, name: 'Push Image', @@ -642,17 +720,17 @@ CMD ["java", "-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('πŸš€ Step 8: Deploying to Kubernetes'); console.log('─'.repeat(70)); - + const step8Start = Date.now(); - + // Read and update deployment manifest with correct registry URL const manifestPath = join(fixturesPath, 'kubernetes', 'deployment.yaml'); let manifest = readFileSync(manifestPath, 'utf-8'); manifest = manifest.replace('REGISTRY_PLACEHOLDER', `localhost:${registryPort}`); - + const tempManifestPath = join(tempWorkDir, 'deployment.yaml'); writeFileSync(tempManifestPath, manifest); - + try { execSync(`kubectl apply -f ${tempManifestPath}`, { stdio: 'pipe' }); console.log(' βœ… Deployment applied'); @@ -671,7 +749,7 @@ CMD ["java", "-jar", "app.jar"] // Wait for deployment to be ready console.log(' ⏳ Waiting for deployment to be ready...'); - + try { const ready = await waitForCondition( 'deployment ready', @@ -679,40 
+757,44 @@ CMD ["java", "-jar", "app.jar"] // Check for CrashLoopBackOff or other failure states first const podStatus = execSync( 'kubectl get pods -l app=sample-workflow-app -o jsonpath="{.items[*].status.containerStatuses[*].state.waiting.reason}"', - { encoding: 'utf-8', stdio: 'pipe' } + { encoding: 'utf-8', stdio: 'pipe' }, ).trim(); - - if (podStatus.includes('CrashLoopBackOff') || podStatus.includes('ImagePullBackOff') || podStatus.includes('ErrImagePull')) { + + if ( + podStatus.includes('CrashLoopBackOff') || + podStatus.includes('ImagePullBackOff') || + podStatus.includes('ErrImagePull') + ) { throw new TerminalPodStateError(podStatus); } - + // Check deployment Available condition (more reliable than readyReplicas) const available = execSync( 'kubectl get deployment sample-workflow-app -o jsonpath="{.status.conditions[?(@.type==\'Available\')].status}"', - { encoding: 'utf-8', stdio: 'pipe' } + { encoding: 'utf-8', stdio: 'pipe' }, ).trim(); - + const readyReplicas = execSync( 'kubectl get deployment sample-workflow-app -o jsonpath="{.status.readyReplicas}"', - { encoding: 'utf-8', stdio: 'pipe' } + { encoding: 'utf-8', stdio: 'pipe' }, ).trim(); - + return available === 'True' && parseInt(readyReplicas || '0') >= 2; }, 180000, // 3 minute timeout (JVM startup can be slow in CI) 3000, ); - + if (!ready) { throw new Error('Deployment did not become ready in time'); } } catch (error) { const step8Duration = Date.now() - step8Start; - + if (error instanceof TerminalPodStateError) { console.log(` ❌ Pod entered terminal failure state: ${error.podStatus}`); } - + // Get debug info console.log('\n Debug info:'); try { @@ -721,7 +803,7 @@ CMD ["java", "-jar", "app.jar"] } catch { // Ignore } - + results.push({ step: 8, name: 'Deploy to Kubernetes', @@ -734,7 +816,7 @@ CMD ["java", "-jar", "app.jar"] } console.log(' βœ… Deployment ready (2/2 replicas)'); - + const step8Duration = Date.now() - step8Start; results.push({ step: 8, @@ -754,13 +836,16 @@ CMD ["java", 
"-jar", "app.jar"] console.log('\n' + '─'.repeat(70)); console.log('βœ… Step 9: Verifying deployment with verify-deploy'); console.log('─'.repeat(70)); - + const step9Start = Date.now(); - const verifyResult = await verifyDeployTool.handler({ - deploymentName: 'sample-workflow-app', - namespace: 'default', - checks: ['pods', 'services', 'health'], - }, ctx); + const verifyResult = await verifyDeployTool.handler( + { + deploymentName: 'sample-workflow-app', + namespace: 'default', + checks: ['pods', 'services', 'health'], + }, + ctx, + ); const step9Duration = Date.now() - step9Start; if (!verifyResult.ok) { @@ -777,9 +862,11 @@ CMD ["java", "-jar", "app.jar"] console.log(' βœ… Deployment verified'); console.log(` Status: ${verifyResult.value.ready ? 'ready' : 'not ready'}`); - console.log(` Ready replicas: ${verifyResult.value.status?.readyReplicas || 0}/${verifyResult.value.status?.totalReplicas || 0}`); + console.log( + ` Ready replicas: ${verifyResult.value.status?.readyReplicas || 0}/${verifyResult.value.status?.totalReplicas || 0}`, + ); console.log(` Health: ${verifyResult.value.healthCheck?.status || 'healthy'}`); - + results.push({ step: 9, name: 'Verify Deployment', @@ -793,7 +880,6 @@ CMD ["java", "-jar", "app.jar"] healthStatus: verifyResult.value.healthCheck?.status, }, }); - } catch (error) { console.error('\n❌ Workflow failed:', error); } finally { @@ -816,7 +902,7 @@ CMD ["java", "-jar", "app.jar"] console.log(`Failed: ❌ ${failed}`); console.log(`Total Duration: ${Math.round(totalDuration / 1000)}s`); console.log('\nStep Results:'); - + for (const result of results) { const status = result.passed ? 'βœ…' : '❌'; const duration = result.duration > 0 ? 
` (${Math.round(result.duration / 1000)}s)` : ''; diff --git a/scripts/smoke-journey.ts b/scripts/smoke-journey.ts index b08a1b883..1ded9149a 100644 --- a/scripts/smoke-journey.ts +++ b/scripts/smoke-journey.ts @@ -81,7 +81,7 @@ async function runSmokeTest(): Promise { }, { name: 'Build Docker Image', - tool: 'build-image', + tool: 'build-image-context', params: { path: TEST_DIR, // Use the existing Dockerfile in the fixture (generate-dockerfile returns a plan, doesn't write files) diff --git a/scripts/validate-public-api.ts b/scripts/validate-public-api.ts index 4916178be..69774b64e 100644 --- a/scripts/validate-public-api.ts +++ b/scripts/validate-public-api.ts @@ -39,7 +39,7 @@ const PUBLIC_EXPORTS: PublicExport[] = [ // Individual tool exports - Public API { file: 'src/index.ts', exportName: 'analyzeRepoTool', exportType: 'value', critical: false }, - { file: 'src/index.ts', exportName: 'buildImageTool', exportType: 'value', critical: false }, + { file: 'src/index.ts', exportName: 'buildImageContextTool', exportType: 'value', critical: false }, { file: 'src/index.ts', exportName: 'fixDockerfileTool', exportType: 'value', critical: false }, { file: 'src/index.ts', exportName: 'generateDockerfileTool', exportType: 'value', critical: false }, { file: 'src/index.ts', exportName: 'generateK8sManifestsTool', exportType: 'value', critical: false }, diff --git a/src/app/chain-hints.ts b/src/app/chain-hints.ts index 325f20b83..575a18207 100644 --- a/src/app/chain-hints.ts +++ b/src/app/chain-hints.ts @@ -33,16 +33,16 @@ export const DEFAULT_CHAIN_HINTS: ChainHintsRegistry = { failure: 'Repository analysis failed. Please check the logs for details.', }, - [TOOL_NAME.BUILD_IMAGE]: { + [TOOL_NAME.BUILD_IMAGE_CONTEXT]: { success: 'Image built successfully. Next: Call scan-image to check for security vulnerabilities.', - failure: 'Image build failed. Use fix-dockerfile to resolve issues, then retry build-image.', + failure: + 'Image build failed. 
Use fix-dockerfile to resolve issues, then retry build-image-context.', }, - [TOOL_NAME.FIX_DOCKERFILE]: { success: - 'Dockerfile fixes applied successfully. Next: Call build-image to test the fixed Dockerfile.', + 'Dockerfile fixes applied successfully. Next: Call build-image-context to test the fixed Dockerfile.', failure: 'Dockerfile fix failed. Review validation errors and try manual fixes.', }, @@ -53,7 +53,8 @@ export const DEFAULT_CHAIN_HINTS: ChainHintsRegistry = { }, [TOOL_NAME.PREPARE_CLUSTER]: { - success: 'Cluster preparation successful. Next: Use `kubectl apply -f ` to deploy your manifests to the cluster, then call verify-deploy to check deployment status.', + success: + 'Cluster preparation successful. Next: Use `kubectl apply -f ` to deploy your manifests to the cluster, then call verify-deploy to check deployment status.', failure: 'Cluster preparation found issues. Check connectivity, permissions, and namespace configuration.', }, diff --git a/src/cli/cli.ts b/src/cli/cli.ts index 6c79a2fc7..dd6570ee5 100644 --- a/src/cli/cli.ts +++ b/src/cli/cli.ts @@ -77,7 +77,7 @@ Examples: MCP Tools Available (11 total): β€’ Analysis: analyze-repo β€’ Dockerfile: generate-dockerfile, fix-dockerfile - β€’ Image: build-image, scan-image, tag-image, push-image + β€’ Image: build-image-context, scan-image, tag-image, push-image β€’ Kubernetes: generate-k8s-manifests, prepare-cluster, verify-deploy β€’ Utilities: ops @@ -120,11 +120,11 @@ async function handleListPoliciesCommand(opts: { showMerged?: boolean }): Promis } // Group by source - const builtIn = policies.filter( - (p) => p.includes('/policies/') && !p.includes('/policies.user'), - ); + const builtIn = policies.filter((p) => p.includes('/policies/') && !p.includes('/policies.user')); const user = policies.filter((p) => p.includes('/policies.user/')); - const custom = policies.filter((p) => !p.includes('/policies/') && !p.includes('/policies.user/')); + const custom = policies.filter( + (p) => 
!p.includes('/policies/') && !p.includes('/policies.user/'), + ); if (builtIn.length > 0) { console.error(' Built-in (Priority: Low):'); diff --git a/src/index.ts b/src/index.ts index b2424b7cf..71bf90162 100644 --- a/src/index.ts +++ b/src/index.ts @@ -253,7 +253,7 @@ export { tool } from './types/tool.js'; * Tools are organized by workflow stage: * 1. Analysis: `analyzeRepoTool` - Detect language, framework, and dependencies * 2. Dockerfile: `generateDockerfileTool`, `fixDockerfileTool` - * 3. Build: `buildImageTool`, `scanImageTool`, `tagImageTool`, `pushImageTool` + * 3. Build: `buildImageContextTool`, `scanImageTool`, `tagImageTool`, `pushImageTool` * 4. Deploy: `generateK8sManifestsTool`, `prepareClusterTool`, `verifyDeployTool` * 5. Operations: `opsTool` - Operational utilities * @@ -262,7 +262,7 @@ export { tool } from './types/tool.js'; export { ALL_TOOLS, analyzeRepoTool, - buildImageTool, + buildImageContextTool, fixDockerfileTool, generateDockerfileTool, generateK8sManifestsTool, diff --git a/src/infra/docker/buildkit-decoder.ts b/src/infra/docker/buildkit-decoder.ts deleted file mode 100644 index 1caabd432..000000000 --- a/src/infra/docker/buildkit-decoder.ts +++ /dev/null @@ -1,216 +0,0 @@ -/** - * BuildKit trace decoder for moby.buildkit.trace events - * Decodes protobuf-encoded BuildKit status messages into human-readable logs - */ - -import protobuf from 'protobufjs'; -import type { Logger } from 'pino'; - -// Lazily loaded protobuf root -let protoRoot: protobuf.Root | null = null; - -/** - * Initialize the protobuf schema - * Define the schema inline to avoid file I/O - */ -function initializeProto(logger: Logger): protobuf.Root { - if (protoRoot) { - return protoRoot; - } - - try { - // Define the protobuf schema programmatically - protoRoot = new protobuf.Root(); - - // Define the StatusResponse message and its nested types - const StatusResponse = new protobuf.Type('StatusResponse') - .add(new protobuf.Field('vertexes', 1, 'Vertex', 
'repeated')) - .add(new protobuf.Field('statuses', 2, 'VertexStatus', 'repeated')) - .add(new protobuf.Field('logs', 3, 'VertexLog', 'repeated')) - .add(new protobuf.Field('warnings', 4, 'VertexWarning', 'repeated')); - - const Vertex = new protobuf.Type('Vertex') - .add(new protobuf.Field('digest', 1, 'string')) - .add(new protobuf.Field('inputs', 2, 'string', 'repeated')) - .add(new protobuf.Field('name', 3, 'string')) - .add(new protobuf.Field('cached', 4, 'bool')) - .add(new protobuf.Field('started', 5, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('completed', 6, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('error', 7, 'string')); - - const VertexStatus = new protobuf.Type('VertexStatus') - .add(new protobuf.Field('ID', 1, 'string')) - .add(new protobuf.Field('vertex', 2, 'string')) - .add(new protobuf.Field('name', 3, 'string')) - .add(new protobuf.Field('current', 4, 'int64')) - .add(new protobuf.Field('total', 5, 'int64')) - .add(new protobuf.Field('timestamp', 6, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('started', 7, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('completed', 8, 'google.protobuf.Timestamp')); - - const VertexLog = new protobuf.Type('VertexLog') - .add(new protobuf.Field('vertex', 1, 'string')) - .add(new protobuf.Field('timestamp', 2, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('stream', 3, 'int64')) - .add(new protobuf.Field('msg', 4, 'bytes')); - - const VertexWarning = new protobuf.Type('VertexWarning') - .add(new protobuf.Field('vertex', 1, 'string')) - .add(new protobuf.Field('level', 2, 'int64')) - .add(new protobuf.Field('short', 3, 'bytes')) - .add(new protobuf.Field('detail', 4, 'bytes', 'repeated')) - .add(new protobuf.Field('url', 5, 'string')); - - // Google Timestamp type - const Timestamp = new protobuf.Type('Timestamp') - .add(new protobuf.Field('seconds', 1, 'int64')) - .add(new protobuf.Field('nanos', 2, 'int32')); - - // Add to namespace - const googleProtobuf = new 
protobuf.Namespace('google.protobuf'); - googleProtobuf.add(Timestamp); - - const mobyBuildkit = new protobuf.Namespace('moby.buildkit.v1'); - mobyBuildkit.add(StatusResponse); - mobyBuildkit.add(Vertex); - mobyBuildkit.add(VertexStatus); - mobyBuildkit.add(VertexLog); - mobyBuildkit.add(VertexWarning); - - protoRoot.add(googleProtobuf); - protoRoot.add(mobyBuildkit); - - return protoRoot; - } catch (error) { - logger.error( - { error, errorMessage: error instanceof Error ? error.message : String(error) }, - 'Failed to initialize BuildKit protobuf schema', - ); - throw error; - } -} - -/** - * Decoded BuildKit status response - */ -export interface BuildKitStatus { - /** Completed build steps */ - steps: string[]; - /** Log messages */ - logs: string[]; - /** Warnings */ - warnings: string[]; - /** Errors */ - errors: string[]; -} - -/** - * Decode a BuildKit trace event from base64-encoded protobuf - * - * @param auxData - Base64-encoded protobuf data from moby.buildkit.trace event - * @param logger - Logger for debugging - * @returns Decoded status with human-readable messages, or null if decoding fails - */ -export function decodeBuildKitTrace(auxData: unknown, logger: Logger): BuildKitStatus | null { - if (!auxData || typeof auxData !== 'string') { - return null; - } - - try { - // Initialize protobuf schema - const root = initializeProto(logger); - const StatusResponse = root.lookupType('moby.buildkit.v1.StatusResponse'); - - // Decode base64 β†’ bytes β†’ protobuf - const bytes = Buffer.from(auxData, 'base64'); - - const message = StatusResponse.decode(bytes); - const obj = StatusResponse.toObject(message, { - longs: Number, - bytes: Buffer, // Return bytes as Buffer instead of base64 string - defaults: false, - arrays: true, - objects: true, - oneofs: true, - }); - - const status: BuildKitStatus = { - steps: [], - logs: [], - warnings: [], - errors: [], - }; - - // Extract completed vertices (build steps) - if (obj.vertexes && Array.isArray(obj.vertexes)) { 
- for (const vertex of obj.vertexes) { - if (vertex.started && vertex.completed && vertex.name) { - status.steps.push(vertex.name); - } - if (vertex.error) { - status.errors.push(`${vertex.name}: ${vertex.error}`); - } - } - } - - // Extract vertex logs - if (obj.logs && Array.isArray(obj.logs)) { - for (const log of obj.logs) { - if (log.msg) { - // msg is a Buffer, convert to string - const logText = Buffer.isBuffer(log.msg) ? log.msg.toString('utf-8') : String(log.msg); - const trimmed = logText.trim(); - if (trimmed) { - status.logs.push(trimmed); - } - } - } - } - - // Extract warnings - if (obj.warnings && Array.isArray(obj.warnings)) { - for (const warning of obj.warnings) { - if (warning.short) { - const warnText = Buffer.isBuffer(warning.short) - ? warning.short.toString('utf-8') - : String(warning.short); - status.warnings.push(warnText.trim()); - } - } - } - - return status; - } catch (error) { - logger.debug({ error }, 'Failed to decode BuildKit protobuf'); - return null; - } -} - -/** - * Format BuildKit status into a single log message - * Returns the most relevant message (step, log, or warning) - */ -export function formatBuildKitStatus(status: BuildKitStatus): string | null { - // Prioritize errors - if (status.errors.length > 0) { - return status.errors[status.errors.length - 1] ?? null; - } - - // Then completed steps - if (status.steps.length > 0) { - return status.steps[status.steps.length - 1] ?? null; - } - - // Then logs - if (status.logs.length > 0) { - return status.logs[status.logs.length - 1] ?? null; - } - - // Finally warnings - if (status.warnings.length > 0) { - const warning = status.warnings[status.warnings.length - 1]; - return warning ? 
`⚠️ ${warning}` : null; - } - - return null; -} diff --git a/src/infra/docker/client.ts b/src/infra/docker/client.ts index 67fb35858..d089e437c 100644 --- a/src/infra/docker/client.ts +++ b/src/infra/docker/client.ts @@ -5,14 +5,10 @@ */ import Docker, { DockerOptions } from 'dockerode'; -import tar from 'tar-fs'; -import path from 'path'; import type { Logger } from 'pino'; import { Success, Failure, type Result } from '@/types'; import { extractDockerErrorGuidance } from './errors'; import { autoDetectDockerSocket } from './socket-validation'; -import { getDockerBuildFiles } from '@/lib/dockerignore-parser'; -import { createProgressTracker, type ProgressCallback } from './progress'; /** * Docker client configuration options. @@ -28,55 +24,6 @@ export interface DockerClientConfig { timeout?: number; } -/** - * Callback for Docker build progress events - */ -export type DockerBuildProgressCallback = ProgressCallback; - -/** - * Options for building a Docker image. - */ -export interface DockerBuildOptions { - /** Path to Dockerfile relative to context */ - dockerfile?: string; - /** Primary tag for the built image */ - t?: string; - /** Additional tags to apply to the built image */ - tags?: string[]; - /** Build context directory (default: current directory) */ - context?: string; - /** Build-time variables (Docker ARG values) */ - buildargs?: Record; - /** Alternative property name for build arguments */ - buildArgs?: Record; - /** Target platform for multi-platform builds (e.g., 'linux/amd64') */ - platform?: string; - /** Optional callback for build progress events */ - onProgress?: DockerBuildProgressCallback; -} - -/** - * Result of a Docker image build operation. 
- */ -export interface DockerBuildResult { - /** Unique identifier of the built image */ - imageId: string; - /** Content-addressable digest of the built image */ - digest: string; - /** Size of the built image in bytes */ - size: number; - /** Number of layers in the image */ - layers?: number; - /** Total build time in milliseconds */ - buildTime: number; - /** Build process log messages */ - logs: string[]; - /** Tags applied to the built image */ - tags?: string[]; - /** Build-time warnings */ - warnings: string[]; -} - /** * Result of pushing a Docker image to a registry. */ @@ -121,13 +68,6 @@ export interface DockerImageInfo { * Docker client interface for container operations. */ export interface DockerClient { - /** - * Builds a Docker image from a Dockerfile. - * @param options - Build configuration options - * @returns Result containing build details or error - */ - buildImage: (options: DockerBuildOptions) => Promise>; - /** * Retrieves information about a Docker image. * @param id - Image ID or tag @@ -198,30 +138,6 @@ export interface DockerClient { ping: () => Promise>; } -/** - * Generate a digest from an image ID - * @param imageId - The Docker image ID - * @param logger - Logger instance - * @returns A SHA-256 digest string or empty string if invalid - */ -function generateDigestFromImageId(imageId: string, logger: Logger): string { - // If already prefixed, validate the hash portion - if (imageId.startsWith('sha256:')) { - const hash = imageId.substring(7); - if (/^[a-f0-9]{64}$/.test(hash)) { - return imageId; - } - } else { - // If not prefixed, validate and add prefix - if (/^[a-f0-9]{64}$/.test(imageId)) { - return `sha256:${imageId}`; - } - } - - logger.warn({ imageId }, 'Image ID is not a valid SHA-256 hash, cannot generate digest'); - return ''; -} - /** * Create base Docker client implementation */ @@ -261,185 +177,6 @@ function createBaseDockerClient(docker: Docker, logger: Logger): DockerClient { }; return { - async buildImage(options: 
DockerBuildOptions): Promise> { - const buildLogs: string[] = []; - const buildWarnings: string[] = []; - const startTime = Date.now(); - - try { - logger.debug({ options }, 'Starting Docker build'); - - const contextPath = options.context || '.'; - const dockerfilePath = options.dockerfile - ? path.resolve(contextPath, options.dockerfile) - : undefined; - const files = await getDockerBuildFiles(contextPath, dockerfilePath); - - const tarStream = tar.pack(contextPath, { - entries: files, - }); - - const stream = await docker.buildImage(tarStream, { - t: options.t || options.tags?.[0], - dockerfile: options.dockerfile, - buildargs: options.buildargs || options.buildArgs, - ...(options.platform && { platform: options.platform }), - version: '2', // Use BuildKit backend for cross-platform builds - }); - - interface DockerBuildEvent { - stream?: string; - aux?: { ID?: string }; - id?: string; - error?: string; - errorDetail?: Record; - } - - interface DockerBuildResponse { - aux?: { ID?: string }; - } - - let buildError: string | null = null; - - // Create progress tracker for BuildKit trace decoding and progress notifications - const trackerOptions: { onProgress?: ProgressCallback; logger: Logger } = { logger }; - if (options.onProgress) { - trackerOptions.onProgress = options.onProgress; - } - const progressTracker = createProgressTracker(trackerOptions); - - const result = await new Promise((resolve, reject) => { - docker.modem.followProgress( - stream, - (err: Error | null, res: DockerBuildResponse[]) => { - if (err) { - // Log detailed error information before rejecting - const guidance = extractDockerErrorGuidance(err); - logger.error( - { - error: guidance.message, - hint: guidance.hint, - resolution: guidance.resolution, - errorDetails: guidance.details, - originalError: err, - options, - }, - 'Docker build followProgress error', - ); - reject(err); - } else if (buildError) { - // If we detected an error during build progress, treat it as a failure - const 
errorObj = new Error(buildError); - logger.error({ buildError, options }, 'Docker build failed with error event'); - reject(errorObj); - } else { - resolve(res); - } - }, - (event: DockerBuildEvent) => { - // For BuildKit progress events, decode and send build status updates - if (event.id === 'moby.buildkit.trace') { - const buildKitMessage = progressTracker.processBuildKitTrace(event.aux); - // Also capture BuildKit messages in build logs - if (buildKitMessage) { - buildLogs.push(buildKitMessage); - } - } - - if (event.error || event.errorDetail) { - logger.error({ errorEvent: event }, 'Docker build error event received'); - const errorMsg = event.error || 'Build step failed'; - const errorLogLine = `ERROR: ${errorMsg}`; - buildLogs.push(errorLogLine); - logger.error(errorLogLine); - - // Capture the first error encountered during the build - if (!buildError) { - buildError = - event.error || - (event.errorDetail && - typeof event.errorDetail === 'object' && - 'message' in event.errorDetail - ? 
String(event.errorDetail.message) - : 'Build step failed'); - } - } - }, - ); - }); - - const imageId = result[result.length - 1]?.aux?.ID || ''; - const buildTime = Date.now() - startTime; - - // Inspect the image to get size, digest, and layers - let size = 0; - let digest = ''; - let layers: number | undefined; - - if (imageId) { - try { - const image = docker.getImage(imageId); - const inspect = await image.inspect(); - - size = inspect.Size || 0; - // Use RepoDigests if available, otherwise fallback to image ID if it looks like a valid SHA-256 hash - if (inspect.RepoDigests?.[0]) { - digest = inspect.RepoDigests[0]; - } else { - digest = generateDigestFromImageId(inspect.Id, logger); - } - layers = inspect.RootFS?.Layers?.length; - } catch (inspectError) { - logger.warn({ error: inspectError, imageId }, 'Could not inspect image after build'); - buildWarnings.push('Could not retrieve complete image metadata'); - // Use fallback digest from image ID - digest = generateDigestFromImageId(imageId, logger); - } - } - - const buildResult: DockerBuildResult = { - imageId, - digest, - size, - ...(layers !== undefined && { layers }), - buildTime, - logs: buildLogs, - tags: options.tags || [], - warnings: buildWarnings, - }; - - logger.debug({ buildResult }, 'Docker build completed successfully'); - return Success(buildResult); - } catch (error) { - const guidance = extractDockerErrorGuidance(error); - const errorMessage = `Build failed: ${guidance.message}`; - - logger.error( - { - error: errorMessage, - hint: guidance.hint, - resolution: guidance.resolution, - errorDetails: guidance.details, - originalError: error, - options, - buildLogs, - }, - 'Docker build failed', - ); - - const enhancedGuidance = { - ...guidance, - details: { - ...guidance.details, - buildLogs: buildLogs.length > 0 ? 
buildLogs : ['No build logs captured'], - buildTime: Date.now() - startTime, - }, - }; - - return Failure(errorMessage, enhancedGuidance); - } - }, - async getImage(id: string): Promise> { return fetchImageInfo(id); }, @@ -737,7 +474,7 @@ function createBaseDockerClient(docker: Docker, logger: Logger): DockerClient { * Create a Docker client with core operations * @param logger - Logger instance for debug output * @param config - Optional Docker client configuration - * @returns DockerClient with build, get, tag, and push operations + * @returns DockerClient with get, tag, push, and management operations */ export const createDockerClient = (logger: Logger, config?: DockerClientConfig): DockerClient => { // Determine the socket path to use diff --git a/src/infra/docker/progress.ts b/src/infra/docker/progress.ts deleted file mode 100644 index dcd7a083f..000000000 --- a/src/infra/docker/progress.ts +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Docker build progress tracking utilities - * Handles BuildKit trace decoding - */ - -import type { Logger } from 'pino'; -import { decodeBuildKitTrace, formatBuildKitStatus } from './buildkit-decoder'; - -export type ProgressCallback = (message: string) => void; - -/** - * Options for progress tracking - */ -export interface ProgressTrackerOptions { - /** Callback to invoke with progress messages */ - onProgress?: ProgressCallback; - /** Logger for debug output */ - logger: Logger; -} - -/** - * Progress tracker for Docker builds - * Handles BuildKit trace decoding - */ -export class ProgressTracker { - private readonly onProgress: ProgressCallback | undefined; - private readonly logger: Logger; - private readonly seenMessages: Set = new Set(); - - constructor(options: ProgressTrackerOptions) { - this.onProgress = options.onProgress; - this.logger = options.logger; - } - - /** - * Process a BuildKit trace event and extract a readable status message. 
- * - * @returns The extracted status message when a new, non-duplicate message is - * produced; otherwise an empty string (for example, when no message - * can be extracted, when the message is a duplicate, or when an - * error occurs during processing). - */ - processBuildKitTrace(auxData: unknown): string { - try { - // Decode BuildKit trace synchronously - const status = decodeBuildKitTrace(auxData, this.logger); - if (status) { - const message = formatBuildKitStatus(status); - if (message && !this.seenMessages.has(message)) { - this.seenMessages.add(message); - if (this.onProgress) { - this.onProgress(message); - } - return message; - } - } - } catch (error) { - this.logger.error( - { - error, - errorMessage: error instanceof Error ? error.message : String(error), - }, - 'Error in processBuildKitTrace', - ); - } - - return ''; - } -} - -/** - * Create a progress tracker for Docker build operations - */ -export function createProgressTracker(options: ProgressTrackerOptions): ProgressTracker { - return new ProgressTracker(options); -} diff --git a/src/lib/telemetry-utils.ts b/src/lib/telemetry-utils.ts index eb6c1fd8c..64573fc4a 100644 --- a/src/lib/telemetry-utils.ts +++ b/src/lib/telemetry-utils.ts @@ -157,7 +157,7 @@ export function extractSafeTelemetryMetrics( if ('moduleCount' in result) metrics.moduleCount = result.moduleCount; break; - case 'build-image': + case 'build-image-context': // Safe: Build duration, image size (aggregates) if ('buildTime' in result) metrics.buildTimeMs = result.buildTime; if ('size' in result) metrics.imageSizeBytes = result.size; diff --git a/src/mcp/formatters/natural-language-formatters.ts b/src/mcp/formatters/natural-language-formatters.ts index 68d61b98d..663a5770f 100644 --- a/src/mcp/formatters/natural-language-formatters.ts +++ b/src/mcp/formatters/natural-language-formatters.ts @@ -19,7 +19,7 @@ import type { ScanImageResult } from '@/tools/scan-image/tool'; import type { DockerfilePlan } from 
'@/tools/generate-dockerfile/schema'; -import type { BuildImageResult } from '@/tools/build-image/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; import type { RepositoryAnalysis } from '@/tools/analyze-repo/schema'; import type { VerifyDeploymentResult } from '@/tools/verify-deploy/tool'; import type { DockerfileFixPlan } from '@/tools/fix-dockerfile/schema'; @@ -28,7 +28,7 @@ import type { PushImageResult } from '@/tools/push-image/tool'; import type { TagImageResult } from '@/tools/tag-image/tool'; import type { PrepareClusterResult } from '@/tools/prepare-cluster/tool'; import type { PingResult, ServerStatusResult } from '@/tools/ops/tool'; -import { formatSize, formatDuration, formatVulnerabilities } from '@/lib/summary-helpers'; +import { formatDuration, formatVulnerabilities } from '@/lib/summary-helpers'; import { CHAINHINTSMODE, ChainHintsMode } from '@/app/orchestrator-types'; /** @@ -272,11 +272,11 @@ export function formatDockerfilePlanNarrative( parts.push('\n**Next Steps:**'); if (plan.nextAction.action === 'create-files') { parts.push(' 1. Create Dockerfile using the base images and recommendations above'); - parts.push(' 2. Build image with build-image tool'); + parts.push(' 2. Build image with build-image-context tool'); parts.push(' 3. Scan for vulnerabilities with scan-image'); } else { parts.push(' 1. Update Dockerfile preserving good patterns and applying improvements'); - parts.push(' 2. Rebuild image with build-image tool'); + parts.push(' 2. Rebuild image with build-image-context tool'); parts.push(' 3. 
Re-scan with scan-image to verify fixes'); } } @@ -285,20 +285,20 @@ export function formatDockerfilePlanNarrative( } /** - * Format build-image result as natural language narrative + * Format build-image-context result as natural language narrative * - * @param result - Build result with image details and metrics + * @param result - Build context preparation result with analysis and command * @param chainHintsMode - Whether to include "Next Steps" section (default: 'enabled') - * @returns Formatted narrative with image details, metrics, and next steps + * @returns Formatted narrative with context analysis, security warnings, and build command * * @description - * Produces a concise build report including: - * - Success status with icon - * - Image ID and applied tags - * - Image size (formatted in MB/GB) - * - Build time (formatted in seconds/minutes) - * - Layer count (if available) - * - Standard next steps for containerization workflow (when chainHintsMode is 'enabled') + * Produces a concise build preparation report including: + * - Summary of build context analysis + * - Dockerfile analysis (base images, ports, layers) + * - Security warnings with severity + * - BuildKit feature recommendations + * - Ready-to-execute build command + * - Next steps for executing the build (when chainHintsMode is 'enabled') */ export function formatBuildImageNarrative( result: BuildImageResult, @@ -307,34 +307,57 @@ export function formatBuildImageNarrative( const parts: string[] = []; // Header - parts.push('βœ… Image Built Successfully\n'); + parts.push('πŸ“¦ Build Context Ready\n'); - // Build info - parts.push(`**Image:** ${result.imageId}`); - if (result.createdTags && result.createdTags.length > 0) { - parts.push(`**Tags Created:** ${result.createdTags.join(', ')}`); + // Summary + parts.push(`**Summary:** ${result.summary}`); + + // Build configuration + if (result.buildConfig.finalTags.length > 0) { + parts.push(`**Tags:** ${result.buildConfig.finalTags.join(', ')}`); } 
- if (result.failedTags && result.failedTags.length > 0) { - parts.push(`**Failed Tags:** ${result.failedTags.join(', ')}`); + parts.push(`**Platform:** ${result.buildConfig.platform}`); + + // Dockerfile analysis + const analysis = result.dockerfileAnalysis; + parts.push(`\n**Dockerfile Analysis:**`); + parts.push(` Base Images: ${analysis.baseImages.join(', ') || 'None'}`); + if (analysis.exposedPorts.length > 0) { + parts.push(` Exposed Ports: ${analysis.exposedPorts.join(', ')}`); } - if (result.size) { - parts.push(`**Size:** ${formatSize(result.size)}`); + parts.push(` Estimated Layers: ${analysis.layerCount}`); + if (analysis.finalUser) { + parts.push(` Final USER: ${analysis.finalUser}`); } - if (result.buildTime) { - parts.push(`**Build Time:** ${formatDuration(Math.round(result.buildTime / 1000))}`); + parts.push(` HEALTHCHECK: ${analysis.hasHealthcheck ? 'Yes' : 'No'}`); + + // Security warnings + if (result.securityAnalysis.warnings.length > 0) { + parts.push(`\n**Security Warnings:**`); + result.securityAnalysis.warnings.forEach((w) => { + parts.push(` ⚠️ [${w.severity.toUpperCase()}] ${w.message}`); + }); + parts.push(` Risk Level: ${result.securityAnalysis.riskLevel}`); } - // Layer information - if (result.layers) { - parts.push(`**Layers:** ${result.layers}`); + // BuildKit recommendations + if (result.buildKitAnalysis.recommended) { + parts.push(`\n**BuildKit:** Recommended for this Dockerfile`); } + // Build command + parts.push(`\n**Build Command:**`); + parts.push(`\`\`\`bash\n${result.nextAction.buildCommand.command}\n\`\`\``); + // Next steps (only if chainHintsMode is enabled) if (chainHintsMode === CHAINHINTSMODE.ENABLED) { parts.push('\n**Next Steps:**'); - parts.push(' β†’ Scan image for vulnerabilities with scan-image'); + result.nextAction.preChecks.forEach((check) => { + parts.push(` β–‘ ${check}`); + }); + parts.push(' β†’ Execute the build command above'); + parts.push(' β†’ Scan built image for vulnerabilities with scan-image'); 
parts.push(' β†’ Tag image for registry with tag-image'); - parts.push(' β†’ Push to registry with push-image'); } return parts.join('\n'); @@ -667,7 +690,7 @@ export function formatFixDockerfileNarrative( if (result.validationGrade === 'A' || result.validationGrade === 'B') { parts.push(' β†’ Dockerfile is in good shape with minor improvements available'); parts.push(' β†’ Review fix recommendations for optimization'); - parts.push(' β†’ Proceed with build-image'); + parts.push(' β†’ Proceed with build-image-context'); } else { parts.push(' β†’ Address high-priority security issues first'); parts.push(' β†’ Apply recommended fixes to improve validation score'); diff --git a/src/mcp/mcp-server.ts b/src/mcp/mcp-server.ts index 1b932a60e..e2cf6aa4c 100644 --- a/src/mcp/mcp-server.ts +++ b/src/mcp/mcp-server.ts @@ -28,7 +28,7 @@ import { import type { Result, ErrorGuidance } from '@/types'; import type { ScanImageResult } from '@/tools/scan-image/tool'; import type { DockerfilePlan } from '@/tools/generate-dockerfile/schema'; -import type { BuildImageResult } from '@/tools/build-image/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; import type { RepositoryAnalysis } from '@/tools/analyze-repo/schema'; import type { VerifyDeploymentResult } from '@/tools/verify-deploy/tool'; import type { DockerfileFixPlan } from '@/tools/fix-dockerfile/schema'; @@ -501,7 +501,7 @@ export function formatOutput( * - scan-image: Security scan results with severity breakdown * - generate-dockerfile: Planning with base images and recommendations * - deploy: Deployment status with endpoints and conditions - * - build-image: Build results with metrics + * - build-image-context: Build results with metrics * - analyze-repo: Repository analysis with module detection * * Falls back to summary field or JSON for other tool types. 
@@ -593,7 +593,7 @@ function isDockerfilePlan(output: object): output is DockerfilePlan { } function isBuildImageResult(output: object): output is BuildImageResult { - return 'imageId' in output && 'buildTime' in output; + return 'buildConfig' in output && 'nextAction' in output && 'dockerfileAnalysis' in output; } function isAnalyzeRepoResult(output: object): output is RepositoryAnalysis { diff --git a/src/sdk/formatters.ts b/src/sdk/formatters.ts index 1cfbcb55f..f338a2b10 100644 --- a/src/sdk/formatters.ts +++ b/src/sdk/formatters.ts @@ -69,13 +69,6 @@ function formatSection(title: string, content: string): string { return `## ${title}\n${content}\n`; } -function formatBytes(bytes: number): string { - if (bytes < 1024) return `${bytes} B`; - if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; - if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)} MB`; - return `${(bytes / 1024 / 1024 / 1024).toFixed(2)} GB`; -} - function formatDuration(ms: number): string { if (ms < 1000) return `${ms}ms`; if (ms < 60000) return `${(ms / 1000).toFixed(1)}s`; @@ -144,8 +137,14 @@ function formatAnalyzeRepoResultProse( } else if (result.language) { // Fallback to top-level fields if modules not populated const details: string[] = []; - if (result.language) details.push(`- **Language**: ${result.language}${result.languageVersion ? ` ${result.languageVersion}` : ''}`); - if (result.framework) details.push(`- **Framework**: ${result.framework}${result.frameworkVersion ? ` ${result.frameworkVersion}` : ''}`); + if (result.language) + details.push( + `- **Language**: ${result.language}${result.languageVersion ? ` ${result.languageVersion}` : ''}`, + ); + if (result.framework) + details.push( + `- **Framework**: ${result.framework}${result.frameworkVersion ? 
` ${result.frameworkVersion}` : ''}`, + ); if (result.buildSystem?.type) details.push(`- **Build System**: ${result.buildSystem.type}`); if (result.entryPoint) details.push(`- **Entry Point**: ${result.entryPoint}`); if (details.length > 0) { @@ -156,7 +155,8 @@ function formatAnalyzeRepoResultProse( // Dependencies if (result.dependencies && result.dependencies.length > 0) { const depList = result.dependencies.slice(0, 10).join(', '); - const moreText = result.dependencies.length > 10 ? ` (+${result.dependencies.length - 10} more)` : ''; + const moreText = + result.dependencies.length > 10 ? ` (+${result.dependencies.length - 10} more)` : ''; sections.push(formatSection('Key Dependencies', depList + moreText)); } @@ -317,6 +317,7 @@ export const formatFixDockerfileResult = createFormatter(formatFixDockerfileResu /** * Format build-image result prose output. + * Now formats build context preparation results (not execution results). */ function formatBuildImageResultProse( result: BuildImageResult, @@ -324,48 +325,61 @@ function formatBuildImageResultProse( ): string { const sections: string[] = []; - if (result.summary) { - sections.push(formatSection('Build Result', result.summary)); - } else { - sections.push( - formatSection('Build Result', result.success ? 
'βœ… Build successful' : '❌ Build failed'), - ); - } + // Summary + sections.push(formatSection('Build Context Analysis', result.summary)); - // Details - const details: string[] = []; - if (result.imageId) details.push(`- **Image ID**: \`${result.imageId.slice(0, 19)}...\``); - if (result.createdTags?.length) { - details.push(`- **Tags**: ${result.createdTags.map((t) => `\`${t}\``).join(', ')}`); - } - if (result.size) details.push(`- **Size**: ${formatBytes(result.size)}`); - if (result.buildTime) details.push(`- **Build Time**: ${formatDuration(result.buildTime)}`); - if (result.layers) details.push(`- **Layers**: ${result.layers}`); + // Context paths + const contextDetails = [ + `- **Build Context**: \`${result.context.buildContextPath}\``, + `- **Dockerfile**: \`${result.context.dockerfilePath}\``, + `- **Has .dockerignore**: ${result.context.hasDockerignore ? 'Yes' : 'No'}`, + ]; + sections.push(formatSection('Paths', contextDetails.join('\n'))); - if (details.length > 0) { - sections.push(formatSection('Build Details', details.join('\n'))); + // Build configuration + if (result.buildConfig.finalTags.length > 0) { + const configDetails = [ + `- **Tags**: ${result.buildConfig.finalTags.map((t: string) => `\`${t}\``).join(', ')}`, + `- **Platform**: ${result.buildConfig.platform}`, + ]; + sections.push(formatSection('Build Configuration', configDetails.join('\n'))); } // Security warnings - if (result.securityWarnings && result.securityWarnings.length > 0) { - const warnings = result.securityWarnings.map((w) => `- ⚠️ ${w}`).join('\n'); + if (result.securityAnalysis.warnings.length > 0) { + const warnings = result.securityAnalysis.warnings + .map( + (w: { severity: string; message: string }) => + `- ⚠️ [${w.severity.toUpperCase()}] ${w.message}`, + ) + .join('\n'); sections.push(formatSection('Security Warnings', warnings)); } - // Failed tags - if (result.failedTags && result.failedTags.length > 0) { - sections.push( - formatSection('Failed Tags', 
result.failedTags.map((t) => `- \`${t}\``).join('\n')), - ); + // Dockerfile analysis + const analysis = result.dockerfileAnalysis; + const analysisDetails = [ + `- **Base Images**: ${analysis.baseImages.join(', ') || 'None detected'}`, + `- **Exposed Ports**: ${analysis.exposedPorts.join(', ') || 'None'}`, + `- **Estimated Layers**: ${analysis.layerCount}`, + `- **Has HEALTHCHECK**: ${analysis.hasHealthcheck ? 'Yes' : 'No'}`, + ]; + if (analysis.finalUser) { + analysisDetails.push(`- **Final USER**: ${analysis.finalUser}`); } + sections.push(formatSection('Dockerfile Analysis', analysisDetails.join('\n'))); + + // Build command + sections.push( + formatSection('Build Command', `\`\`\`bash\n${result.nextAction.buildCommand.command}\n\`\`\``), + ); if (opts.includeSuggestedNext) { - sections.push( - formatSection( - 'Suggested Next Step', - 'Use `scan_image` to check for vulnerabilities, or `tag_image` to add version tags.', - ), - ); + const nextSteps = [ + 'Execute the build command above', + ...result.nextAction.postBuildSteps.map((step: string) => step), + ]; + sections.push(formatSection('Suggested Next Steps', nextSteps.map((s) => `- ${s}`).join('\n'))); } return sections.join('\n'); @@ -585,7 +599,9 @@ function formatGenerateK8sManifestsResultProse( /** * Format generate-k8s-manifests result for LLM consumption. */ -export const formatGenerateK8sManifestsResult = createFormatter(formatGenerateK8sManifestsResultProse); +export const formatGenerateK8sManifestsResult = createFormatter( + formatGenerateK8sManifestsResultProse, +); /** * Format prepare-cluster result prose output. @@ -678,13 +694,18 @@ function formatVerifyDeployResultProse( details.push(`- **Deployment**: ${result.deploymentName}`); details.push(`- **Namespace**: ${result.namespace}`); details.push(`- **Status**: ${result.ready ? 
'Ready' : 'Not Ready'}`); - details.push(`- **Replicas**: ${result.status.readyReplicas}/${result.status.totalReplicas} ready`); + details.push( + `- **Replicas**: ${result.status.readyReplicas}/${result.status.totalReplicas} ready`, + ); sections.push(formatSection('Status', details.join('\n'))); // Endpoints if (result.endpoints && result.endpoints.length > 0) { const endpoints = result.endpoints - .map((e) => `- ${e.type}: \`${e.url}:${e.port}\` ${e.healthy ? 'βœ“' : e.healthy === false ? 'βœ—' : ''}`) + .map( + (e) => + `- ${e.type}: \`${e.url}:${e.port}\` ${e.healthy ? 'βœ“' : e.healthy === false ? 'βœ—' : ''}`, + ) .join('\n'); sections.push(formatSection('Endpoints', endpoints)); } @@ -761,10 +782,7 @@ function formatServerStatusResultProse(result: ServerStatusResult): string { * Uses discriminated union pattern with `kind` field for type-safe narrowing. * No type guards or unsafe casts needed. */ -function formatOpsResultProse( - result: OpsResult, - _opts: Required, -): string { +function formatOpsResultProse(result: OpsResult, _opts: Required): string { switch (result.kind) { case 'ping': return formatPingResultProse(result); @@ -803,7 +821,7 @@ interface FormatterRegistry { analyzeRepo: FormatterFunction; generateDockerfile: FormatterFunction; fixDockerfile: FormatterFunction; - buildImage: FormatterFunction; + buildImageContext: FormatterFunction; scanImage: FormatterFunction; tagImage: FormatterFunction; pushImage: FormatterFunction; @@ -837,7 +855,7 @@ export const resultFormatters = { analyzeRepo: formatAnalyzeRepoResult, generateDockerfile: formatGenerateDockerfileResult, fixDockerfile: formatFixDockerfileResult, - buildImage: formatBuildImageResult, + buildImageContext: formatBuildImageResult, scanImage: formatScanImageResult, tagImage: formatTagImageResult, pushImage: formatPushImageResult, diff --git a/src/sdk/index.ts b/src/sdk/index.ts index 593e0b4f6..0c2314d51 100644 --- a/src/sdk/index.ts +++ b/src/sdk/index.ts @@ -11,11 +11,11 @@ * * 
@example * ```typescript - * import { analyzeRepo, buildImage, scanImage } from 'containerization-assist-mcp/sdk'; + * import { analyzeRepo, buildImageContext, scanImage } from 'containerization-assist-mcp/sdk'; * * // Full containerization workflow * const analysis = await analyzeRepo({ repositoryPath: './my-app' }); - * const build = await buildImage({ path: './my-app', imageName: 'myapp:v1' }); + * const build = await buildImageContext({ path: './my-app', imageName: 'myapp:v1' }); * const scan = await scanImage({ imageId: 'myapp:v1' }); * ``` * @@ -57,9 +57,9 @@ import { generateDockerfileSchema } from '@/tools/generate-dockerfile/schema'; import fixDockerfileTool from '@/tools/fix-dockerfile/tool'; import { fixDockerfileSchema } from '@/tools/fix-dockerfile/schema'; -// build-image -import buildImageTool from '@/tools/build-image/tool'; -import { buildImageSchema } from '@/tools/build-image/schema'; +// build-image-context +import buildImageContextTool from '@/tools/build-image-context/tool'; +import { buildImageSchema } from '@/tools/build-image-context/schema'; // scan-image import scanImageTool from '@/tools/scan-image/tool'; @@ -124,8 +124,8 @@ export type AnalyzeRepoInput = z.input; export type GenerateDockerfileInput = z.input; /** Input type for fixDockerfile - derived from Zod schema */ export type FixDockerfileInput = z.input; -/** Input type for buildImage - derived from Zod schema */ -export type BuildImageInput = z.input; +/** Input type for buildImageContext - derived from Zod schema */ +export type BuildImageContextInput = z.input; /** Input type for scanImage - derived from Zod schema */ export type ScanImageInput = z.input; /** Input type for tagImage - derived from Zod schema */ @@ -185,10 +185,10 @@ export const fixDockerfile = createSDKFunction(fixDockerfileTool); // ----- Image Tools ----- /** - * Build a Docker image from a Dockerfile. - * Requires Docker daemon to be running. 
+ * Prepare Docker build context with security analysis and optimized commands. + * Returns structured guidance for executing builds - does not execute Docker directly. */ -export const buildImage = createSDKFunction(buildImageTool); +export const buildImageContext = createSDKFunction(buildImageContextTool); /** * Scan a Docker image for security vulnerabilities. @@ -248,7 +248,7 @@ interface ToolRegistry { analyzeRepo: Tool; generateDockerfile: Tool; fixDockerfile: Tool; - buildImage: Tool; + buildImageContext: Tool; scanImage: Tool; tagImage: Tool; pushImage: Tool; @@ -281,7 +281,7 @@ interface ToolRegistry { * const schema = tools.analyzeRepo.schema; * * // Access tool metadata - * console.log(tools.buildImage.description); + * console.log(tools.buildImageContext.description); * ``` */ export const tools = { @@ -296,8 +296,8 @@ export const tools = { fixDockerfile: fixDockerfileTool, // ===== Image Operations ===== - /** Build Docker image from Dockerfile (requires Docker daemon) */ - buildImage: buildImageTool, + /** Prepare Docker build context with security analysis (returns build commands) */ + buildImageContext: buildImageContextTool, /** Scan image for security vulnerabilities (requires Trivy) */ scanImage: scanImageTool, /** Tag Docker image with additional tags */ @@ -358,7 +358,7 @@ export { analyzeRepoJsonSchema, generateDockerfileJsonSchema, fixDockerfileJsonSchema, - buildImageJsonSchema, + buildImageContextJsonSchema, scanImageJsonSchema, tagImageJsonSchema, pushImageJsonSchema, @@ -387,7 +387,7 @@ export { analyzeRepoMetadata, generateDockerfileMetadata, fixDockerfileMetadata, - buildImageMetadata, + buildImageContextMetadata, scanImageMetadata, tagImageMetadata, pushImageMetadata, diff --git a/src/sdk/metadata.ts b/src/sdk/metadata.ts index 4bdee799f..2499b3a5a 100644 --- a/src/sdk/metadata.ts +++ b/src/sdk/metadata.ts @@ -66,9 +66,7 @@ export interface ExternalDependency { * @returns Comma-separated string of dependency names */ export function 
getRequiredDepsString(deps: readonly ExternalDependency[]): string { - return deps - .map((dep) => (dep.optional ? `${dep.id} (optional)` : dep.id)) - .join(', '); + return deps.map((dep) => (dep.optional ? `${dep.id} (optional)` : dep.id)).join(', '); } /** @@ -221,35 +219,33 @@ Does NOT modify the file - returns recommendations for the LLM to apply.`, }; /** - * Metadata for build-image tool. + * Metadata for build-image-context tool. */ -export const buildImageMetadata: ToolMetadata = { - name: 'build_image', - displayName: 'Build Docker Image', - toolReferenceName: 'containerization-build', - modelDescription: `Builds a Docker image from a Dockerfile. REQUIRES Docker daemon to be running. Use after generating or fixing a Dockerfile. Accepts either: -- A path to an existing Dockerfile (dockerfilePath) -- Dockerfile content as a string (dockerfile) - -Returns build results including: -- Image ID and tags -- Build duration and image size -- Build logs (truncated if very long) -- Success/failure status with error details - -On failure, provides actionable error messages. Consider running scan_image after successful builds.`, - userDescription: 'Build a Docker image from a Dockerfile', +export const buildImageContextMetadata: ToolMetadata = { + name: 'build_image_context', + displayName: 'Prepare Docker Build Context', + toolReferenceName: 'containerization-build-context', + modelDescription: `Prepares Docker build context with security analysis and optimized build commands. Does NOT execute the build - returns structured guidance for the agent to execute. 
+ +Returns comprehensive build context including: +- Security analysis with warnings and risk level +- BuildKit feature detection and recommendations +- Dockerfile analysis (base images, ports, layers, healthcheck) +- Pre-generated build command ready to execute +- Pre-checks and post-build suggestions + +The agent should execute the returned buildCommand.command to perform the actual build.`, + userDescription: 'Analyze Dockerfile and prepare optimized build commands', icon: '$(package)', canBeReferencedInPrompt: true, confirmation: { - title: 'Build Docker Image', - messageTemplate: 'Build Docker image:\n\n**Context**: `{{path}}`\n**Image**: `{{imageName}}`', - isReadOnly: false, - warning: 'This will execute Docker build commands.', + title: 'Prepare Build Context', + messageTemplate: 'Analyze build context:\n\n**Path**: `{{path}}`\n**Image**: `{{imageName}}`', + isReadOnly: true, }, suggestedNextTools: ['scan_image', 'tag_image'], category: 'image', - requiresExternalDeps: [{ id: ExternalDeps.DOCKER }], + requiresExternalDeps: [], }; /** @@ -276,10 +272,7 @@ Requires Trivy to be installed. If Trivy is not available, returns a limited ana }, suggestedNextTools: ['fix_dockerfile', 'tag_image', 'push_image'], category: 'image', - requiresExternalDeps: [ - { id: ExternalDeps.DOCKER }, - { id: ExternalDeps.TRIVY, optional: true }, - ], + requiresExternalDeps: [{ id: ExternalDeps.DOCKER }, { id: ExternalDeps.TRIVY, optional: true }], }; /** @@ -333,10 +326,7 @@ The image name must include the registry (unless pushing to Docker Hub). 
Example }, suggestedNextTools: ['generate_k8s_manifests'], category: 'image', - requiresExternalDeps: [ - { id: ExternalDeps.DOCKER }, - { id: ExternalDeps.REGISTRY_AUTH }, - ], + requiresExternalDeps: [{ id: ExternalDeps.DOCKER }, { id: ExternalDeps.REGISTRY_AUTH }], }; /** @@ -390,10 +380,7 @@ Use before deploying to ensure the target namespace exists and the cluster is ac }, suggestedNextTools: ['verify_deploy'], category: 'kubernetes', - requiresExternalDeps: [ - { id: ExternalDeps.KUBECTL }, - { id: ExternalDeps.CLUSTER_ACCESS }, - ], + requiresExternalDeps: [{ id: ExternalDeps.KUBECTL }, { id: ExternalDeps.CLUSTER_ACCESS }], }; /** @@ -421,10 +408,7 @@ Includes configurable timeout for waiting on deployment readiness.`, }, suggestedNextTools: [], category: 'kubernetes', - requiresExternalDeps: [ - { id: ExternalDeps.KUBECTL }, - { id: ExternalDeps.CLUSTER_ACCESS }, - ], + requiresExternalDeps: [{ id: ExternalDeps.KUBECTL }, { id: ExternalDeps.CLUSTER_ACCESS }], }; /** @@ -462,7 +446,7 @@ interface ToolMetadataRegistry { analyzeRepo: ToolMetadata; generateDockerfile: ToolMetadata; fixDockerfile: ToolMetadata; - buildImage: ToolMetadata; + buildImageContext: ToolMetadata; scanImage: ToolMetadata; tagImage: ToolMetadata; pushImage: ToolMetadata; @@ -497,7 +481,7 @@ export const toolMetadata = { analyzeRepo: analyzeRepoMetadata, generateDockerfile: generateDockerfileMetadata, fixDockerfile: fixDockerfileMetadata, - buildImage: buildImageMetadata, + buildImageContext: buildImageContextMetadata, scanImage: scanImageMetadata, tagImage: tagImageMetadata, pushImage: pushImageMetadata, @@ -525,7 +509,7 @@ export type ToolMetadataName = keyof typeof toolMetadata; export const standardWorkflow: readonly ToolMetadataName[] = [ 'analyzeRepo', 'generateDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', diff --git a/src/sdk/schemas.ts b/src/sdk/schemas.ts index 18325df12..196237705 100644 --- a/src/sdk/schemas.ts +++ 
b/src/sdk/schemas.ts @@ -22,7 +22,7 @@ export { convertZodToJsonSchema } from './json-schema-converter.js'; import { analyzeRepoSchema } from '@/tools/analyze-repo/schema'; import { generateDockerfileSchema } from '@/tools/generate-dockerfile/schema'; import { fixDockerfileSchema } from '@/tools/fix-dockerfile/schema'; -import { buildImageSchema } from '@/tools/build-image/schema'; +import { buildImageSchema } from '@/tools/build-image-context/schema'; import { scanImageSchema } from '@/tools/scan-image/schema'; import { tagImageSchema } from '@/tools/tag-image/schema'; import { pushImageSchema } from '@/tools/push-image/schema'; @@ -49,9 +49,9 @@ export const generateDockerfileJsonSchema = convertZodToJsonSchema(generateDocke export const fixDockerfileJsonSchema = convertZodToJsonSchema(fixDockerfileSchema); /** - * JSON Schema for build-image tool input. + * JSON Schema for build-image-context tool input. */ -export const buildImageJsonSchema = convertZodToJsonSchema(buildImageSchema); +export const buildImageContextJsonSchema = convertZodToJsonSchema(buildImageSchema); /** * JSON Schema for scan-image tool input. 
@@ -111,7 +111,7 @@ export const jsonSchemas = { analyzeRepo: analyzeRepoJsonSchema, generateDockerfile: generateDockerfileJsonSchema, fixDockerfile: fixDockerfileJsonSchema, - buildImage: buildImageJsonSchema, + buildImageContext: buildImageContextJsonSchema, scanImage: scanImageJsonSchema, tagImage: tagImageJsonSchema, pushImage: pushImageJsonSchema, diff --git a/src/sdk/types.ts b/src/sdk/types.ts index 49acef2bd..ada56863c 100644 --- a/src/sdk/types.ts +++ b/src/sdk/types.ts @@ -68,10 +68,9 @@ export type { FixRecommendation, } from '@/tools/fix-dockerfile/schema'; -// ===== BUILD-IMAGE TYPES ===== +// ===== BUILD-IMAGE-CONTEXT TYPES ===== -export type { BuildImageParams } from '@/tools/build-image/schema'; -export type { BuildImageResult } from '@/tools/build-image/tool'; +export type { BuildImageParams, BuildImageResult } from '@/tools/build-image-context/schema'; // ===== SCAN-IMAGE TYPES ===== diff --git a/src/tools/build-image-context/schema.ts b/src/tools/build-image-context/schema.ts new file mode 100644 index 000000000..2708500fb --- /dev/null +++ b/src/tools/build-image-context/schema.ts @@ -0,0 +1,158 @@ +/** + * Schema definition for build-image-context tool + * Provides context and recommendations for Docker image builds + */ + +import { z } from 'zod'; +import { imageName, tags, buildArgs, platform } from '../shared/schemas'; + +/** + * Input parameters for build context preparation + */ +export const buildImageSchema = z.object({ + path: z + .string() + .optional() + .describe('Build context path (use forward slashes: /path/to/context)'), + dockerfile: z.string().optional().describe('Dockerfile name (relative to context)'), + dockerfilePath: z + .string() + .optional() + .describe('Path to Dockerfile (use forward slashes: /path/to/Dockerfile)'), + imageName: imageName.optional(), + tags: tags.optional(), + buildArgs: buildArgs.optional(), + platform, +}); + +export type BuildImageParams = z.infer; + +/** + * Security warning with severity and 
remediation + */ +export interface SecurityWarning { + /** Warning identifier */ + id: string; + /** Severity level */ + severity: 'low' | 'medium' | 'high' | 'critical'; + /** Human-readable message */ + message: string; + /** Line number in Dockerfile (if applicable) */ + line?: number; + /** Recommended fix */ + remediation: string; +} + +/** + * BuildKit feature detection + */ +export interface BuildKitFeatures { + /** Dockerfile uses --mount=type=cache */ + cacheMount: boolean; + /** Dockerfile uses --mount=type=secret */ + secretMount: boolean; + /** Dockerfile uses --mount=type=ssh */ + sshMount: boolean; + /** Multi-stage build detected */ + multiStage: boolean; + /** Number of build stages */ + stageCount: number; + /** Uses COPY --from for multi-stage */ + copyFrom: boolean; + /** Uses heredoc syntax (requires BuildKit) */ + heredoc: boolean; +} + +/** + * Structured command for agent execution + */ +export interface BuildCommand { + /** Full command string ready to execute */ + command: string; + /** Structured parts for programmatic use */ + parts: { + executable: 'docker'; + subcommand: 'build' | 'buildx build'; + flags: string[]; + context: string; + }; + /** Environment variables to set */ + environment: Record; +} + +/** + * Result of build context preparation + */ +export interface BuildImageResult { + /** Natural language summary */ + summary: string; + + /** Validated paths */ + context: { + /** Absolute path to build context directory */ + buildContextPath: string; + /** Absolute path to Dockerfile */ + dockerfilePath: string; + /** Relative path for -f flag */ + dockerfileRelative: string; + /** Whether .dockerignore exists */ + hasDockerignore: boolean; + }; + + /** Security analysis results */ + securityAnalysis: { + /** Structured warnings */ + warnings: SecurityWarning[]; + /** Overall risk assessment */ + riskLevel: 'low' | 'medium' | 'high'; + /** Actionable recommendations */ + recommendations: string[]; + }; + + /** Computed build 
configuration */ + buildConfig: { + /** Final tags to apply (computed from imageName + tags) */ + finalTags: string[]; + /** Merged build arguments (defaults + user provided) */ + buildArgs: Record; + /** Target platform */ + platform: string; + }; + + /** BuildKit feature analysis */ + buildKitAnalysis: { + /** Detected features */ + features: BuildKitFeatures; + /** Whether BuildKit is recommended */ + recommended: boolean; + /** Optimization recommendations */ + recommendations: string[]; + }; + + /** Dockerfile content analysis */ + dockerfileAnalysis: { + /** Base images used */ + baseImages: string[]; + /** Exposed ports */ + exposedPorts: number[]; + /** Final USER directive (if any) */ + finalUser?: string; + /** Has HEALTHCHECK */ + hasHealthcheck: boolean; + /** Estimated layer count */ + layerCount: number; + }; + + /** Agent execution instructions */ + nextAction: { + action: 'execute-build'; + /** Pre-execution checklist */ + preChecks: string[]; + /** Primary build command */ + buildCommand: BuildCommand; + /** Alternative command if primary fails */ + fallbackCommand?: BuildCommand; + /** Post-build suggestions */ + postBuildSteps: string[]; + }; +} diff --git a/src/tools/build-image-context/tool.ts b/src/tools/build-image-context/tool.ts new file mode 100644 index 000000000..7985948dc --- /dev/null +++ b/src/tools/build-image-context/tool.ts @@ -0,0 +1,660 @@ +/** + * Build Image Context Preparation Tool + * + * Analyzes Dockerfiles and build contexts, returning structured recommendations + * and commands for an agent to execute the build. + * + * This tool does NOT execute Docker builds - it provides context and guidance + * for agents to run builds with optimal settings. 
+ * + * @example + * ```typescript + * const result = await buildImageContext({ + * path: '/path/to/app', + * imageName: 'myapp', + * tags: ['latest', 'v1.0.0'], + * }, context); + * + * // Agent executes: result.value.nextAction.buildCommand.command + * ``` + */ + +import path from 'path'; +import fs from 'fs/promises'; +import { normalizePath } from '@/lib/platform'; +import { setupToolContext } from '@/lib/tool-context-helpers'; +import type { ToolContext } from '@/core/context'; +import { validatePathOrFail } from '@/lib/validation-helpers'; +import { readDockerfile } from '@/lib/file-utils'; + +import { type Result, Success, Failure } from '@/types'; +import { extractErrorMessage } from '@/lib/errors'; +import { + type BuildImageParams, + type BuildImageResult, + type SecurityWarning, + type BuildKitFeatures, + type BuildCommand, + buildImageSchema, +} from './schema'; + +/** + * Prepare build arguments by merging user-provided args with default build metadata + */ +function prepareBuildArgs(buildArgs: Record = {}): Record { + const defaults: Record = { + NODE_ENV: process.env.NODE_ENV ?? 'production', + BUILD_DATE: new Date().toISOString(), + VCS_REF: process.env.GIT_COMMIT ?? 
'unknown', + }; + + return { ...defaults, ...buildArgs }; +} + +/** + * Analyze Dockerfile for security issues with structured output + */ +function analyzeSecurityIssues( + dockerfile: string, + buildArgs: Record, +): SecurityWarning[] { + const warnings: SecurityWarning[] = []; + const lines = dockerfile.split('\n'); + + // Check for secrets in build args + const sensitivePatterns = [ + 'password', + 'token', + 'key', + 'secret', + 'api_key', + 'apikey', + 'credential', + 'private', + ]; + for (const [key, value] of Object.entries(buildArgs)) { + const keyLower = key.toLowerCase(); + if (sensitivePatterns.some((pattern) => keyLower.includes(pattern))) { + warnings.push({ + id: 'secret-in-build-arg', + severity: 'high', + message: `Potential secret in build arg: ${key}`, + remediation: + 'Use --mount=type=secret for sensitive data instead of build args. Build args are visible in image history.', + }); + } + // Also check if value looks like a secret + if (value && (value.length > 20 || /^[A-Za-z0-9+/=]{20,}$/.test(value))) { + warnings.push({ + id: 'secret-value-in-build-arg', + severity: 'medium', + message: `Build arg "${key}" may contain a secret value`, + remediation: + 'Review if this value should be passed via --mount=type=secret instead of build args.', + }); + } + } + + // Check for sudo usage + lines.forEach((line, index) => { + if (/\bsudo\s/.test(line) && !line.trim().startsWith('#')) { + warnings.push({ + id: 'sudo-usage', + severity: 'medium', + message: 'Using sudo in Dockerfile', + line: index + 1, + remediation: + 'RUN commands execute as root by default. 
Remove sudo and add a non-root USER directive at the end.', + }); + } + }); + + // Check for :latest tags in FROM + lines.forEach((line, index) => { + const fromMatch = line.match(/^FROM\s+(\S+)/i); + if (fromMatch) { + const image = fromMatch[1]; + if (image?.endsWith(':latest') || (!image?.includes(':') && !image?.includes('@'))) { + warnings.push({ + id: 'unpinned-base-image', + severity: 'medium', + message: `Unpinned base image: ${image}`, + line: index + 1, + remediation: + 'Pin base images to specific versions (e.g., node:20-alpine) or digests for reproducible builds.', + }); + } + } + }); + + // Check for root user + const userDirectives = lines.filter((line) => /^USER\s+/i.test(line.trim())); + const lastUser = userDirectives[userDirectives.length - 1]; + if (!lastUser || /^USER\s+(root|0)\s*$/i.test(lastUser.trim())) { + warnings.push({ + id: 'runs-as-root', + severity: 'high', + message: 'Container runs as root user', + remediation: + 'Add a non-root USER directive (e.g., USER node or USER 1000). Running as root increases attack surface.', + }); + } + + // Check for ADD instead of COPY (potential remote URL fetch) + lines.forEach((line, index) => { + if (/^ADD\s+https?:\/\//i.test(line.trim())) { + warnings.push({ + id: 'add-remote-url', + severity: 'medium', + message: 'ADD used with remote URL', + line: index + 1, + remediation: + 'Use RUN curl/wget instead of ADD for remote URLs. 
ADD auto-extracts archives which can be unexpected.', + }); + } + }); + + // Check for chmod 777 + lines.forEach((line, index) => { + if (/chmod\s+777/i.test(line)) { + warnings.push({ + id: 'overly-permissive-chmod', + severity: 'high', + message: 'chmod 777 grants excessive permissions', + line: index + 1, + remediation: 'Use more restrictive permissions (e.g., 755 for directories, 644 for files).', + }); + } + }); + + // Check for apt-get without --no-install-recommends + lines.forEach((line, index) => { + if (/apt-get\s+install/i.test(line) && !/--no-install-recommends/i.test(line)) { + warnings.push({ + id: 'apt-install-recommends', + severity: 'low', + message: 'apt-get install without --no-install-recommends', + line: index + 1, + remediation: + 'Add --no-install-recommends to reduce image size by skipping optional packages.', + }); + } + }); + + return warnings; +} + +/** + * Compute overall risk level from warnings + */ +function computeRiskLevel(warnings: SecurityWarning[]): 'low' | 'medium' | 'high' { + const hasCritical = warnings.some((w) => w.severity === 'critical'); + const highCount = warnings.filter((w) => w.severity === 'high').length; + const mediumCount = warnings.filter((w) => w.severity === 'medium').length; + + if (hasCritical || highCount >= 2) return 'high'; + if (highCount >= 1 || mediumCount >= 3) return 'medium'; + return 'low'; +} + +/** + * Generate security recommendations based on warnings + */ +function generateSecurityRecommendations(warnings: SecurityWarning[]): string[] { + const recommendations: string[] = []; + + if (warnings.some((w) => w.id === 'runs-as-root')) { + recommendations.push( + 'Add a non-root USER directive to improve container security and follow least-privilege principle.', + ); + } + + if (warnings.some((w) => w.id === 'secret-in-build-arg')) { + recommendations.push( + 'Use BuildKit secrets (--mount=type=secret) instead of build args for sensitive data.', + ); + } + + if (warnings.some((w) => w.id === 
'unpinned-base-image')) { + recommendations.push( + 'Pin base images to specific versions or SHA digests for reproducible, secure builds.', + ); + } + + if (recommendations.length === 0) { + recommendations.push( + 'No critical security issues detected. Consider running a vulnerability scan after build.', + ); + } + + return recommendations; +} + +/** + * Analyze Dockerfile for BuildKit features + */ +function analyzeBuildKitFeatures(dockerfile: string): BuildKitFeatures { + const lines = dockerfile.split('\n'); + + // Detect multi-stage builds + const fromStatements = lines.filter((line) => /^FROM\s+/i.test(line.trim())); + const multiStage = fromStatements.length > 1; + const stageCount = fromStatements.length; + + // Detect BuildKit-specific features + const cacheMount = /--mount=type=cache/i.test(dockerfile); + const secretMount = /--mount=type=secret/i.test(dockerfile); + const sshMount = /--mount=type=ssh/i.test(dockerfile); + const copyFrom = /COPY\s+--from=/i.test(dockerfile); + const heredoc = /<<[-~]?\s*\w+/.test(dockerfile) || /<<[-~]?EOF/i.test(dockerfile); + + return { + cacheMount, + secretMount, + sshMount, + multiStage, + stageCount, + copyFrom, + heredoc, + }; +} + +/** + * Generate BuildKit recommendations + */ +function generateBuildKitRecommendations(features: BuildKitFeatures, dockerfile: string): string[] { + const recommendations: string[] = []; + + // If already using BuildKit features + if (features.cacheMount || features.secretMount || features.sshMount || features.heredoc) { + recommendations.push('Dockerfile uses BuildKit features - ensure DOCKER_BUILDKIT=1 is set.'); + } + + // Suggest cache mounts for package managers + if (!features.cacheMount) { + if (/npm\s+(install|ci)/i.test(dockerfile)) { + recommendations.push( + 'Consider using --mount=type=cache,target=/root/.npm for npm cache to speed up builds.', + ); + } + if (/pip\s+install/i.test(dockerfile)) { + recommendations.push( + 'Consider using 
--mount=type=cache,target=/root/.cache/pip for pip cache.', + ); + } + if (/go\s+(build|mod)/i.test(dockerfile)) { + recommendations.push( + 'Consider using --mount=type=cache,target=/go/pkg/mod for Go module cache.', + ); + } + } + + // Multi-stage build suggestions + if (!features.multiStage && dockerfile.length > 500) { + recommendations.push( + 'Consider using multi-stage builds to reduce final image size by separating build and runtime stages.', + ); + } + + return recommendations; +} + +/** + * Analyze Dockerfile content for metadata + */ +function analyzeDockerfileContent(dockerfile: string): { + baseImages: string[]; + exposedPorts: number[]; + finalUser?: string; + hasHealthcheck: boolean; + layerCount: number; +} { + const lines = dockerfile.split('\n'); + + // Extract base images + const baseImages = lines + .filter((line) => /^FROM\s+/i.test(line.trim())) + .map((line) => { + const match = line.match(/^FROM\s+(\S+)/i); + return match?.[1] ?? ''; + }) + .filter(Boolean); + + // Extract exposed ports + const exposedPorts: number[] = []; + lines.forEach((line) => { + const match = line.match(/^EXPOSE\s+(.+)/i); + if (match?.[1]) { + const ports = match[1].split(/\s+/).map((p) => { + const portStr = p.split('/')[0]; + return portStr ? 
parseInt(portStr, 10) : NaN; + }); + exposedPorts.push(...ports.filter((p) => !isNaN(p))); + } + }); + + // Find final USER + const userDirectives = lines.filter((line) => /^USER\s+/i.test(line.trim())); + const lastUserLine = userDirectives[userDirectives.length - 1]; + const finalUser = lastUserLine?.match(/^USER\s+(\S+)/i)?.[1]; + + // Check for HEALTHCHECK + const hasHealthcheck = lines.some((line) => /^HEALTHCHECK\s+/i.test(line.trim())); + + // Estimate layer count (each RUN, COPY, ADD creates a layer) + const layerCount = lines.filter((line) => { + const trimmed = line.trim(); + return /^(RUN|COPY|ADD)\s+/i.test(trimmed) && !trimmed.startsWith('#'); + }).length; + + const result: { + baseImages: string[]; + exposedPorts: number[]; + finalUser?: string; + hasHealthcheck: boolean; + layerCount: number; + } = { + baseImages, + exposedPorts, + hasHealthcheck, + layerCount, + }; + + if (finalUser !== undefined) { + result.finalUser = finalUser; + } + + return result; +} + +/** + * Combine image name with tag to create a full image reference + */ +function combineImageNameAndTag(imageName: string | undefined, tag: string): string { + if (tag.includes(':') || tag.includes('/')) { + return tag; + } + if (!imageName) { + return tag; + } + return `${imageName}:${tag}`; +} + +/** + * Compute final tags from imageName and tags array + */ +function computeFinalTags(imageName?: string, tags?: string[]): string[] { + if (tags && tags.length > 0) { + return tags.map((tag) => combineImageNameAndTag(imageName, tag)); + } + if (imageName) { + // Ensure imageName has a tag + return [imageName.includes(':') ? 
imageName : `${imageName}:latest`]; + } + return ['app:latest']; +} + +/** + * Generate Docker build command + */ +function generateBuildCommand( + contextPath: string, + dockerfileRelative: string, + finalTags: string[], + buildArgs: Record, + platform: string, + useBuildKit: boolean, +): BuildCommand { + const flags: string[] = []; + + // Dockerfile path + if (dockerfileRelative !== 'Dockerfile') { + flags.push(`-f ${dockerfileRelative}`); + } + + // Tags + for (const tag of finalTags) { + flags.push(`-t ${tag}`); + } + + // Build args + for (const [key, value] of Object.entries(buildArgs)) { + // Escape values with spaces or special characters + const escapedValue = value.includes(' ') || value.includes('"') ? `"${value}"` : value; + flags.push(`--build-arg ${key}=${escapedValue}`); + } + + // Platform + flags.push(`--platform ${platform}`); + + // BuildKit progress output + if (useBuildKit) { + flags.push('--progress=plain'); + } + + const command = `docker build ${flags.join(' ')} ${contextPath}`; + + return { + command, + parts: { + executable: 'docker', + subcommand: 'build', + flags, + context: contextPath, + }, + environment: useBuildKit ? 
{ DOCKER_BUILDKIT: '1' } : {}, + }; +} + +/** + * Check if .dockerignore exists + */ +async function checkDockerignore(contextPath: string): Promise { + try { + await fs.access(path.join(contextPath, '.dockerignore')); + return true; + } catch { + return false; + } +} + +/** + * Build image context preparation handler + */ +async function handleBuildImage( + params: BuildImageParams, + context: ToolContext, +): Promise> { + if (!params || typeof params !== 'object') { + return Failure('Invalid parameters provided', { + message: 'Parameters must be a valid object', + hint: 'Tool received invalid or missing parameters', + resolution: 'Ensure parameters are provided as a JSON object', + }); + } + + const { logger, timer } = setupToolContext(context, 'build-image-context'); + + const { + path: rawBuildPath = '.', + dockerfile = 'Dockerfile', + dockerfilePath: rawDockerfilePath, + imageName, + tags = [], + buildArgs = {}, + platform = 'linux/amd64', + } = params; + + try { + // Validate build context path + const buildContextResult = await validatePathOrFail(rawBuildPath, { + mustExist: true, + mustBeDirectory: true, + }); + if (!buildContextResult.ok) return buildContextResult; + + // Normalize paths + const buildContextPath = normalizePath(buildContextResult.value); + const dockerfilePath = rawDockerfilePath ? 
normalizePath(rawDockerfilePath) : undefined; + const dockerfileRelativePath = dockerfilePath || dockerfile; + const finalDockerfilePath = path.resolve(buildContextPath, dockerfileRelativePath); + + // Read Dockerfile + const dockerfileContentResult = await readDockerfile({ + path: finalDockerfilePath, + }); + + if (!dockerfileContentResult.ok) { + return dockerfileContentResult; + } + + const dockerfileContent = dockerfileContentResult.value; + + // Prepare build arguments + const finalBuildArgs = prepareBuildArgs(buildArgs); + + // Security analysis + const securityWarnings = analyzeSecurityIssues(dockerfileContent, finalBuildArgs); + const riskLevel = computeRiskLevel(securityWarnings); + const securityRecommendations = generateSecurityRecommendations(securityWarnings); + + logger.debug( + { warningCount: securityWarnings.length, riskLevel }, + 'Security analysis complete', + ); + + // BuildKit analysis + const buildKitFeatures = analyzeBuildKitFeatures(dockerfileContent); + const buildKitRecommendations = generateBuildKitRecommendations( + buildKitFeatures, + dockerfileContent, + ); + const useBuildKit = + buildKitFeatures.cacheMount || + buildKitFeatures.secretMount || + buildKitFeatures.sshMount || + buildKitFeatures.heredoc || + buildKitFeatures.multiStage; + + // Dockerfile analysis + const dockerfileAnalysis = analyzeDockerfileContent(dockerfileContent); + + // Compute final tags + const finalTags = computeFinalTags(imageName, tags); + + // Check for .dockerignore + const hasDockerignore = await checkDockerignore(buildContextPath); + + // Generate build command + const dockerfileRelative = path.relative(buildContextPath, finalDockerfilePath); + const buildCommand = generateBuildCommand( + buildContextPath, + dockerfileRelative, + finalTags, + finalBuildArgs, + platform, + useBuildKit, + ); + + // Generate fallback command (without BuildKit) + const fallbackCommand = useBuildKit + ? 
generateBuildCommand( + buildContextPath, + dockerfileRelative, + finalTags, + finalBuildArgs, + platform, + false, + ) + : undefined; + + // Pre-checks for agent + const preChecks: string[] = ['Verify Docker daemon is running: docker info']; + if (!hasDockerignore) { + preChecks.push( + 'Consider creating .dockerignore to exclude unnecessary files from build context', + ); + } + if (riskLevel === 'high') { + preChecks.push('Review security warnings before proceeding with build'); + } + + // Post-build suggestions + const postBuildSteps: string[] = [ + `Verify image was created: docker images | grep ${finalTags[0]?.split(':')[0] || 'app'}`, + 'Run vulnerability scan: docker scout quickview or trivy image', + ]; + if (!dockerfileAnalysis.hasHealthcheck) { + postBuildSteps.push('Consider adding HEALTHCHECK to Dockerfile for production deployments'); + } + + // Generate summary + const warningText = + securityWarnings.length > 0 + ? ` Found ${securityWarnings.length} security warning(s) (${riskLevel} risk).` + : ' No security issues detected.'; + const buildKitText = useBuildKit ? ' BuildKit recommended.' 
: ''; + const summary = `Build context prepared for ${finalTags[0] || 'image'}.${warningText}${buildKitText}`; + + const result: BuildImageResult = { + summary, + context: { + buildContextPath, + dockerfilePath: finalDockerfilePath, + dockerfileRelative, + hasDockerignore, + }, + securityAnalysis: { + warnings: securityWarnings, + riskLevel, + recommendations: securityRecommendations, + }, + buildConfig: { + finalTags, + buildArgs: finalBuildArgs, + platform, + }, + buildKitAnalysis: { + features: buildKitFeatures, + recommended: useBuildKit, + recommendations: buildKitRecommendations, + }, + dockerfileAnalysis, + nextAction: { + action: 'execute-build', + preChecks, + buildCommand, + ...(fallbackCommand && { fallbackCommand }), + postBuildSteps, + }, + }; + + timer.end({ tags: finalTags, riskLevel }); + return Success(result); + } catch (error) { + timer.error(error); + + return Failure(extractErrorMessage(error), { + message: extractErrorMessage(error), + hint: 'An unexpected error occurred during build context preparation', + resolution: + 'Check the error message above for details. Common issues include invalid paths or unreadable Dockerfile', + }); + } +} + +export const buildImageContext = handleBuildImage; + +import { tool } from '@/types/tool'; + +export default tool({ + name: 'build-image-context', + description: + 'Prepare Docker build context with security analysis and optimized build commands. 
Returns structured guidance for executing builds.', + version: '3.0.0', + schema: buildImageSchema, + metadata: { + knowledgeEnhanced: false, + }, + handler: handleBuildImage, +}); diff --git a/src/tools/build-image/schema.ts b/src/tools/build-image/schema.ts deleted file mode 100644 index 93b285874..000000000 --- a/src/tools/build-image/schema.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Schema definition for build-image tool - */ - -import { z } from 'zod'; -import { imageName, tags, buildArgs, platform } from '../shared/schemas'; - -export const buildImageSchema = z.object({ - path: z - .string() - .optional() - .describe('Build context path (use forward slashes: /path/to/context)'), - dockerfile: z.string().optional().describe('Dockerfile name (relative to context)'), - dockerfilePath: z - .string() - .optional() - .describe('Path to Dockerfile (use forward slashes: /path/to/Dockerfile)'), - imageName: imageName.optional(), - tags: tags.optional(), - buildArgs: buildArgs.optional(), - platform, - strictPlatformValidation: z - .boolean() - .optional() - .default(false) - .describe('Deprecated: No longer enforces platform flags. Reserved for future use.'), -}); - -export type BuildImageParams = z.infer; diff --git a/src/tools/build-image/tool.ts b/src/tools/build-image/tool.ts deleted file mode 100644 index faa4363ce..000000000 --- a/src/tools/build-image/tool.ts +++ /dev/null @@ -1,347 +0,0 @@ -/** - * Build Docker images from Dockerfiles. - * Handles multi-stage builds, build arguments, and platform-specific builds. 
- * - * @example - * ```typescript - * const result = await buildImage({ - * path: '/path/to/app', - * tags: ['myapp:latest', 'myapp:v1.0.0'], - * buildArgs: { NODE_ENV: 'production' } - * }, context); - * ``` - */ - -import path from 'path'; -import { normalizePath } from '@/lib/platform'; -import { setupToolContext } from '@/lib/tool-context-helpers'; -import type { ToolContext } from '@/core/context'; -import { createDockerClient, type DockerBuildOptions } from '@/infra/docker/client'; -import { validatePathOrFail, parseImageName } from '@/lib/validation-helpers'; -import { readDockerfile } from '@/lib/file-utils'; - -import { type Result, Success, Failure } from '@/types'; -import { extractErrorMessage } from '@/lib/errors'; -import { type BuildImageParams, buildImageSchema } from './schema'; -import { formatSize, formatDuration } from '@/lib/summary-helpers'; - -export interface BuildImageResult { - /** - * Natural language summary for user display. - * 1-3 sentences describing the build outcome, image details, and next steps. - * @example "βœ… Built image successfully. Image: myapp:latest (245MB). Build completed in 45s." 
- */ - summary?: string; - /** Whether the build completed successfully */ - success: boolean; - /** Generated Docker image ID (SHA256 hash) */ - imageId: string; - /** All tags that were requested/attempted to be created */ - requestedTags: string[]; - /** Successfully created tags with full image names */ - createdTags: string[]; - /** Tags that failed to be created (if any) */ - failedTags?: string[]; - /** Final image size in bytes */ - size: number; - /** Number of layers in the image */ - layers?: number; - /** Total build time in milliseconds */ - buildTime: number; - /** Complete build output logs */ - logs: string[]; - /** Security-related warnings discovered during build */ - securityWarnings?: string[]; -} - -/** - * Prepare build arguments by merging user-provided args with default build metadata - */ -async function prepareBuildArgs( - buildArgs: Record = {}, -): Promise> { - const defaults: Record = { - NODE_ENV: process.env.NODE_ENV ?? 'production', - BUILD_DATE: new Date().toISOString(), - VCS_REF: process.env.GIT_COMMIT ?? 
'unknown', - }; - - return { ...defaults, ...buildArgs }; -} - -/** - * Combine image name with tag to create a full image reference - * If tag already contains a colon or slash (indicating it's a full reference), return as-is - * Otherwise, combine imageName with tag - */ -function combineImageNameAndTag(imageName: string | undefined, tag: string): string { - // If tag already looks like a full reference (contains : or /), use as-is - if (tag.includes(':') || tag.includes('/')) { - return tag; - } - - // If no imageName provided, use tag as-is (will get 'latest' appended by Docker if needed) - if (!imageName) { - return tag; - } - - // Combine imageName with tag - return `${imageName}:${tag}`; -} - -/** - * Apply additional tags to a built image, returning any tags that failed to apply - */ - -async function applyAdditionalTags( - imageId: string, - tags: string[], - dockerClient: ReturnType, - logger: ReturnType['logger'], -): Promise { - const failedTags: string[] = []; - for (const tag of tags) { - const parsedTag = parseImageName(tag); - if (!parsedTag.ok) { - logger.warn({ imageId, tag, error: parsedTag.error }, 'Failed to parse tag - skipping'); - failedTags.push(tag); - continue; - } - - const { repository, tag: tagName, registry } = parsedTag.value; - const fullRepository = registry ? 
`${registry}/${repository}` : repository; - - const tagResult = await dockerClient.tagImage(imageId, fullRepository, tagName); - if (!tagResult.ok) { - failedTags.push(tag); - logger.warn({ imageId, tag, error: tagResult.error }, 'Failed to apply tag'); - } - } - return failedTags; -} - -/** - * Analyze build for security issues - */ -function analyzeBuildSecurity(dockerfile: string, buildArgs: Record): string[] { - const warnings: string[] = []; - - // Check for secrets in build args - const sensitiveKeys = ['password', 'token', 'key', 'secret', 'api_key', 'apikey']; - for (const key of Object.keys(buildArgs)) { - if (sensitiveKeys.some((sensitive) => key.toLowerCase().includes(sensitive))) { - warnings.push(`Potential secret in build arg: ${key}`); - } - } - - // Check for sudo in Dockerfile - if (dockerfile.includes('sudo ')) { - warnings.push('Using sudo in Dockerfile - consider running as non-root'); - } - - // Check for latest tags - if (dockerfile.includes(':latest')) { - warnings.push('Using :latest tag - consider pinning versions for reproducibility'); - } - - // Check for root user - if (!dockerfile.includes('USER ') || dockerfile.includes('USER root')) { - warnings.push('Container may run as root - consider adding a non-root USER'); - } - - return warnings; -} - -/** - * Build Docker image handler - */ -async function handleBuildImage( - params: BuildImageParams, - context: ToolContext, -): Promise> { - if (!params || typeof params !== 'object') { - return Failure('Invalid parameters provided', { - message: 'Parameters must be a valid object', - hint: 'Tool received invalid or missing parameters', - resolution: 'Ensure parameters are provided as a JSON object', - }); - } - - const { logger, timer } = setupToolContext(context, 'build-image'); - - const { - path: rawBuildPath = '.', - dockerfile = 'Dockerfile', - dockerfilePath: rawDockerfilePath, - imageName = 'app:latest', - tags = [], - buildArgs = {}, - platform, - } = params; - - try { - // Validate 
build context path - const buildContextResult = await validatePathOrFail(rawBuildPath, { - mustExist: true, - mustBeDirectory: true, - }); - if (!buildContextResult.ok) return buildContextResult; - - // Normalize paths - const buildContext = normalizePath(buildContextResult.value); - const dockerfilePath = rawDockerfilePath ? normalizePath(rawDockerfilePath) : undefined; - const dockerfileRelativePath = dockerfilePath || dockerfile; - const finalDockerfilePath = path.resolve(buildContext, dockerfileRelativePath); - - const dockerClient = createDockerClient(logger); - - // Verify Docker daemon is available - logger.debug('Checking Docker daemon availability'); - const pingResult = await dockerClient.ping(); - if (!pingResult.ok) { - return Failure('Docker daemon is not available', { - message: pingResult.error, - hint: 'Docker daemon is not running or not accessible', - resolution: - 'Ensure Docker is installed and running. On Windows, verify Docker Desktop is started and running in Linux container mode.', - }); - } - logger.debug('Docker daemon is available'); - - // Read Dockerfile for security analysis - const dockerfileContentResult = await readDockerfile({ - path: finalDockerfilePath, - }); - - if (!dockerfileContentResult.ok) { - return dockerfileContentResult; - } - - const dockerfileContent = dockerfileContentResult.value; - - // Prepare build arguments - const finalBuildArgs = await prepareBuildArgs(buildArgs); - - // Analyze security - const securityWarnings = analyzeBuildSecurity(dockerfileContent, finalBuildArgs); - if (securityWarnings.length > 0) { - logger.warn({ warnings: securityWarnings }, 'Security warnings found in build'); - } - - // Determine final tags to apply to the image - let finalTags: string[] = []; - if (tags.length > 0) { - // Combine each tag with imageName (if tag is not already a full reference) - finalTags = tags.map((tag) => combineImageNameAndTag(imageName, tag)); - } else if (imageName) { - // No tags provided, use imageName 
as-is (Docker will default to 'latest' if no tag) - finalTags = [imageName]; - } - - // Log the final tags that will be applied - logger.info({ finalTags, originalTags: tags, imageName }, 'Determined final tags for build'); - - // Prepare Docker build options - const buildOptions: DockerBuildOptions = { - context: buildContext, - dockerfile: path.relative(buildContext, finalDockerfilePath), - buildargs: finalBuildArgs, - ...(platform !== undefined && { platform }), - ...(finalTags.length > 0 && finalTags[0] && { t: finalTags[0] }), - }; - - if (context.progress) { - buildOptions.onProgress = (message: string) => { - context.progress?.(message).catch((err) => { - logger.warn({ error: err, message }, 'Failed to send progress notification'); - }); - }; - } - - const buildResult = await dockerClient.buildImage(buildOptions); - - if (!buildResult.ok) { - const errorMessage = buildResult.error ?? 'Unknown error'; - - // Propagate Docker error guidance from infrastructure layer - const guidance = buildResult.guidance; - - return Failure(`Failed to build image: ${errorMessage}`, guidance); - } - - // Apply additional tags to the built image - let failedTags: string[] = []; - if (finalTags.length > 1 && buildResult.value.imageId) { - const additionalTags = finalTags.slice(1); - logger.info( - { imageId: buildResult.value.imageId, additionalTags }, - 'Applying additional tags', - ); - failedTags = await applyAdditionalTags( - buildResult.value.imageId, - additionalTags, - dockerClient, - logger, - ); - if (failedTags.length > 0) { - logger.warn({ failedTags }, 'Some tags failed to apply'); - } - } - - // Generate summary - const successfulTags = finalTags.filter((tag) => !failedTags.includes(tag)); - const imageTag = successfulTags[0] || buildResult.value.imageId; - const sizeText = buildResult.value.size ? ` (${formatSize(buildResult.value.size)})` : ''; - const timeText = buildResult.value.buildTime - ? 
` Build completed in ${formatDuration(Math.round(buildResult.value.buildTime / 1000))}.` - : ''; - - const failedTagsText = - failedTags.length > 0 - ? ` ⚠️ Failed to apply ${failedTags.length} tag(s): ${failedTags.join(', ')}` - : ''; - - const summary = `βœ… Built image successfully. Image: ${imageTag}${sizeText}.${timeText}${failedTagsText}`; - - const result: BuildImageResult = { - summary, - success: true, - imageId: buildResult.value.imageId, - requestedTags: finalTags, - createdTags: successfulTags, - size: buildResult.value.size, - ...(buildResult.value.layers !== undefined && { layers: buildResult.value.layers }), - buildTime: buildResult.value.buildTime, - logs: buildResult.value.logs, - ...(securityWarnings.length > 0 && { securityWarnings }), - ...(failedTags.length > 0 && { failedTags }), - }; - - timer.end({ imageId: buildResult.value.imageId, buildTime: buildResult.value.buildTime }); - return Success(result); - } catch (error) { - timer.error(error); - - return Failure(extractErrorMessage(error), { - message: extractErrorMessage(error), - hint: 'An unexpected error occurred during the Docker build process', - resolution: - 'Check the error message above for details. 
Common issues include Docker daemon not running, insufficient permissions, or invalid build context', - }); - } -} - -export const buildImage = handleBuildImage; - -import { tool } from '@/types/tool'; - -export default tool({ - name: 'build-image', - description: 'Build Docker images from Dockerfiles with security analysis', - version: '2.0.0', - schema: buildImageSchema, - metadata: { - knowledgeEnhanced: false, - }, - handler: handleBuildImage, -}); diff --git a/src/tools/fix-dockerfile/tool.ts b/src/tools/fix-dockerfile/tool.ts index 3e4439fd6..ea0c83d9a 100644 --- a/src/tools/fix-dockerfile/tool.ts +++ b/src/tools/fix-dockerfile/tool.ts @@ -268,9 +268,10 @@ const runPattern = createKnowledgeTool< const totalIssues = rules.issueCount; const totalFixes = knowledgeMatches.length; const environment = input.environment || 'production'; - const summary = totalIssues > 0 - ? `βœ… Dockerfile validation complete for ${environment} environment. Found ${pluralize(totalIssues, 'issue')} (${securityIssues.length} security, ${performanceIssues.length} performance). ${pluralize(totalFixes, 'fix recommendation')} available. Validation score: ${validationScore}/100 (${validationGrade}).` - : `βœ… Dockerfile validation passed for ${environment} environment. Score: ${validationScore}/100 (${validationGrade}). No critical issues found.`; + const summary = + totalIssues > 0 + ? `βœ… Dockerfile validation complete for ${environment} environment. Found ${pluralize(totalIssues, 'issue')} (${securityIssues.length} security, ${performanceIssues.length} performance). ${pluralize(totalFixes, 'fix recommendation')} available. Validation score: ${validationScore}/100 (${validationGrade}).` + : `βœ… Dockerfile validation passed for ${environment} environment. Score: ${validationScore}/100 (${validationGrade}). 
No critical issues found.`; return { currentIssues: { @@ -431,7 +432,7 @@ export default tool({ }, chainHints: { success: - 'Dockerfile validation and analysis complete (includes built-in best practices + organizational policy validation if configured). Next: Apply recommended fixes, then call build-image to test the Dockerfile.', + 'Dockerfile validation and analysis complete (includes built-in best practices + organizational policy validation if configured). Next: Apply recommended fixes, then call build-image-context to test the Dockerfile.', failure: 'Dockerfile validation failed. Review validation errors, policy violations (if any), and apply recommended fixes.', }, diff --git a/src/tools/index.ts b/src/tools/index.ts index 33b54683c..4bd5355bc 100644 --- a/src/tools/index.ts +++ b/src/tools/index.ts @@ -1,5 +1,5 @@ import analyzeRepoTool from './analyze-repo/tool'; -import buildImageTool from './build-image/tool'; +import buildImageContextTool from './build-image-context/tool'; import fixDockerfileTool from './fix-dockerfile/tool'; import generateDockerfileTool from './generate-dockerfile/tool'; import generateK8sManifestsTool from './generate-k8s-manifests/tool'; @@ -12,7 +12,7 @@ import verifyDeployTool from './verify-deploy/tool'; const TOOL_NAME = { ANALYZE_REPO: 'analyze-repo', - BUILD_IMAGE: 'build-image', + BUILD_IMAGE_CONTEXT: 'build-image-context', FIX_DOCKERFILE: 'fix-dockerfile', GENERATE_DOCKERFILE: 'generate-dockerfile', GENERATE_K8S_MANIFESTS: 'generate-k8s-manifests', @@ -28,7 +28,7 @@ export type ToolName = (typeof TOOL_NAME)[keyof typeof TOOL_NAME]; // Ensure proper names on all tools analyzeRepoTool.name = TOOL_NAME.ANALYZE_REPO; -buildImageTool.name = TOOL_NAME.BUILD_IMAGE; +buildImageContextTool.name = TOOL_NAME.BUILD_IMAGE_CONTEXT; fixDockerfileTool.name = TOOL_NAME.FIX_DOCKERFILE; generateDockerfileTool.name = TOOL_NAME.GENERATE_DOCKERFILE; generateK8sManifestsTool.name = TOOL_NAME.GENERATE_K8S_MANIFESTS; @@ -42,7 +42,7 @@ 
verifyDeployTool.name = TOOL_NAME.VERIFY_DEPLOY; // Create a union type of all tool types for better type safety export type Tool = ( | typeof analyzeRepoTool - | typeof buildImageTool + | typeof buildImageContextTool | typeof fixDockerfileTool | typeof generateDockerfileTool | typeof generateK8sManifestsTool @@ -64,7 +64,7 @@ export const ALL_TOOLS: readonly Tool[] = [ generateK8sManifestsTool, // Operational/deterministic tools - buildImageTool, + buildImageContextTool, opsTool, prepareClusterTool, pushImageTool, @@ -76,7 +76,7 @@ export const ALL_TOOLS: readonly Tool[] = [ export { TOOL_NAME, analyzeRepoTool, - buildImageTool, + buildImageContextTool, fixDockerfileTool, generateDockerfileTool, generateK8sManifestsTool, diff --git a/src/tools/ops/tool.ts b/src/tools/ops/tool.ts index 9f1260a28..a97b9deb9 100644 --- a/src/tools/ops/tool.ts +++ b/src/tools/ops/tool.ts @@ -11,7 +11,7 @@ * - Server diagnostics and metadata * * **NOT for:** - * - Application containerization (use build-image, etc.) + * - Application containerization (use build-image-context, etc.) * - Docker operations (use Docker tools) * - Kubernetes operations (use K8s tools) * @@ -271,7 +271,8 @@ async function handleOps( return Failure(`Unknown operation: ${input.operation}`, { message: `Unknown operation: ${input.operation}`, hint: 'The requested operation is not supported', - resolution: 'Use one of the supported operations: "ping" for connectivity testing or "status" for server information', + resolution: + 'Use one of the supported operations: "ping" for connectivity testing or "status" for server information', }); } } @@ -283,7 +284,8 @@ import { tool } from '@/types/tool'; export default tool({ name: 'ops', - description: 'MCP server diagnostics: ping for connectivity testing, status for health metrics (memory, CPU, uptime). 
Use this for server monitoring, not application containerization.', + description: + 'MCP server diagnostics: ping for connectivity testing, status for health metrics (memory, CPU, uptime). Use this for server monitoring, not application containerization.', category: 'utility', version: '2.0.0', schema: opsToolSchema, diff --git a/test/__support__/utilities/mock-factories.ts b/test/__support__/utilities/mock-factories.ts index 23d288e3a..3c0523eff 100644 --- a/test/__support__/utilities/mock-factories.ts +++ b/test/__support__/utilities/mock-factories.ts @@ -1,6 +1,5 @@ import { AnalysisResult, - DockerBuildResult, DockerfileResult, ScanResult, K8sManifestResult, @@ -63,26 +62,6 @@ CMD ["node", "index"]`, }; } -export function createMockDockerBuildResult( - overrides?: Partial, -): DockerBuildResult { - return { - image_id: `sha256:${'a'.repeat(64)}`, - image_tag: 'test-app:latest', - size_bytes: 52428800, // 50MB - layers: [ - { id: `sha256:${'b'.repeat(64)}`, size: 5242880, command: 'FROM node:18-alpine' }, - { id: `sha256:${'c'.repeat(64)}`, size: 1048576, command: 'WORKDIR /app' }, - { id: `sha256:${'d'.repeat(64)}`, size: 41943040, command: 'RUN npm ci' }, - { id: `sha256:${'e'.repeat(64)}`, size: 4194304, command: 'COPY . .' 
}, - ], - build_duration_ms: 45000, - build_args: {}, - cache_used: true, - ...overrides, - }; -} - export function createMockScanResult(overrides?: Partial): ScanResult { return { scanner: 'trivy', @@ -403,7 +382,7 @@ export function createMockCoreServices(): { return { docker: { - build: jest.fn().mockResolvedValue(createMockDockerBuildResult()), + build: jest.fn().mockResolvedValue({ success: true }), scan: jest.fn().mockResolvedValue(createMockScanResult()), push: jest.fn().mockResolvedValue(undefined), tag: jest.fn().mockResolvedValue(undefined), @@ -442,8 +421,8 @@ export function createMockCoreServices(): { */ export function createMockDockerClient() { return { - build: jest.fn().mockResolvedValue(createMockDockerBuildResult()), - buildImage: jest.fn().mockResolvedValue(createMockDockerBuildResult()), + build: jest.fn().mockResolvedValue({ success: true }), + buildImage: jest.fn().mockResolvedValue({ success: true }), getImage: jest.fn().mockResolvedValue({ ok: true, value: { @@ -1098,15 +1077,7 @@ export function createMockAIService() { export function createMockDockerClientForService() { return { initialize: jest.fn().mockResolvedValue(undefined), - build: jest.fn().mockResolvedValue({ - image_id: 'sha256:mock-build-result', - image_tag: 'test-app:latest', - size_bytes: 52428800, - layers: [], - build_duration_ms: 45000, - build_args: {}, - cache_used: true, - }), + build: jest.fn().mockResolvedValue({ success: true }), scan: jest.fn().mockResolvedValue({ scanner: 'trivy', vulnerabilities: [], @@ -1163,7 +1134,7 @@ export function createMockDockerClientForService() { export function createMockDockerService() { return { initialize: jest.fn().mockResolvedValue(undefined), - buildImage: jest.fn().mockResolvedValue(createMockDockerBuildResult()), + buildImage: jest.fn().mockResolvedValue({ success: true }), scanImage: jest.fn().mockResolvedValue(createMockScanResult()), tagImage: jest.fn().mockResolvedValue(undefined), pushImage: 
jest.fn().mockResolvedValue(undefined), diff --git a/test/integration/README.md b/test/integration/README.md index 80e6d1fba..33a42f2ad 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -11,7 +11,7 @@ Tests complete containerization workflows by chaining tools together: - **containerization-workflow.test.ts** - Complete containerization workflow tests: - Repository Analysis: analyze-repo tool for Node.js and Python applications - Multi-Module Workflow: Detects and analyzes monorepo structures - - Docker Operations: build-image β†’ tag-image β†’ scan-image with real Docker operations + - Docker Operations: build-image-context β†’ tag-image β†’ scan-image with real Docker operations - Error Handling: Invalid paths, missing files, graceful degradation - Tests use direct tool imports without createApp to avoid ES module issues - NO AI sampling - all operations are deterministic @@ -152,7 +152,7 @@ Some tools require AI sampling (generate-dockerfile, generate-k8s-manifests). 
Te **Current Test Strategy:** - Tests import tools directly without `createApp` to avoid Kubernetes client import issues - NO AI sampling in integration tests - focus on deterministic operations only -- All tools being tested (analyze-repo, build-image, tag-image, scan-image) are AI-free +- All tools being tested (analyze-repo, build-image-context, tag-image, scan-image) are AI-free - Manual ToolContext creation to avoid transitive imports of Kubernetes client - Environment-aware test skipping (Docker/Trivy availability) - Full end-to-end workflows with AI tools can be tested via: `npm run smoke:journey` diff --git a/test/integration/error-recovery.test.ts b/test/integration/error-recovery.test.ts index 9eb55817e..fc52fd414 100644 --- a/test/integration/error-recovery.test.ts +++ b/test/integration/error-recovery.test.ts @@ -1,18 +1,10 @@ /** * Integration Tests: Error Recovery - * Tests error recovery patterns and resilience without being prescriptive about exact behavior + * Tests error recovery patterns and resilience for build-image context preparation */ import { jest } from '@jest/globals'; -function createSuccessResult(value: T) { - return { ok: true as const, value }; -} - -function createFailureResult(error: string, guidance?: { resolution?: string; hints?: string[] }) { - return { ok: false as const, error, guidance }; -} - function createMockLogger() { return { info: jest.fn(), @@ -29,64 +21,45 @@ function createMockToolContext() { return { logger: createMockLogger() } as any; } -const mockDockerClient = { - buildImage: jest.fn(), - tagImage: jest.fn(), - pushImage: jest.fn(), - ping: jest.fn(), -}; - -const mockK8sClient = { - applyManifest: jest.fn(), - getDeploymentStatus: jest.fn(), - ping: jest.fn(), -}; - -jest.mock('../../src/infra/docker/client', () => ({ - createDockerClient: jest.fn(() => mockDockerClient), -})); - -jest.mock('../../src/infra/kubernetes/client', () => ({ - createKubernetesClient: jest.fn(() => mockK8sClient), -})); - 
jest.mock('../../src/lib/logger', () => ({ createTimer: jest.fn(() => ({ end: jest.fn(), error: jest.fn() })), createLogger: jest.fn(() => createMockLogger()), })); -jest.mock('../../src/lib/validation', () => ({ - validatePath: jest.fn().mockImplementation(async (pathStr: string) => ({ ok: true, value: pathStr })), - validateImageName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), - validateNamespace: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), +jest.mock('../../src/lib/validation-helpers', () => ({ + validatePathOrFail: jest.fn().mockImplementation(async (pathStr: string) => ({ ok: true, value: pathStr })), })); -jest.mock('node:fs', () => ({ - promises: { - access: jest.fn().mockResolvedValue(undefined), - readFile: jest.fn().mockResolvedValue('FROM node:18\nWORKDIR /app'), - writeFile: jest.fn().mockResolvedValue(undefined), - stat: jest.fn().mockResolvedValue({ isFile: () => true, isDirectory: () => false }), - constants: { R_OK: 4, W_OK: 2, X_OK: 1, F_OK: 0 }, - }, - constants: { R_OK: 4, W_OK: 2, X_OK: 1, F_OK: 0 }, +// Mock file-utils which is what the tool actually uses +const mockReadDockerfile = jest.fn<() => Promise<{ ok: true; value: string } | { ok: false; error: string }>>(); + +jest.mock('../../src/lib/file-utils', () => ({ + readDockerfile: mockReadDockerfile, })); -import { buildImage } from '../../src/tools/build-image/tool'; +// Mock fs/promises for path resolution +jest.mock('fs/promises', () => ({ + access: jest.fn().mockResolvedValue(undefined), + stat: jest.fn().mockResolvedValue({ isFile: () => true, isDirectory: () => false }), + readFile: jest.fn(), +})); + +import { buildImageContext } from '../../src/tools/build-image-context/tool'; describe('Error Recovery', () => { + const mockDockerfile = 'FROM node:18-alpine\nWORKDIR /app\nUSER appuser\nCMD ["node", "index.js"]'; + beforeEach(() => { jest.clearAllMocks(); - - 
mockDockerClient.ping.mockResolvedValue(createSuccessResult(undefined)); + mockReadDockerfile.mockResolvedValue({ ok: true, value: mockDockerfile }); }); describe('Error Handling Pattern', () => { it('should never throw exceptions on errors', async () => { - mockDockerClient.buildImage.mockRejectedValue(new Error('Unexpected error')); + mockReadDockerfile.mockRejectedValue(new Error('Unexpected error')); await expect( - buildImage( + buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ), @@ -94,9 +67,9 @@ describe('Error Recovery', () => { }); it('should return Result on all errors', async () => { - mockDockerClient.buildImage.mockRejectedValue(new Error('Network error')); + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'Permission denied' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -109,10 +82,9 @@ describe('Error Recovery', () => { }); it('should propagate errors without losing context', async () => { - const originalError = new Error('Original error message'); - mockDockerClient.buildImage.mockRejectedValue(originalError); + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'Original error message' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -121,39 +93,31 @@ describe('Error Recovery', () => { if (!result.ok) { expect(result.error).toBeDefined(); expect(result.error.length).toBeGreaterThan(0); + expect(result.error).toContain('Original error message'); } }); }); describe('Transient Errors', () => { - it('should handle transient network errors', async () => { + it('should handle transient filesystem errors', async () => { let callCount = 
0; - mockDockerClient.buildImage.mockImplementation(() => { + mockReadDockerfile.mockImplementation(() => { callCount++; if (callCount === 1) { - return Promise.reject(new Error('ETIMEDOUT')); + return Promise.resolve({ ok: false, error: 'ETIMEDOUT' }); } - return Promise.resolve(createSuccessResult({ - imageId: 'sha256:abc', - digest: 'sha256:def', - tags: ['test:latest'], - size: 100000, - layers: 5, - buildTime: 1000, - logs: [], - warnings: [], - })); + return Promise.resolve({ ok: true, value: mockDockerfile }); }); // First call fails - const firstResult = await buildImage( + const firstResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(firstResult.ok).toBe(false); // Second call succeeds (simulating retry) - const secondResult = await buildImage( + const secondResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -161,35 +125,31 @@ describe('Error Recovery', () => { expect(callCount).toBe(2); }); - it('should handle service restart scenarios', async () => { - // Test the retry pattern with build-image tool instead - mockDockerClient.buildImage.mockRejectedValueOnce(new Error('Service unavailable')); - const firstResult = await buildImage( + it('should handle filesystem becoming available', async () => { + mockReadDockerfile + .mockResolvedValueOnce({ ok: false, error: 'File not found' }) + .mockResolvedValueOnce({ ok: true, value: mockDockerfile }); + + const firstResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(firstResult.ok).toBe(false); - // After service restart - service becomes available again - mockDockerClient.buildImage.mockRejectedValueOnce(new Error('Service unavailable')); - const secondResult = await buildImage( +
// File becomes available + const secondResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); - // May still fail until service fully restarts - that's ok - expect(secondResult).toHaveProperty('ok'); + expect(secondResult.ok).toBe(true); }); }); describe('Permanent Errors', () => { it('should fail gracefully on permanent errors', async () => { - mockDockerClient.buildImage.mockResolvedValue( - createFailureResult('Dockerfile syntax error', { - resolution: 'Fix the Dockerfile', - }), - ); + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'EACCES: permission denied' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -201,12 +161,10 @@ describe('Error Recovery', () => { }); it('should provide error context on permanent failures', async () => { - const permError = new Error('Permission denied'); - (permError as any).code = 'EACCES'; - mockDockerClient.buildImage.mockRejectedValue(permError); + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'Path does not exist' }); - const result = await buildImage( - { path: '/protected', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, + const result = await buildImageContext( + { path: '/nonexistent', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -219,14 +177,9 @@ describe('Error Recovery', () => { describe('Error Messages', () => { it('should provide meaningful error messages', async () => { - mockDockerClient.buildImage.mockResolvedValue( - createFailureResult('Build failed: unknown instruction COPPY', { - resolution: 'Fix the typo (COPPY β†’ COPY)', - hints: ['Check Dockerfile syntax'], - }), - ); + mockReadDockerfile.mockResolvedValue({ ok: false, error: 
'ENOENT: no such file or directory' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -234,25 +187,6 @@ describe('Error Recovery', () => { expect(result.ok).toBe(false); if (!result.ok) { expect(result.error.length).toBeGreaterThan(0); - if (result.guidance) { - expect(result.guidance.resolution).toBeDefined(); - } - } - }); - - it('should include relevant context in errors', async () => { - mockDockerClient.buildImage.mockResolvedValue( - createFailureResult('Build failed for image myapp:v1.0'), - ); - - const result = await buildImage( - { path: '/test', dockerfile: 'Dockerfile', imageName: 'myapp:v1.0', tags: [], buildArgs: {} }, - createMockToolContext(), - ); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toBeDefined(); } }); }); @@ -262,8 +196,8 @@ describe('Error Recovery', () => { const errors = ['Error 1', 'Error 2', 'Error 3']; for (const error of errors) { - mockDockerClient.buildImage.mockRejectedValueOnce(new Error(error)); - const result = await buildImage( + mockReadDockerfile.mockResolvedValueOnce({ ok: false, error }); + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -272,30 +206,36 @@ describe('Error Recovery', () => { }); it('should recover after errors', async () => { - mockDockerClient.buildImage.mockRejectedValueOnce(new Error('Temporary error')); - const failResult = await buildImage( + mockReadDockerfile.mockResolvedValueOnce({ ok: false, error: 'Temporary error' }); + const failResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(failResult.ok).toBe(false); - mockDockerClient.buildImage.mockResolvedValueOnce( - 
createSuccessResult({ - imageId: 'sha256:abc', - digest: 'sha256:def', - tags: ['test:latest'], - size: 100000, - layers: 5, - buildTime: 1000, - logs: [], - warnings: [], - }), - ); - const successResult = await buildImage( + // Now succeed + mockReadDockerfile.mockResolvedValueOnce({ ok: true, value: mockDockerfile }); + const successResult = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(successResult.ok).toBe(true); }); }); + + describe('Success Cases', () => { + it('should succeed with valid Dockerfile', async () => { + const result = await buildImageContext( + { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: ['v1.0.0'], buildArgs: {} }, + createMockToolContext(), + ); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.summary).toBeDefined(); + expect(result.value.context).toBeDefined(); + expect(result.value.nextAction.buildCommand.command).toBeDefined(); + } + }); + }); }); diff --git a/test/integration/infrastructure/docker/client-error-handling.test.ts b/test/integration/infrastructure/docker/client-error-handling.test.ts index 8e752893e..bb8c6dd8f 100644 --- a/test/integration/infrastructure/docker/client-error-handling.test.ts +++ b/test/integration/infrastructure/docker/client-error-handling.test.ts @@ -1,444 +1,132 @@ /** * Docker Client Error Handling Integration Tests * - * These tests verify that the enhanced Docker error handling correctly - * identifies and categorizes real Docker daemon errors with specific, - * actionable error messages. + * These tests verify that the Docker client correctly handles errors + * for image operations (get, tag, push, remove). + * + * Note: Build operations are now handled by the agent executing docker build + * commands directly - see build-image tool for context preparation. 
* * Prerequisites: - * - Docker daemon must be runnings - * - Network connectivity for registry tests - * - Sufficient disk space for image operations + * - Docker daemon must be running */ import { createDockerClient } from '../../../../src/infra/docker/client'; import { createLogger } from '../../../../src/lib/logger'; -import type { DockerBuildOptions, DockerClient } from '../../../../src/infra/docker/client'; -import { DockerTestCleaner, TEST_IMAGE_NAME } from '../../../__support__/utilities/docker-test-cleaner'; -import { promises as fs } from 'node:fs'; -import path from 'node:path'; -import { createTestTempDir } from '../../../__support__/utilities/tmp-helpers'; -import type { DirResult } from 'tmp'; +import type { DockerClient } from '../../../../src/infra/docker/client'; describe('Docker Client Error Handling Integration Tests', () => { let dockerClient: DockerClient; - let testDir: DirResult; - let cleanup: () => Promise; - let testCleaner: DockerTestCleaner; const logger = createLogger({ level: 'debug' }); - const testTimeout = 60000; // 60 seconds for Docker operations + const testTimeout = 30000; beforeAll(async () => { - // Initialize the test cleaner with verification enabled dockerClient = createDockerClient(logger); - testCleaner = new DockerTestCleaner(logger, dockerClient, { verifyCleanup: true }); - - // Create a temporary directory for test Dockerfiles - const result = createTestTempDir('docker-error-tests-'); - testDir = result.dir; - cleanup = result.cleanup; - - // Wrap buildImage to track successful builds for cleanup - const original = dockerClient.buildImage.bind(dockerClient); - dockerClient.buildImage = async (options: DockerBuildOptions) => { - const result = await original(options); - if (result.ok && result.value.imageId) { - // Only track the actual image ID (SHA256) that was created - testCleaner.trackImage(result.value.imageId); - logger.debug(`Tracking created image: ${result.value.imageId}`); - } - return result; - }; - }); - - 
afterAll(async () => { - // Clean up all tracked Docker resources - await testCleaner.cleanup(); - - // Clean up test directory - await cleanup(); }); - afterEach(async () => { - // Clean up any test containers after each test - await testCleaner.cleanupContainers(); - }); - - // Helper function to create test Dockerfile - async function createTestDockerfile(content: string, filename = 'Dockerfile'): Promise { - const dockerfilePath = path.join(testDir.name, filename); - await fs.writeFile(dockerfilePath, content, 'utf-8'); - return dockerfilePath; - } - - describe('Network Connectivity Error Detection', () => { - test('should detect registry connectivity issues (ENOTFOUND)', async () => { - await createTestDockerfile( - 'FROM nonexistent-registry.invalid/library/alpine:latest\nRUN echo "test"', - 'Dockerfile.connectivity' - ); - - const invalidRegistryOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.connectivity', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(invalidRegistryOptions); + describe('Image Operations Error Detection', () => { + test('should detect errors when getting non-existent images', async () => { + const result = await dockerClient.getImage('nonexistent-image-12345:latest'); expect(result.ok).toBe(false); if (!result.ok) { - // Should detect network connectivity issues with meaningful error message - expect(result.error).toMatch(/network|connectivity|ENOTFOUND|getaddrinfo|connection|no such host/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).toMatch(/not found|does not exist|404|No such image/i); } }, testTimeout); - test('should detect registry connection refused (ECONNREFUSED)', async () => { - await createTestDockerfile( - 'FROM localhost:9999/library/alpine:latest\nRUN echo "test"', - 'Dockerfile.refused' - ); - - const connectionRefusedOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.refused', - context: testDir.name, - t: 
TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(connectionRefusedOptions); + test('should detect errors when inspecting non-existent images', async () => { + const result = await dockerClient.inspectImage('nonexistent-image-12345:latest'); expect(result.ok).toBe(false); if (!result.ok) { - // Should detect connection refused with meaningful error message - expect(result.error).toMatch(/network|connectivity|ECONNREFUSED|connection.*refused|context deadline exceeded|timeout/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).toMatch(/not found|does not exist|404|No such image/i); } }, testTimeout); - }); - - describe('Registry Authentication Error Detection', () => { - test('should detect authentication failures (401/403)', async () => { - // This test assumes a private registry that requires authentication - // Skip if no test registry is configured - const testRegistry = process.env.TEST_PRIVATE_REGISTRY; - if (!testRegistry) { - console.log('Skipping authentication test - TEST_PRIVATE_REGISTRY not configured'); - return; - } - await createTestDockerfile( - `FROM ${testRegistry}/private/image:latest\nRUN echo "test"`, - 'Dockerfile.auth' - ); - - const authFailureOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.auth', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(authFailureOptions); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Should detect authentication issues with meaningful error message - expect(result.error).toMatch(/authentication|unauthorized|access.*denied|401|403/i); - expect(result.error).not.toBe('Build failed: Unknown error'); - } - }, testTimeout); - }); - - describe('Image Not Found Error Detection', () => { - test('should detect missing base images (404)', async () => { - await createTestDockerfile( - 'FROM alpine:nonexistent-tag-12345\nRUN echo "test"', - 'Dockerfile.missing' - ); - - const missingImageOptions: 
DockerBuildOptions = { - dockerfile: 'Dockerfile.missing', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(missingImageOptions); + test('should detect errors when tagging non-existent images', async () => { + const result = await dockerClient.tagImage('nonexistent-image-12345:latest', 'new-repo', 'latest'); expect(result.ok).toBe(false); if (!result.ok) { - // Should detect missing images with meaningful error message - expect(result.error).toMatch(/not found|does not exist|404|no such image/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).toMatch(/not found|does not exist|no such image/i); } }, testTimeout); - test('should detect missing images in non-existent repositories', async () => { - await createTestDockerfile( - 'FROM library/totally-nonexistent-image-name-12345:latest\nRUN echo "test"', - 'Dockerfile.nonexistent' - ); - - const nonExistentRepoOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.nonexistent', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(nonExistentRepoOptions); + test('should detect errors when removing non-existent images', async () => { + const result = await dockerClient.removeImage('nonexistent-image-12345:latest'); expect(result.ok).toBe(false); if (!result.ok) { - // Should detect non-existent repositories with meaningful error message - expect(result.error).toMatch(/not found|does not exist|404|no such image/i); - expect(result.error).not.toBe('Build failed: Unknown error'); - } - }, testTimeout); - }); - - describe('Registry Server Error Detection', () => { - test('should detect registry server errors (5xx)', async () => { - // This test would require a registry that returns 5xx errors - // We'll create a scenario that might trigger this by using an overloaded registry - await createTestDockerfile( - 'FROM registry.hub.docker.com/library/alpine:latest\nRUN echo "test"', - 
'Dockerfile.server' - ); - - const serverErrorOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.server', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(serverErrorOptions); - - // This test might pass if the registry is working correctly - // We mainly want to ensure our error handling can detect 5xx errors when they occur - if (!result.ok && result.error.includes('server error')) { - expect(result.error).toMatch(/server error|internal error|503|502|500/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).toMatch(/not found|does not exist|no such image/i); } }, testTimeout); }); - describe('Dockerfile Syntax Error Detection', () => { - test('should detect malformed Dockerfile syntax', async () => { - await createTestDockerfile( - 'INVALID_INSTRUCTION this is not valid\nFROM alpine:latest', - 'Dockerfile.syntax' - ); - - const malformedDockerfileOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.syntax', - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(malformedDockerfileOptions); + describe('Container Operations Error Detection', () => { + test('should detect errors when removing non-existent containers', async () => { + const result = await dockerClient.removeContainer('nonexistent-container-12345'); expect(result.ok).toBe(false); if (!result.ok) { - // Should detect syntax errors with meaningful error message - expect(result.error).toMatch(/syntax|instruction|invalid|unknown instruction/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).toMatch(/not found|does not exist|no such container/i); } }, testTimeout); - test('should detect missing FROM instruction', async () => { - await createTestDockerfile( - 'RUN echo "test without FROM"', - 'Dockerfile.nofrom' - ); - - const noFromOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.nofrom', - context: testDir.name, - 
t: TEST_IMAGE_NAME - }; + test('should successfully list containers', async () => { + const result = await dockerClient.listContainers({ all: false }); - const result = await dockerClient.buildImage(noFromOptions); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Should detect missing FROM instruction with meaningful error message - expect(result.error).toMatch(/FROM|base image|instruction|must begin with|no build stage|context/i); - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.ok).toBe(true); + if (result.ok) { + expect(Array.isArray(result.value)).toBe(true); } }, testTimeout); }); - describe('Context Path Error Detection', () => { - test('should detect missing Dockerfile in valid context', async () => { - // Use a valid context but reference a non-existent Dockerfile - const invalidDockerfileOptions: DockerBuildOptions = { - dockerfile: 'NonExistentDockerfile', // This doesn't exist in testDir - context: testDir.name, - t: TEST_IMAGE_NAME - }; - - const result = await dockerClient.buildImage(invalidDockerfileOptions); + describe('Docker Daemon Connectivity', () => { + test('should successfully ping Docker daemon', async () => { + const result = await dockerClient.ping(); - expect(result.ok).toBe(false); - if (!result.ok) { - // Should detect missing Dockerfile with meaningful error message - expect(result.error).toMatch(/dockerfile|not found|ENOENT|no such file|cannot find/i); - expect(result.error).not.toBe('Build failed: Unknown error'); - } + expect(result.ok).toBe(true); }, testTimeout); }); - - describe('Image Operations Error Detection', () => { - test('should detect errors when getting non-existent images', async () => { - const result = await dockerClient.getImage('nonexistent-image:latest'); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toMatch(/not found|does not exist|404/i); - } - }); - - test('should detect errors when tagging non-existent images', async () => { - const 
result = await dockerClient.tagImage('nonexistent-image:latest', 'new-repo', 'latest'); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toMatch(/not found|does not exist|no such image/i); - } - }); - - test('should detect errors when pushing to unauthorized registries', async () => { - // First create a small test image - await createTestDockerfile( - 'FROM alpine:latest\nRUN echo "test"', - 'Dockerfile.push' + describe('Push Operations Error Detection', () => { + test('should detect errors when pushing non-existent local images', async () => { + // Try to push a non-existent local image + const pushResult = await dockerClient.pushImage( + 'nonexistent-image-for-push-test-12345', + 'latest' ); - const buildResult = await dockerClient.buildImage({ - dockerfile: 'Dockerfile.push', - context: testDir.name, - t: 'test-push-unauthorized' - }); - - if (buildResult.ok) { - // Tag the image with a registry prefix to trigger actual push attempt - const tagResult = await dockerClient.tagImage( - 'test-push-unauthorized:latest', - 'docker.io/unauthorized-test-repo', - 'latest' + expect(pushResult.ok).toBe(false); + if (!pushResult.ok) { + // Should detect image not found error + expect(pushResult.error).toMatch( + /not found|does not exist|no such image|reference does not exist/i ); - - if (tagResult.ok) { - // Try to push to Docker Hub without authentication - const pushResult = await dockerClient.pushImage('docker.io/unauthorized-test-repo', 'latest'); - - expect(pushResult.ok).toBe(false); - if (!pushResult.ok) { - // Should detect authentication issues with meaningful error message - expect(pushResult.error).toMatch(/authentication|unauthorized|access denied|denied|401|403|X-Registry-Auth|bad parameters/i); - expect(pushResult.error).not.toBe('Failed to push image: Unknown error'); - } - } - } - }, testTimeout); - }); - - describe('Build Progress Error Handling', () => { - test('should handle errors in build progress stream', async () => { - 
await createTestDockerfile(` -FROM alpine:latest -RUN exit 1 -`, 'Dockerfile.progress'); - - const progressErrorOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.progress', - context: testDir.name, - t: 'test-progress-error' - }; - - const result = await dockerClient.buildImage(progressErrorOptions); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Should detect build command failures with meaningful error message - expect(result.error).toMatch(/build failed|command.*failed|exit.*1|non-zero code/i); - expect(result.error).not.toBe('Build failed: Unknown error'); - } - }, testTimeout); - - test('should capture detailed error information from build steps', async () => { - await createTestDockerfile(` -FROM alpine:latest -RUN echo "Before error" -RUN /nonexistent/command/that/fails -RUN echo "After error" -`, 'Dockerfile.step'); - - const stepErrorOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.step', - context: testDir.name, - t: 'test-step-error' - }; - - const result = await dockerClient.buildImage(stepErrorOptions); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Should detect step failures with meaningful error message - expect(result.error).toMatch(/nonexistent|command|not found|failed|no such file/i); - expect(result.error).not.toBe('Build failed: Unknown error'); } }, testTimeout); }); describe('Error Message Quality', () => { test('should never return generic "Unknown error" for real Docker failures', async () => { - // Create test Dockerfiles for various error scenarios - await createTestDockerfile('FROM nonexistent-registry.invalid/alpine:latest', 'Dockerfile.unknown1'); - await createTestDockerfile('FROM alpine:nonexistent-tag-12345', 'Dockerfile.unknown2'); - await createTestDockerfile('INVALID_INSTRUCTION\nFROM alpine:latest', 'Dockerfile.unknown3'); - - const testCases: DockerBuildOptions[] = [ - { dockerfile: 'Dockerfile.unknown1', context: testDir, t: 'test-no-unknown-1' }, - { dockerfile: 
'Dockerfile.unknown2', context: testDir, t: 'test-no-unknown-2' }, - { dockerfile: 'Dockerfile.unknown3', context: testDir, t: 'test-no-unknown-3' } + const testCases = [ + () => dockerClient.getImage('nonexistent-image-12345:latest'), + () => dockerClient.tagImage('nonexistent-image-12345:latest', 'repo', 'tag'), + () => dockerClient.removeImage('nonexistent-image-12345:latest'), + () => dockerClient.removeContainer('nonexistent-container-12345'), ]; - for (const options of testCases) { - const result = await dockerClient.buildImage(options); + for (const testFn of testCases) { + const result = await testFn(); expect(result.ok).toBe(false); if (!result.ok) { - // Should never return generic "Unknown error" messages - expect(result.error).not.toBe('Build failed: Unknown error'); + expect(result.error).not.toBe('Unknown error'); expect(result.error).not.toContain('Unknown error'); - expect(result.error.length).toBeGreaterThan(20); // Should have meaningful detail + expect(result.error.length).toBeGreaterThan(10); } } - }, testTimeout * 3); // 3 test cases - - test('should provide actionable error messages', async () => { - await createTestDockerfile( - 'FROM nonexistent-registry.invalid/alpine:latest', - 'Dockerfile.actionable' - ); - - const networkErrorOptions: DockerBuildOptions = { - dockerfile: 'Dockerfile.actionable', - context: testDir.name, - t: 'test-actionable-error' - }; - - const result = await dockerClient.buildImage(networkErrorOptions); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Error should be specific and actionable, not generic - // Accept either network-related errors OR disk space errors (both are actionable) - expect(result.error).toMatch(/registry|connectivity|network|dns|not found|pull|manifest|disk space|no space left on device/i); - expect(result.error).not.toBe('Build failed: Unknown error'); - expect(result.error.length).toBeGreaterThan(20); // Should have meaningful detail - } }, testTimeout); }); }); diff --git 
a/test/integration/workflows/complete-journey.test.ts b/test/integration/workflows/complete-journey.test.ts index 2800290bb..46c669328 100644 --- a/test/integration/workflows/complete-journey.test.ts +++ b/test/integration/workflows/complete-journey.test.ts @@ -2,7 +2,7 @@ * Integration Test: Complete Containerization Journey * * Tests the complete end-to-end containerization workflow: - * analyze-repo β†’ generate-dockerfile β†’ build-image β†’ scan-image β†’ + * analyze-repo β†’ generate-dockerfile β†’ build-image-context β†’ scan-image β†’ * tag-image β†’ generate-k8s-manifests β†’ prepare-cluster β†’ kubectl apply β†’ verify-deploy * * This mirrors the smoke journey test but as a comprehensive integration test @@ -27,7 +27,7 @@ import { createDockerClient } from '@/infra/docker/client'; // Import all tools for complete workflow import analyzeRepoTool from '@/tools/analyze-repo/tool'; import generateDockerfileTool from '@/tools/generate-dockerfile/tool'; -import buildImageTool from '@/tools/build-image/tool'; +import buildImageContextTool from '@/tools/build-image-context/tool'; import scanImageTool from '@/tools/scan-image/tool'; import tagImageTool from '@/tools/tag-image/tool'; import generateK8sManifestsTool from '@/tools/generate-k8s-manifests/tool'; @@ -35,7 +35,8 @@ import prepareClusterTool from '@/tools/prepare-cluster/tool'; import verifyDeployTool from '@/tools/verify-deploy/tool'; import type { RepositoryAnalysis } from '@/tools/analyze-repo/schema'; -import type { BuildImageResult } from '@/tools/build-image/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; +import { execSync } from 'child_process'; describe('Complete Containerization Journey', () => { let testDir: DirResult; @@ -155,7 +156,7 @@ describe('Complete Containerization Journey', () => { journeyLog.push('Step 3: Building Docker image...'); const imageName = `journey-test-node:${timestamp}`; - const buildResult = await buildImageTool.handler( + const 
buildResult = await buildImageContextTool.handler( { path: testRepo, dockerfile: dockerfileToUse.replace(testRepo + '/', ''), @@ -166,19 +167,43 @@ describe('Complete Containerization Journey', () => { expect(buildResult.ok).toBe(true); if (!buildResult.ok) { - journeyLog.push(`Build failed: ${buildResult.error}`); + journeyLog.push(`Build preparation failed: ${buildResult.error}`); console.log(journeyLog.join('\n')); return; } const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - journeyLog.push(`βœ“ Image built: ${build.imageId.substring(0, 12)}`); + + // Execute the build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch (error) { + journeyLog.push(`Build execution failed: ${error}`); + console.log(journeyLog.join('\n')); + return; + } + + if (!builtImageTag) { + journeyLog.push('βœ— No image tag available after build'); + console.log(journeyLog.join('\n')); + return; + } + journeyLog.push(`βœ“ Image built: ${builtImageTag}`); // ===== STEP 4: Scan Image ===== journeyLog.push('Step 4: Scanning image for vulnerabilities...'); const scanResult = await scanImageTool.handler( - { imageId: build.imageId }, + { imageId: builtImageTag }, toolContext ); @@ -193,7 +218,7 @@ describe('Complete Containerization Journey', () => { const finalTag = `journey-test-node:v1.0.0`; const tagResult = await tagImageTool.handler( { - imageId: build.imageId, + imageId: builtImageTag, tag: finalTag, }, toolContext @@ -397,7 +422,7 @@ CMD ["python", "app.py"]` journeyLog.push('Step 3: Building Docker image...'); const imageName = `journey-test-python:${timestamp}`; - const buildResult = await 
buildImageTool.handler( + const buildResult = await buildImageContextTool.handler( { path: testRepo, dockerfile: 'Dockerfile', @@ -408,13 +433,37 @@ CMD ["python", "app.py"]` if (buildResult.ok) { const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - journeyLog.push(`βœ“ Image built: ${build.imageId.substring(0, 12)}`); + + // Execute the build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch (error) { + journeyLog.push(`Build execution failed: ${error}`); + console.log(journeyLog.join('\n')); + return; + } + + if (!builtImageTag) { + journeyLog.push('βœ— No image tag available after build'); + console.log(journeyLog.join('\n')); + return; + } + journeyLog.push(`βœ“ Image built: ${builtImageTag}`); // Tag image const tagResult = await tagImageTool.handler( { - imageId: build.imageId, + imageId: builtImageTag, tag: `journey-test-python:latest`, }, toolContext @@ -480,7 +529,7 @@ CMD ["node", "index.js"]`; } const imageName = `multi-journey-${app.name}:${timestamp}`; - const buildResult = await buildImageTool.handler( + const buildResult = await buildImageContextTool.handler( { path: app.path, dockerfile: 'Dockerfile', @@ -491,8 +540,23 @@ CMD ["node", "index.js"]`; if (buildResult.ok) { const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - results.push({ name: app.name, success: true }); + + // Execute the build command + try { + execSync(build.nextAction.buildCommand.command, { + cwd: app.path, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + const builtImageTag 
= build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + results.push({ name: app.name, success: true }); + } catch { + results.push({ name: app.name, success: false }); + } } else { results.push({ name: app.name, success: false }); } @@ -526,7 +590,7 @@ CMD ["node", "index.js"]`; // Test 2: Invalid Dockerfile journeyLog.push('Test 2: Invalid Dockerfile path'); - const buildResult = await buildImageTool.handler( + const buildResult = await buildImageContextTool.handler( { dockerfilePath: '/nonexistent/Dockerfile', context: testDir.name, @@ -608,7 +672,7 @@ CMD ["node", "index.js"]`; } const imageName = `perf-test:${Date.now()}`; - const buildResult = await buildImageTool.handler( + const buildResult = await buildImageContextTool.handler( { path: testRepo, dockerfile: 'Dockerfile', @@ -619,16 +683,33 @@ CMD ["node", "index.js"]`; if (buildResult.ok) { const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - - await tagImageTool.handler( - { - imageId: build.imageId, - tag: 'perf-test:latest', - }, - toolContext - ); + + // Execute the build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch { + // Build execution failed, skip tagging + } + if (builtImageTag) { + await tagImageTool.handler( + { + imageId: builtImageTag, + tag: 'perf-test:latest', + }, + toolContext + ); + } } const duration = Date.now() - startTime; diff --git a/test/integration/workflows/containerization-workflow.test.ts b/test/integration/workflows/containerization-workflow.test.ts index 8d53329fb..7b10f9750 100644 --- a/test/integration/workflows/containerization-workflow.test.ts 
+++ b/test/integration/workflows/containerization-workflow.test.ts @@ -2,7 +2,7 @@ * Integration Test: Complete Containerization Workflow * * Tests the entire containerization journey by chaining tools together: - * analyze-repo β†’ generate-dockerfile β†’ build-image β†’ scan-image β†’ + * analyze-repo β†’ generate-dockerfile β†’ build-image-context β†’ scan-image β†’ * tag-image β†’ generate-k8s-manifests * * Prerequisites: @@ -22,7 +22,8 @@ import { createDockerClient } from '@/infra/docker/client'; // Import tools directly to avoid createApp dependency import analyzeRepoTool from '@/tools/analyze-repo/tool'; -import buildImageTool, { type BuildImageResult } from '@/tools/build-image/tool'; +import buildImageContextTool from '@/tools/build-image-context/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; import tagImageTool from '@/tools/tag-image/tool'; import scanImageTool from '@/tools/scan-image/tool'; @@ -174,7 +175,7 @@ describe('Complete Containerization Workflow Integration', () => { }); describe('Docker Operations Integration', () => { - it('should build, tag, and scan image with existing Dockerfile', async () => { + it('should prepare build context, execute build, tag, and scan image', async () => { if (!dockerAvailable) { console.log('Skipping: Docker not available'); return; @@ -204,43 +205,84 @@ COPY index.js ./ CMD ["node", "index.js"]` ); - // Build image - const imageName = `docker-ops-test:${Date.now()}`; - const buildResult = await buildImageTool.handler({ + // Prepare build context (build-image-context returns context, not execution) + const imageName = `docker-ops-test-${Date.now()}`; + const buildResult = await buildImageContextTool.handler({ path: appPath, dockerfile: 'Dockerfile', imageName, + tags: ['latest'], }, toolContext); - if (buildResult.ok) { - const build = buildResult.value as BuildImageResult; - expect(build.imageId).toBeDefined(); - expect(build.createdTags).toContain(imageName); - 
testCleaner.trackImage(build.imageId); - - // Tag image - const tagResult = await tagImageTool.handler({ - imageId: build.imageId, - tag: `docker-ops-test:latest`, - }, toolContext); + expect(buildResult.ok).toBe(true); + if (!buildResult.ok) { + console.log('Build preparation failed:', buildResult.error); + return; + } - if (tagResult.ok) { - expect(tagResult.value).toBeDefined(); + const build = buildResult.value as BuildImageResult; + + // Validate new result structure + expect(build.summary).toBeDefined(); + expect(build.context.buildContextPath).toBe(appPath); + expect(build.context.dockerfilePath).toContain('Dockerfile'); + expect(build.nextAction.buildCommand.command).toBeDefined(); + expect(build.buildConfig.finalTags.length).toBeGreaterThan(0); + + // Execute the actual build using the command provided + const { execSync } = await import('child_process'); + let builtImageId: string | undefined; + + try { + // Execute the build command returned by the tool + const output = execSync(build.nextAction.buildCommand.command, { + cwd: appPath, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + + // Extract image ID from build output + const idMatch = output.match(/Successfully built ([a-f0-9]+)/i) || + output.match(/writing image sha256:([a-f0-9]+)/i); + if (idMatch) { + builtImageId = idMatch[1]; + testCleaner.trackImage(builtImageId); } - - // Scan image - const scanResult = await scanImageTool.handler({ - imageId: build.imageId, - }, toolContext); - - // Scan may fail if Trivy not installed - that's OK - if (!scanResult.ok) { - console.log('Scan skipped (Trivy may not be installed)'); - } else { - expect(scanResult.value).toBeDefined(); + + // Get the tagged image reference + const taggedImage = build.buildConfig.finalTags[0]; + if (taggedImage) { + testCleaner.trackImage(taggedImage); + } + + // Tag image with an additional tag + if (taggedImage) { + const tagResult = await 
tagImageTool.handler({ + imageId: taggedImage, + tag: `${imageName}:v1.0.0`, + }, toolContext); + + if (tagResult.ok) { + expect(tagResult.value).toBeDefined(); + testCleaner.trackImage(`${imageName}:v1.0.0`); + } + + // Scan image + const scanResult = await scanImageTool.handler({ + imageId: taggedImage, + }, toolContext); + + // Scan may fail if Trivy not installed - that's OK + if (!scanResult.ok) { + console.log('Scan skipped (Trivy may not be installed)'); + } else { + expect(scanResult.value).toBeDefined(); + } } - } else { - console.log('Build failed:', buildResult.error); + } catch (error) { + // Build execution failed - this is still valid for testing context preparation + console.log('Build execution failed (testing context preparation only):', error); } }, testTimeout); }); @@ -259,7 +301,7 @@ CMD ["node", "index.js"]` }); it('should handle missing Dockerfile in build step', async () => { - const result = await buildImageTool.handler({ + const result = await buildImageContextTool.handler({ dockerfilePath: '/nonexistent/Dockerfile', context: testDir.name, imageName: 'test:latest', diff --git a/test/integration/workflows/docker-workflow.test.ts b/test/integration/workflows/docker-workflow.test.ts index 89fd86278..22aeee516 100644 --- a/test/integration/workflows/docker-workflow.test.ts +++ b/test/integration/workflows/docker-workflow.test.ts @@ -2,7 +2,7 @@ * Integration Test: Docker Workflow * * Tests the complete Docker containerization workflow: - * analyze-repo β†’ generate-dockerfile β†’ build-image β†’ scan-image β†’ tag-image + * analyze-repo β†’ generate-dockerfile β†’ build-image-context β†’ scan-image β†’ tag-image * * Prerequisites: * - Docker daemon running @@ -20,13 +20,14 @@ import { createDockerClient } from '@/infra/docker/client'; // Import tools import analyzeRepoTool from '@/tools/analyze-repo/tool'; import generateDockerfileTool from '@/tools/generate-dockerfile/tool'; -import buildImageTool from '@/tools/build-image/tool'; +import 
buildImageContextTool from '@/tools/build-image-context/tool'; import scanImageTool from '@/tools/scan-image/tool'; import tagImageTool from '@/tools/tag-image/tool'; import type { RepositoryAnalysis } from '@/tools/analyze-repo/schema'; import type { GenerateDockerfileResult } from '@/tools/generate-dockerfile/schema'; -import type { BuildImageResult } from '@/tools/build-image/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; +import { execSync } from 'child_process'; describe('Docker Workflow Integration', () => { let testCleaner: DockerTestCleaner; @@ -61,226 +62,271 @@ describe('Docker Workflow Integration', () => { }); describe('Complete Docker Workflow', () => { - it('should complete analyze β†’ generate β†’ build β†’ scan β†’ tag workflow for Node.js app', async () => { - if (!dockerAvailable) { - console.log('Skipping: Docker not available'); - return; - } - - const testRepo = join(fixtureBasePath, 'node-express'); - - if (!existsSync(testRepo) || !existsSync(join(testRepo, 'package.json'))) { - console.log('Skipping: node-express fixture not found'); - return; - } - - // Step 1: Analyze repository - const analyzeResult = await analyzeRepoTool.handler( - { repositoryPath: testRepo }, - toolContext - ); - - expect(analyzeResult.ok).toBe(true); - if (!analyzeResult.ok) { - console.log('Analysis failed:', analyzeResult.error); - return; - } - - const analysis = analyzeResult.value as RepositoryAnalysis; - expect(analysis.modules).toBeDefined(); - expect(analysis.modules.length).toBeGreaterThan(0); - expect(analysis.modules[0].language).toBe('javascript'); - - // Step 2: Generate Dockerfile (AI-based, may fail if API unavailable) - const dockerfilePath = join(testRepo, 'Dockerfile.test'); - const generateResult = await generateDockerfileTool.handler( - { - repositoryPath: testRepo, - analysis: JSON.stringify(analysis), - outputPath: dockerfilePath, - targetPlatform: 'linux/amd64', - }, - toolContext - ); - - // If Dockerfile 
generation fails (AI not available), use existing fixture - let dockerfileToUse = join(testRepo, 'Dockerfile'); - if (generateResult.ok) { - const dockerfile = generateResult.value as GenerateDockerfileResult; - if (dockerfile && existsSync(dockerfilePath)) { - dockerfileToUse = dockerfilePath; + it( + 'should complete analyze β†’ generate β†’ build β†’ scan β†’ tag workflow for Node.js app', + async () => { + if (!dockerAvailable) { + console.log('Skipping: Docker not available'); + return; } - } else { - console.log('Dockerfile generation skipped (AI unavailable), using fixture'); - } - if (!existsSync(dockerfileToUse)) { - console.log('No Dockerfile available for build step'); - return; - } - - // Step 3: Build image - const imageName = `docker-workflow-test:${Date.now()}`; - const buildResult = await buildImageTool.handler( - { - path: testRepo, - dockerfile: dockerfileToUse.replace(testRepo + '/', ''), - imageName, - }, - toolContext - ); + const testRepo = join(fixtureBasePath, 'node-express'); - expect(buildResult.ok).toBe(true); - if (!buildResult.ok) { - console.log('Build failed:', buildResult.error); - return; - } - - const build = buildResult.value as BuildImageResult; - expect(build.imageId).toBeDefined(); - expect(build.createdTags).toContain(imageName); - testCleaner.trackImage(build.imageId); + if (!existsSync(testRepo) || !existsSync(join(testRepo, 'package.json'))) { + console.log('Skipping: node-express fixture not found'); + return; + } - // Step 4: Scan image (may fail if Trivy not installed) - const scanResult = await scanImageTool.handler( - { imageId: build.imageId }, - toolContext - ); + // Step 1: Analyze repository + const analyzeResult = await analyzeRepoTool.handler( + { repositoryPath: testRepo }, + toolContext, + ); - if (!scanResult.ok) { - console.log('Scan skipped (Trivy may not be installed)'); - } else { - expect(scanResult.value).toBeDefined(); - } + expect(analyzeResult.ok).toBe(true); + if (!analyzeResult.ok) { + 
console.log('Analysis failed:', analyzeResult.error); + return; + } - // Step 5: Tag image (with retry for potential race condition after build) - const newTag = `docker-workflow-test:v1.0`; - let tagResult; - const maxRetries = 3; + const analysis = analyzeResult.value as RepositoryAnalysis; + expect(analysis.modules).toBeDefined(); + expect(analysis.modules.length).toBeGreaterThan(0); + expect(analysis.modules[0].language).toBe('javascript'); - for (let attempt = 0; attempt < maxRetries; attempt++) { - tagResult = await tagImageTool.handler( + // Step 2: Generate Dockerfile (AI-based, may fail if API unavailable) + const dockerfilePath = join(testRepo, 'Dockerfile.test'); + const generateResult = await generateDockerfileTool.handler( { - imageId: build.imageId, - tag: newTag, + repositoryPath: testRepo, + analysis: JSON.stringify(analysis), + outputPath: dockerfilePath, + targetPlatform: 'linux/amd64', }, - toolContext + toolContext, ); - if (tagResult.ok) { - break; + // If Dockerfile generation fails (AI not available), use existing fixture + let dockerfileToUse = join(testRepo, 'Dockerfile'); + if (generateResult.ok) { + const dockerfile = generateResult.value as GenerateDockerfileResult; + if (dockerfile && existsSync(dockerfilePath)) { + dockerfileToUse = dockerfilePath; + } + } else { + console.log('Dockerfile generation skipped (AI unavailable), using fixture'); } - // Log error details for debugging - console.log(`Tag attempt ${attempt + 1} failed:`, { - error: tagResult.error, - guidance: tagResult.guidance, - imageId: build.imageId, - tag: newTag, - }); - - // If tag failed and we have retries left, wait briefly and retry - if (attempt < maxRetries - 1) { - await new Promise((resolve) => setTimeout(resolve, 100 * Math.pow(2, attempt))); + if (!existsSync(dockerfileToUse)) { + console.log('No Dockerfile available for build step'); + return; } - } - - expect(tagResult?.ok).toBe(true); - if (tagResult?.ok) { - expect(tagResult.value).toBeDefined(); - } 
else { - console.log('All tag attempts failed. Final result:', tagResult); - } - }, testTimeout); - it('should complete workflow for Python Flask app', async () => { - if (!dockerAvailable) { - console.log('Skipping: Docker not available'); - return; - } - - const testRepo = join(fixtureBasePath, 'python-flask'); - - if (!existsSync(testRepo)) { - console.log('Skipping: python-flask fixture not found'); - return; - } - - // Step 1: Analyze - const analyzeResult = await analyzeRepoTool.handler( - { repositoryPath: testRepo }, - toolContext - ); - - if (!analyzeResult.ok) { - console.log('Analysis failed:', analyzeResult.error); - return; - } + // Step 3: Build image - now returns context preparation, we execute the command + const imageName = `docker-workflow-test:${Date.now()}`; + const buildResult = await buildImageContextTool.handler( + { + path: testRepo, + dockerfile: dockerfileToUse.replace(testRepo + '/', ''), + imageName, + }, + toolContext, + ); - const analysis = analyzeResult.value as RepositoryAnalysis; - expect(analysis.modules).toBeDefined(); + expect(buildResult.ok).toBe(true); + if (!buildResult.ok) { + console.log('Build preparation failed:', buildResult.error); + return; + } - // Step 2: Generate or use existing Dockerfile - const dockerfilePath = join(testRepo, 'Dockerfile'); + const build = buildResult.value as BuildImageResult; + expect(build.summary).toBeDefined(); + expect(build.nextAction.buildCommand.command).toBeDefined(); + + // Execute the build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch (error) { + console.log('Build execution failed:', error); + return; + } - // Create a simple test Dockerfile if 
none exists - if (!existsSync(dockerfilePath)) { - writeFileSync( - dockerfilePath, - `FROM python:3.11-slim -WORKDIR /app -COPY requirements.txt* ./ -RUN pip install --no-cache-dir -r requirements.txt 2>/dev/null || true -COPY . . -EXPOSE 5000 -CMD ["python", "app.py"]` - ); - } + if (!builtImageTag) { + console.log('No image tag available after build'); + return; + } - // Step 3: Build image - const imageName = `docker-workflow-python:${Date.now()}`; - const buildResult = await buildImageTool.handler( - { - path: testRepo, - dockerfile: 'Dockerfile', - imageName, - }, - toolContext - ); + // Step 4: Scan image (may fail if Trivy not installed) + const scanResult = await scanImageTool.handler({ imageId: builtImageTag }, toolContext); - if (buildResult.ok) { - const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); + if (!scanResult.ok) { + console.log('Scan skipped (Trivy may not be installed)'); + } else { + expect(scanResult.value).toBeDefined(); + } - // Step 4: Tag the image (with retry for potential race condition) + // Step 5: Tag image (with retry for potential race condition after build) + const newTag = `docker-workflow-test:v1.0`; let tagResult; const maxRetries = 3; for (let attempt = 0; attempt < maxRetries; attempt++) { tagResult = await tagImageTool.handler( { - imageId: build.imageId, - tag: `docker-workflow-python:latest`, + imageId: builtImageTag, + tag: newTag, }, - toolContext + toolContext, ); if (tagResult.ok) { break; } + // Log error details for debugging + console.log(`Tag attempt ${attempt + 1} failed:`, { + error: tagResult.error, + guidance: tagResult.guidance, + imageId: builtImageTag, + }); + + // If tag failed and we have retries left, wait briefly and retry if (attempt < maxRetries - 1) { - console.log(`Tag attempt ${attempt + 1} failed, retrying...`); await new Promise((resolve) => setTimeout(resolve, 100 * Math.pow(2, attempt))); } } - if (!tagResult.ok) { - console.log('Tagging failed:', 
tagResult.error); + expect(tagResult?.ok).toBe(true); + if (tagResult?.ok) { + expect(tagResult.value).toBeDefined(); + } else { + console.log('All tag attempts failed. Final result:', tagResult); + } + }, + testTimeout, + ); + + it( + 'should complete workflow for Python Flask app', + async () => { + if (!dockerAvailable) { + console.log('Skipping: Docker not available'); return; } - } - }, testTimeout); + + const testRepo = join(fixtureBasePath, 'python-flask'); + + if (!existsSync(testRepo)) { + console.log('Skipping: python-flask fixture not found'); + return; + } + + // Step 1: Analyze + const analyzeResult = await analyzeRepoTool.handler( + { repositoryPath: testRepo }, + toolContext, + ); + + if (!analyzeResult.ok) { + console.log('Analysis failed:', analyzeResult.error); + return; + } + + const analysis = analyzeResult.value as RepositoryAnalysis; + expect(analysis.modules).toBeDefined(); + + // Step 2: Generate or use existing Dockerfile + const dockerfilePath = join(testRepo, 'Dockerfile'); + + // Create a simple test Dockerfile if none exists + if (!existsSync(dockerfilePath)) { + writeFileSync( + dockerfilePath, + `FROM python:3.11-slim +WORKDIR /app +COPY requirements.txt* ./ +RUN pip install --no-cache-dir -r requirements.txt 2>/dev/null || true +COPY . . 
+EXPOSE 5000 +CMD ["python", "app.py"]`, + ); + } + + // Step 3: Build image + const imageName = `docker-workflow-python:${Date.now()}`; + const buildResult = await buildImageContextTool.handler( + { + path: testRepo, + dockerfile: 'Dockerfile', + imageName, + }, + toolContext, + ); + + if (buildResult.ok) { + const build = buildResult.value as BuildImageResult; + + // Execute build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch (error) { + console.log('Build execution failed:', error); + return; + } + + if (builtImageTag) { + // Step 4: Tag the image (with retry for potential race condition) + let tagResult; + const maxRetries = 3; + + for (let attempt = 0; attempt < maxRetries; attempt++) { + tagResult = await tagImageTool.handler( + { + imageId: builtImageTag, + tag: `docker-workflow-python:latest`, + }, + toolContext, + ); + + if (tagResult.ok) { + break; + } + + if (attempt < maxRetries - 1) { + console.log(`Tag attempt ${attempt + 1} failed, retrying...`); + await new Promise((resolve) => setTimeout(resolve, 100 * Math.pow(2, attempt))); + } + } + + if (!tagResult?.ok) { + console.log('Tagging failed:', tagResult?.error); + return; + } + } + } + }, + testTimeout, + ); }); describe('Workflow Error Handling', () => { @@ -288,7 +334,7 @@ CMD ["python", "app.py"]` // Test error in middle of workflow - invalid repository const analyzeResult = await analyzeRepoTool.handler( { repositoryPath: '/nonexistent/path' }, - toolContext + toolContext, ); expect(analyzeResult.ok).toBe(false); @@ -298,13 +344,13 @@ CMD ["python", "app.py"]` } // Test build error - invalid Dockerfile - const buildResult = await buildImageTool.handler( + 
const buildResult = await buildImageContextTool.handler( { dockerfilePath: '/nonexistent/Dockerfile', context: fixtureBasePath, imageName: 'test:invalid', }, - toolContext + toolContext, ); expect(buildResult.ok).toBe(false); @@ -323,7 +369,7 @@ CMD ["python", "app.py"]` const analyzeResult = await analyzeRepoTool.handler( { repositoryPath: testRepo }, - toolContext + toolContext, ); if (analyzeResult.ok) { @@ -335,136 +381,167 @@ CMD ["python", "app.py"]` }); describe('Partial Workflow Scenarios', () => { - it('should support build β†’ scan β†’ tag without analysis', async () => { - if (!dockerAvailable) { - console.log('Skipping: Docker not available'); - return; - } - - const testRepo = join(fixtureBasePath, 'node-express'); - const dockerfilePath = join(testRepo, 'Dockerfile'); - - if (!existsSync(testRepo) || !existsSync(dockerfilePath)) { - console.log('Skipping: fixture not found'); - return; - } + it( + 'should support build β†’ scan β†’ tag without analysis', + async () => { + if (!dockerAvailable) { + console.log('Skipping: Docker not available'); + return; + } - // Start directly with build - const imageName = `partial-workflow-test:${Date.now()}`; - const buildResult = await buildImageTool.handler( - { - path: testRepo, - dockerfile: 'Dockerfile', - imageName, - }, - toolContext - ); + const testRepo = join(fixtureBasePath, 'node-express'); + const dockerfilePath = join(testRepo, 'Dockerfile'); - if (buildResult.ok) { - const build = buildResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); + if (!existsSync(testRepo) || !existsSync(dockerfilePath)) { + console.log('Skipping: fixture not found'); + return; + } - // Continue with scan and tag - const scanResult = await scanImageTool.handler( - { imageId: build.imageId }, - toolContext + // Start directly with build + const imageName = `partial-workflow-test:${Date.now()}`; + const buildResult = await buildImageContextTool.handler( + { + path: testRepo, + dockerfile: 'Dockerfile', + 
imageName, + }, + toolContext, ); - // Scan may fail if Trivy not available - that's OK - expect(scanResult.ok !== undefined).toBe(true); - - // Tag with retry for potential race condition - let tagResult; - const maxRetries = 3; - - for (let attempt = 0; attempt < maxRetries; attempt++) { - tagResult = await tagImageTool.handler( - { - imageId: build.imageId, - tag: `partial-workflow-test:latest`, - }, - toolContext - ); - - if (tagResult.ok) { - break; + if (buildResult.ok) { + const build = buildResult.value as BuildImageResult; + + // Execute build command + let builtImageTag: string | undefined; + try { + execSync(build.nextAction.buildCommand.command, { + cwd: testRepo, + encoding: 'utf-8', + env: { ...process.env, ...build.nextAction.buildCommand.environment }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + builtImageTag = build.buildConfig.finalTags[0]; + if (builtImageTag) { + testCleaner.trackImage(builtImageTag); + } + } catch (error) { + console.log('Build execution failed:', error); + return; } - if (attempt < maxRetries - 1) { - console.log(`Tag attempt ${attempt + 1} failed, retrying...`); - await new Promise((resolve) => setTimeout(resolve, 100 * Math.pow(2, attempt))); + if (builtImageTag) { + // Continue with scan and tag + const scanResult = await scanImageTool.handler({ imageId: builtImageTag }, toolContext); + + // Scan may fail if Trivy not available - that's OK + expect(scanResult.ok !== undefined).toBe(true); + + // Tag with retry for potential race condition + let tagResult; + const maxRetries = 3; + + for (let attempt = 0; attempt < maxRetries; attempt++) { + tagResult = await tagImageTool.handler( + { + imageId: builtImageTag, + tag: `partial-workflow-test:latest`, + }, + toolContext, + ); + + if (tagResult.ok) { + break; + } + + if (attempt < maxRetries - 1) { + console.log(`Tag attempt ${attempt + 1} failed, retrying...`); + await new Promise((resolve) => setTimeout(resolve, 100 * Math.pow(2, attempt))); + } + } + + if (!tagResult?.ok) { + 
console.log('Tagging failed:', tagResult?.error); + return; + } } } - - if (!tagResult.ok) { - console.log('Tagging failed:', tagResult.error); - return; - } - } - }, testTimeout); + }, + testTimeout, + ); }); describe('Concurrent Docker Operations', () => { // TODO: This test has an intermittent issue where tool handlers return undefined // Other concurrent operation tests in complete-journey.test.ts provide coverage - it.skip('should handle concurrent builds of different apps', async () => { - if (!dockerAvailable) { - console.log('Skipping: Docker not available'); - return; - } + it.skip( + 'should handle concurrent builds of different apps', + async () => { + if (!dockerAvailable) { + console.log('Skipping: Docker not available'); + return; + } - const nodeRepo = join(fixtureBasePath, 'node-express'); - const pythonRepo = join(fixtureBasePath, 'python-flask'); + const nodeRepo = join(fixtureBasePath, 'node-express'); + const pythonRepo = join(fixtureBasePath, 'python-flask'); - if (!existsSync(nodeRepo) || !existsSync(pythonRepo)) { - console.log('Skipping: fixtures not available'); - return; - } + if (!existsSync(nodeRepo) || !existsSync(pythonRepo)) { + console.log('Skipping: fixtures not available'); + return; + } - // Ensure both have Dockerfiles - const nodeDockerfile = join(nodeRepo, 'Dockerfile'); - const pythonDockerfile = join(pythonRepo, 'Dockerfile'); + // Ensure both have Dockerfiles + const nodeDockerfile = join(nodeRepo, 'Dockerfile'); + const pythonDockerfile = join(pythonRepo, 'Dockerfile'); - // Require both Dockerfiles to exist for this test - if (!existsSync(nodeDockerfile) || !existsSync(pythonDockerfile)) { - console.log('Skipping: Both Dockerfiles required for concurrent test'); - return; - } + // Require both Dockerfiles to exist for this test + if (!existsSync(nodeDockerfile) || !existsSync(pythonDockerfile)) { + console.log('Skipping: Both Dockerfiles required for concurrent test'); + return; + } - // Build both concurrently (Docker 
handles this well) - const timestamp = Date.now(); - const [nodeResult, pythonResult] = await Promise.all([ - buildImageTool.handler( - { - path: nodeRepo, - dockerfile: 'Dockerfile', - imageName: `concurrent-node:${timestamp}`, - }, - toolContext - ), - buildImageTool.handler( - { - path: pythonRepo, - dockerfile: 'Dockerfile', - imageName: `concurrent-python:${timestamp}`, - }, - toolContext - ), - ]); - - // Track images for cleanup - if (nodeResult?.ok) { - const build = nodeResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - } + // Build both concurrently (Docker handles this well) + const timestamp = Date.now(); + const [nodeResult, pythonResult] = await Promise.all([ + buildImageContextTool.handler( + { + path: nodeRepo, + dockerfile: 'Dockerfile', + imageName: `concurrent-node:${timestamp}`, + }, + toolContext, + ), + buildImageContextTool.handler( + { + path: pythonRepo, + dockerfile: 'Dockerfile', + imageName: `concurrent-python:${timestamp}`, + }, + toolContext, + ), + ]); + + // Track images for cleanup (these are now context-providing, but the test is skipped) + // After executing builds, we would track the built tags + if (nodeResult?.ok) { + const build = nodeResult.value as BuildImageResult; + const builtTag = build.buildConfig.finalTags[0]; + if (builtTag) { + testCleaner.trackImage(builtTag); + } + } - if (pythonResult?.ok) { - const build = pythonResult.value as BuildImageResult; - testCleaner.trackImage(build.imageId); - } + if (pythonResult?.ok) { + const build = pythonResult.value as BuildImageResult; + const builtTag = build.buildConfig.finalTags[0]; + if (builtTag) { + testCleaner.trackImage(builtTag); + } + } - // At least one should succeed (both should if Docker is healthy) - expect(nodeResult?.ok || pythonResult?.ok).toBe(true); - }, testTimeout * 2); + // At least one should succeed (both should if Docker is healthy) + expect(nodeResult?.ok || pythonResult?.ok).toBe(true); + }, + testTimeout * 2, + ); }); }); 
diff --git a/test/unit/error-scenarios.test.ts b/test/unit/error-scenarios.test.ts index 3fce7e35a..3ba0db6cb 100644 --- a/test/unit/error-scenarios.test.ts +++ b/test/unit/error-scenarios.test.ts @@ -22,7 +22,7 @@ import * as scannerModule from '@/infra/security/scanner'; // Import tools import analyzeRepoTool from '@/tools/analyze-repo/tool'; import generateDockerfileTool from '@/tools/generate-dockerfile/tool'; -import buildImageTool from '@/tools/build-image/tool'; +import buildImageContextTool from '@/tools/build-image-context/tool'; import scanImageTool from '@/tools/scan-image/tool'; import tagImageTool from '@/tools/tag-image/tool'; import pushImageTool from '@/tools/push-image/tool'; @@ -86,7 +86,7 @@ describe('Error Scenario Coverage', () => { it('should reject analyze-repo with invalid path', async () => { const result = await analyzeRepoTool.handler( { repositoryPath: '/absolutely/nonexistent/path/12345' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -99,7 +99,7 @@ describe('Error Scenario Coverage', () => { it('should reject generate-dockerfile with empty repository path', async () => { const result = await generateDockerfileTool.handler( { repositoryPath: '', environment: 'production', targetPlatform: 'linux/amd64' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -109,13 +109,13 @@ describe('Error Scenario Coverage', () => { }); it('should reject build-image with missing Dockerfile', async () => { - const result = await buildImageTool.handler( + const result = await buildImageContextTool.handler( { dockerfilePath: '/nonexistent/Dockerfile', context: '/tmp', imageName: 'test:latest', }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -130,7 +130,7 @@ describe('Error Scenario Coverage', () => { imageId: 'totally-invalid-image-id-12345', tag: 'test:v1', }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -142,7 +142,7 @@ describe('Error Scenario Coverage', () => { it('should 
reject scan-image with nonexistent image', async () => { const result = await scanImageTool.handler( { imageId: 'nonexistent-image:12345' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -157,7 +157,7 @@ describe('Error Scenario Coverage', () => { imageId: 'test:latest', registry: 'invalid://registry', }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -169,7 +169,7 @@ describe('Error Scenario Coverage', () => { it('should reject fix-dockerfile with nonexistent file', async () => { const result = await fixDockerfileTool.handler( { dockerfilePath: '/nonexistent/Dockerfile' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -182,7 +182,7 @@ describe('Error Scenario Coverage', () => { it('should reject fix-dockerfile with invalid path', async () => { const result = await fixDockerfileTool.handler( { dockerfilePath: '/nonexistent/Dockerfile' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -197,7 +197,7 @@ describe('Error Scenario Coverage', () => { analysis: '', imageName: 'test:latest', } as any, - toolContext + toolContext, ); // May either fail or handle gracefully @@ -207,7 +207,7 @@ describe('Error Scenario Coverage', () => { it('should reject prepare-cluster with invalid namespace', async () => { const result = await prepareClusterTool.handler( { namespace: '-invalid-namespace' }, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -227,7 +227,7 @@ describe('Error Scenario Coverage', () => { analysis: '{invalid json[}', targetPlatform: 'linux/amd64', }, - toolContext + toolContext, ); // Should handle gracefully @@ -244,7 +244,7 @@ describe('Error Scenario Coverage', () => { dockerfilePath, validationReport: 'not a valid report', }, - toolContext + toolContext, ); await cleanup(); @@ -254,10 +254,7 @@ describe('Error Scenario Coverage', () => { }); it('should handle empty strings in required fields', async () => { - const result = await analyzeRepoTool.handler( - { 
repositoryPath: '' }, - toolContext - ); + const result = await analyzeRepoTool.handler({ repositoryPath: '' }, toolContext); expect(result.ok).toBe(false); if (!result.ok) { @@ -273,13 +270,13 @@ describe('Error Scenario Coverage', () => { const dockerfilePath = join(dir.name, 'Dockerfile'); writeFileSync(dockerfilePath, 'FROM node:18-alpine\nCMD ["node"]'); - const result = await buildImageTool.handler( + const result = await buildImageContextTool.handler( { dockerfilePath, context: dir.name, imageName: 'test:latest', }, - toolContext + toolContext, ); await cleanup(); @@ -296,10 +293,7 @@ describe('Error Scenario Coverage', () => { it('should handle Trivy scanner unavailable', async () => { // Scan will fail if Trivy not installed - const result = await scanImageTool.handler( - { imageId: 'alpine:latest' }, - toolContext - ); + const result = await scanImageTool.handler({ imageId: 'alpine:latest' }, toolContext); if (!result.ok) { expect(result.error).toBeDefined(); @@ -325,7 +319,7 @@ describe('Error Scenario Coverage', () => { const result = await analyzeRepoTool.handler( { repositoryPath: restrictedPath }, - toolContext + toolContext, ); // Restore permissions before cleanup @@ -361,7 +355,7 @@ describe('Error Scenario Coverage', () => { outputPath: join(readOnlyDir, 'Dockerfile'), targetPlatform: 'linux/amd64', }, - toolContext + toolContext, ); chmodSync(readOnlyDir, 0o755); @@ -383,10 +377,7 @@ describe('Error Scenario Coverage', () => { const { dir, cleanup } = createTestTempDir('missing-pkg-'); writeFileSync(join(dir.name, 'index.js'), 'console.log("hi");'); - const result = await analyzeRepoTool.handler( - { repositoryPath: dir.name }, - toolContext - ); + const result = await analyzeRepoTool.handler({ repositoryPath: dir.name }, toolContext); await cleanup(); @@ -400,10 +391,7 @@ describe('Error Scenario Coverage', () => { const { dir, cleanup } = createTestTempDir('missing-req-'); writeFileSync(join(dir.name, 'app.py'), 'print("hello")'); - const 
result = await analyzeRepoTool.handler( - { repositoryPath: dir.name }, - toolContext - ); + const result = await analyzeRepoTool.handler({ repositoryPath: dir.name }, toolContext); await cleanup(); @@ -412,7 +400,6 @@ describe('Error Scenario Coverage', () => { expect(result.value).toBeDefined(); } }); - }); describe('Validation Failures', () => { @@ -421,10 +408,7 @@ describe('Error Scenario Coverage', () => { const dockerfilePath = join(dir.name, 'Dockerfile'); writeFileSync(dockerfilePath, 'INVALID_INSTRUCTION node:18\nBAD SYNTAX'); - const result = await fixDockerfileTool.handler( - { dockerfilePath }, - toolContext - ); + const result = await fixDockerfileTool.handler({ dockerfilePath }, toolContext); await cleanup(); @@ -445,13 +429,10 @@ describe('Error Scenario Coverage', () => { USER root RUN chmod 777 /app COPY . . -CMD ["node", "app.js"]` +CMD ["node", "app.js"]`, ); - const result = await fixDockerfileTool.handler( - { dockerfilePath }, - toolContext - ); + const result = await fixDockerfileTool.handler({ dockerfilePath }, toolContext); await cleanup(); @@ -465,7 +446,7 @@ CMD ["node", "app.js"]` it('should reject invalid namespace format', async () => { const result = await prepareClusterTool.handler( { namespace: 'Invalid_Namespace_Name!' 
}, - toolContext + toolContext, ); expect(result.ok).toBe(false); @@ -479,10 +460,7 @@ CMD ["node", "app.js"]` it('should handle very long file paths', async () => { const longPath = '/tmp/' + 'a'.repeat(200) + '/test'; - const result = await analyzeRepoTool.handler( - { repositoryPath: longPath }, - toolContext - ); + const result = await analyzeRepoTool.handler({ repositoryPath: longPath }, toolContext); expect(result.ok).toBe(false); if (!result.ok) { @@ -493,7 +471,7 @@ CMD ["node", "app.js"]` it('should handle special characters in paths', async () => { const result = await analyzeRepoTool.handler( { repositoryPath: '/tmp/test-app-with-special-chars-!@#$%' }, - toolContext + toolContext, ); // Should handle gracefully (will fail because path doesn't exist) @@ -511,7 +489,7 @@ CMD ["node", "app.js"]` mkdirSync(appPath); writeFileSync( join(appPath, 'package.json'), - JSON.stringify({ name: `app${i}`, version: '1.0.0' }) + JSON.stringify({ name: `app${i}`, version: '1.0.0' }), ); } @@ -524,7 +502,7 @@ CMD ["node", "app.js"]` await cleanup(); expect(results).toHaveLength(3); - results.forEach(result => { + results.forEach((result) => { expect(result.ok !== undefined).toBe(true); }); }); @@ -532,10 +510,7 @@ CMD ["node", "app.js"]` describe('Guidance Messages', () => { it('should provide error messages for common failures', async () => { - const result = await analyzeRepoTool.handler( - { repositoryPath: '/nonexistent' }, - toolContext - ); + const result = await analyzeRepoTool.handler({ repositoryPath: '/nonexistent' }, toolContext); expect(result.ok).toBe(false); if (!result.ok) { @@ -552,10 +527,7 @@ CMD ["node", "app.js"]` const dockerfilePath = join(dir.name, 'Dockerfile'); writeFileSync(dockerfilePath, 'FROM node:latest\n'); - const result = await fixDockerfileTool.handler( - { dockerfilePath }, - toolContext - ); + const result = await fixDockerfileTool.handler({ dockerfilePath }, toolContext); await cleanup(); diff --git 
a/test/unit/error-scenarios/docker-errors.test.ts b/test/unit/error-scenarios/docker-errors.test.ts index dc0c5b80b..5f4e52bbd 100644 --- a/test/unit/error-scenarios/docker-errors.test.ts +++ b/test/unit/error-scenarios/docker-errors.test.ts @@ -1,25 +1,10 @@ /** * Unit Tests: Docker Error Scenarios - * Tests Docker error handling patterns without being prescriptive about exact error messages + * Tests Docker error handling patterns for build-image context preparation */ import { jest } from '@jest/globals'; -function createSuccessResult(value: T) { - return { - ok: true as const, - value, - }; -} - -function createFailureResult(error: string, guidance?: { resolution?: string; hints?: string[] }) { - return { - ok: false as const, - error, - guidance, - }; -} - function createMockLogger() { return { info: jest.fn(), @@ -38,19 +23,6 @@ function createMockToolContext() { } as any; } -const mockDockerClient = { - buildImage: jest.fn(), - tagImage: jest.fn(), - pushImage: jest.fn(), - pullImage: jest.fn(), - inspectImage: jest.fn(), - ping: jest.fn(), -}; - -jest.mock('../../../src/infra/docker/client', () => ({ - createDockerClient: jest.fn(() => mockDockerClient), -})); - jest.mock('../../../src/lib/logger', () => ({ createTimer: jest.fn(() => ({ end: jest.fn(), @@ -59,47 +31,41 @@ jest.mock('../../../src/lib/logger', () => ({ createLogger: jest.fn(() => createMockLogger()), })); -jest.mock('../../../src/lib/validation', () => ({ - validatePath: jest.fn().mockImplementation(async (pathStr: string) => { +jest.mock('../../../src/lib/validation-helpers', () => ({ + validatePathOrFail: jest.fn().mockImplementation(async (pathStr: string) => { return { ok: true, value: pathStr }; }), - validateImageName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), })); -jest.mock('node:fs', () => ({ - promises: { - access: jest.fn(), - readFile: jest.fn(), - writeFile: jest.fn(), - stat: jest.fn(), - constants: { R_OK: 4, W_OK: 2, X_OK: 1, F_OK: 0 }, - }, 
- constants: { R_OK: 4, W_OK: 2, X_OK: 1, F_OK: 0 }, +// Mock file-utils which is what the tool actually uses +const mockReadDockerfile = jest.fn<() => Promise<{ ok: true; value: string } | { ok: false; error: string }>>(); + +jest.mock('../../../src/lib/file-utils', () => ({ + readDockerfile: mockReadDockerfile, })); -import { promises as fs } from 'node:fs'; -import { buildImage } from '../../../src/tools/build-image/tool'; +// Mock fs/promises for path resolution +jest.mock('fs/promises', () => ({ + access: jest.fn().mockResolvedValue(undefined), + stat: jest.fn().mockResolvedValue({ isFile: () => true, isDirectory: () => false }), + readFile: jest.fn(), +})); -const mockFs = fs as jest.Mocked; +import { buildImageContext } from '../../../src/tools/build-image-context/tool'; -describe('Docker Error Scenarios', () => { +describe('Build Image Error Scenarios', () => { const mockDockerfile = `FROM node:18-alpine\nWORKDIR /app\nCOPY . .\nUSER appuser\nCMD ["node", "index.js"]`; beforeEach(() => { jest.clearAllMocks(); - mockFs.access.mockResolvedValue(undefined); - mockFs.stat.mockResolvedValue({ isFile: () => true, isDirectory: () => false } as any); - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockFs.writeFile.mockResolvedValue(undefined); - - mockDockerClient.ping.mockResolvedValue(createSuccessResult(undefined)); + mockReadDockerfile.mockResolvedValue({ ok: true, value: mockDockerfile }); }); describe('Error Handling Pattern', () => { - it('should return Result on Docker client errors', async () => { - mockDockerClient.buildImage.mockRejectedValue(new Error('Docker error')); + it('should return Result on filesystem errors', async () => { + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'Permission denied' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test/repo', dockerfile: 'Dockerfile', @@ -119,10 +85,10 @@ describe('Docker Error Scenarios', () => { }); it('should never throw exceptions', async () 
=> { - mockDockerClient.buildImage.mockRejectedValue(new Error('Unexpected error')); + mockReadDockerfile.mockRejectedValue(new Error('Unexpected error')); await expect( - buildImage( + buildImageContext( { path: '/test/repo', dockerfile: 'Dockerfile', @@ -135,14 +101,8 @@ describe('Docker Error Scenarios', () => { ).resolves.not.toThrow(); }); - it('should propagate errors through Result without throwing', async () => { - const testError = new Error('Test error'); - mockDockerClient.buildImage.mockResolvedValue(createFailureResult(testError.message)); - - const result = await buildImage( - { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, - createMockToolContext(), - ); + it('should return failure for invalid parameters', async () => { + const result = await buildImageContext(null as any, createMockToolContext()); expect(result.ok).toBe(false); if (!result.ok) { @@ -151,13 +111,11 @@ describe('Docker Error Scenarios', () => { }); }); - describe('Connection Errors', () => { - it('should handle ECONNREFUSED errors', async () => { - const err = new Error('ECONNREFUSED'); - (err as any).code = 'ECONNREFUSED'; - mockDockerClient.buildImage.mockRejectedValue(err); + describe('Filesystem Errors', () => { + it('should handle EACCES errors', async () => { + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'EACCES: permission denied' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -165,12 +123,10 @@ describe('Docker Error Scenarios', () => { expect(result.ok).toBe(false); }); - it('should handle EACCES errors', async () => { - const err = new Error('EACCES'); - (err as any).code = 'EACCES'; - mockDockerClient.buildImage.mockRejectedValue(err); + it('should handle ENOENT errors', async () => { + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'ENOENT: no 
such file or directory' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -178,12 +134,10 @@ describe('Docker Error Scenarios', () => { expect(result.ok).toBe(false); }); - it('should handle ENOENT errors', async () => { - const err = new Error('ENOENT'); - (err as any).code = 'ENOENT'; - mockDockerClient.buildImage.mockRejectedValue(err); + it('should handle EISDIR errors', async () => { + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'EISDIR: illegal operation on a directory' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); @@ -192,78 +146,67 @@ describe('Docker Error Scenarios', () => { }); }); - describe('Operation Errors', () => { - it('should handle build failures', async () => { - mockDockerClient.buildImage.mockResolvedValue(createFailureResult('Build failed')); + describe('Guidance Structure', () => { + it('should provide guidance on errors', async () => { + mockReadDockerfile.mockResolvedValue({ ok: false, error: 'File not found' }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(result.ok).toBe(false); + // Note: guidance is optional and may not be provided for all errors + if (!result.ok) { + expect(result.error).toBeDefined(); + } }); + }); - it('should handle different operation failures', async () => { - mockDockerClient.buildImage.mockResolvedValue(createFailureResult('Operation failed')); - - const result = await buildImage( - { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, + describe('Success Cases', () => { + it('should 
succeed when Dockerfile is readable', async () => { + const result = await buildImageContext( + { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: ['v1.0.0'], buildArgs: {} }, createMockToolContext(), ); - expect(result.ok).toBe(false); + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.summary).toBeDefined(); + expect(result.value.nextAction.buildCommand.command).toBeDefined(); + } }); - }); - describe('Guidance Structure', () => { - it('should optionally provide guidance on errors', async () => { - mockDockerClient.buildImage.mockResolvedValue( - createFailureResult('Error', { - resolution: 'Fix the issue', - hints: ['Hint 1', 'Hint 2'], - }), - ); + it('should detect security issues in Dockerfile', async () => { + // Dockerfile running as root + mockReadDockerfile.mockResolvedValue({ + ok: true, + value: 'FROM node:18\nWORKDIR /app\nCOPY . .\nCMD ["node", "index.js"]' + }); - const result = await buildImage( + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); - expect(result.ok).toBe(false); - // Guidance is optional, but if present should have correct structure - if (!result.ok && result.guidance) { - if (result.guidance.resolution) { - expect(typeof result.guidance.resolution).toBe('string'); - } - if (result.guidance.hints) { - expect(Array.isArray(result.guidance.hints)).toBe(true); - } + expect(result.ok).toBe(true); + if (result.ok) { + // Should detect running as root (no USER directive) + expect(result.value.securityAnalysis.warnings.length).toBeGreaterThan(0); } }); - }); - describe('Success Cases', () => { - it('should succeed when Docker operations work', async () => { - mockDockerClient.buildImage.mockResolvedValue( - createSuccessResult({ - imageId: 'sha256:abc', - digest: 'sha256:def', - tags: ['test:latest'], - size: 100000, - layers: 5, - buildTime: 1000, - logs: ['Success'], - warnings: 
[], - }), - ); - - const result = await buildImage( + it('should include BuildKit analysis', async () => { + const result = await buildImageContext( { path: '/test', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, createMockToolContext(), ); expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildKitAnalysis).toBeDefined(); + expect(result.value.buildKitAnalysis.features).toBeDefined(); + } }); }); }); diff --git a/test/unit/error-scenarios/validation-errors.test.ts b/test/unit/error-scenarios/validation-errors.test.ts index c78ef0c71..c5d41b8e9 100644 --- a/test/unit/error-scenarios/validation-errors.test.ts +++ b/test/unit/error-scenarios/validation-errors.test.ts @@ -48,7 +48,7 @@ jest.mock('../../../src/lib/logger', () => ({ createLogger: jest.fn(() => createMockLogger()), })); -import { buildImage } from '../../../src/tools/build-image/tool'; +import { buildImageContext } from '../../../src/tools/build-image-context/tool'; describe('Validation Error Scenarios', () => { beforeEach(() => { @@ -57,12 +57,17 @@ describe('Validation Error Scenarios', () => { describe('Path Validation', () => { it('should validate path parameter', async () => { - mockValidation.validatePath.mockResolvedValue( - createFailureResult('Invalid path'), - ); - - const result = await buildImage( - { path: '', dockerfile: 'Dockerfile', imageName: 'test:latest', tags: [], buildArgs: {} }, + mockValidation.validatePath.mockResolvedValue(createFailureResult('Invalid path')); + + const result = await buildImageContext( + { + path: '', + dockerfile: 'Dockerfile', + imageName: 'test:latest', + tags: [], + buildArgs: {}, + platform: 'linux/amd64', + }, createMockToolContext(), ); @@ -70,9 +75,7 @@ describe('Validation Error Scenarios', () => { }); it('should accept valid paths', async () => { - mockValidation.validatePath.mockResolvedValue( - createSuccessResult('/valid/path'), - ); + 
mockValidation.validatePath.mockResolvedValue(createSuccessResult('/valid/path')); const result = await mockValidation.validatePath('/valid/path'); @@ -83,18 +86,14 @@ describe('Validation Error Scenarios', () => { describe('Image Name Validation', () => { it('should validate image name format', () => { - mockValidation.validateImageName.mockReturnValue( - createFailureResult('Invalid image name'), - ); + mockValidation.validateImageName.mockReturnValue(createFailureResult('Invalid image name')); const result = mockValidation.validateImageName('Invalid@Name'); expect(result.ok).toBe(false); }); it('should accept valid image names', () => { - mockValidation.validateImageName.mockReturnValue( - createSuccessResult('valid-image:latest'), - ); + mockValidation.validateImageName.mockReturnValue(createSuccessResult('valid-image:latest')); const result = mockValidation.validateImageName('valid-image:latest'); expect(result.ok).toBe(true); @@ -103,18 +102,14 @@ describe('Validation Error Scenarios', () => { describe('Namespace Validation', () => { it('should validate namespace format', () => { - mockValidation.validateNamespace.mockReturnValue( - createFailureResult('Invalid namespace'), - ); + mockValidation.validateNamespace.mockReturnValue(createFailureResult('Invalid namespace')); const result = mockValidation.validateNamespace('Invalid_Namespace'); expect(result.ok).toBe(false); }); it('should accept valid namespaces', () => { - mockValidation.validateNamespace.mockReturnValue( - createSuccessResult('valid-namespace'), - ); + mockValidation.validateNamespace.mockReturnValue(createSuccessResult('valid-namespace')); const result = mockValidation.validateNamespace('valid-namespace'); expect(result.ok).toBe(true); @@ -123,18 +118,14 @@ describe('Validation Error Scenarios', () => { describe('K8s Resource Name Validation', () => { it('should validate K8s resource names', () => { - mockValidation.validateK8sName.mockReturnValue( - createFailureResult('Invalid resource 
name'), - ); + mockValidation.validateK8sName.mockReturnValue(createFailureResult('Invalid resource name')); const result = mockValidation.validateK8sName('Invalid_Name'); expect(result.ok).toBe(false); }); it('should accept valid K8s names', () => { - mockValidation.validateK8sName.mockReturnValue( - createSuccessResult('valid-name'), - ); + mockValidation.validateK8sName.mockReturnValue(createSuccessResult('valid-name')); const result = mockValidation.validateK8sName('valid-name'); expect(result.ok).toBe(true); @@ -164,9 +155,7 @@ describe('Validation Error Scenarios', () => { }); it('should return Result on validation success', () => { - mockValidation.validateImageName.mockReturnValue( - createSuccessResult('valid:image'), - ); + mockValidation.validateImageName.mockReturnValue(createSuccessResult('valid:image')); const result = mockValidation.validateImageName('valid:image'); diff --git a/test/unit/infra/docker/buildkit-decoder.test.ts b/test/unit/infra/docker/buildkit-decoder.test.ts deleted file mode 100644 index 57feb775f..000000000 --- a/test/unit/infra/docker/buildkit-decoder.test.ts +++ /dev/null @@ -1,334 +0,0 @@ -/** - * Unit tests for BuildKit trace decoder - */ - -import { describe, it, expect, jest } from '@jest/globals'; -import { decodeBuildKitTrace, formatBuildKitStatus } from '@/infra/docker/buildkit-decoder'; -import type { Logger } from 'pino'; -import protobuf from 'protobufjs'; - -describe('BuildKit decoder', () => { - let mockLogger: Logger; - - beforeEach(() => { - mockLogger = { - debug: jest.fn(), - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - } as unknown as Logger; - }); - - // Helper function to create protobuf root with full schema - function createProtobufRoot(): protobuf.Root { - const Timestamp = new protobuf.Type('Timestamp') - .add(new protobuf.Field('seconds', 1, 'int64')) - .add(new protobuf.Field('nanos', 2, 'int32')); - - const Vertex = new protobuf.Type('Vertex') - .add(new protobuf.Field('digest', 1, 
'string')) - .add(new protobuf.Field('inputs', 2, 'string', 'repeated')) - .add(new protobuf.Field('name', 3, 'string')) - .add(new protobuf.Field('cached', 4, 'bool')) - .add(new protobuf.Field('started', 5, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('completed', 6, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('error', 7, 'string')); - - const VertexStatus = new protobuf.Type('VertexStatus') - .add(new protobuf.Field('ID', 1, 'string')) - .add(new protobuf.Field('vertex', 2, 'string')); - - const VertexLog = new protobuf.Type('VertexLog') - .add(new protobuf.Field('vertex', 1, 'string')) - .add(new protobuf.Field('timestamp', 2, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('stream', 3, 'int64')) - .add(new protobuf.Field('msg', 4, 'bytes')); - - const VertexWarning = new protobuf.Type('VertexWarning') - .add(new protobuf.Field('vertex', 1, 'string')) - .add(new protobuf.Field('level', 2, 'int64')) - .add(new protobuf.Field('short', 3, 'bytes')) - .add(new protobuf.Field('detail', 4, 'bytes', 'repeated')) - .add(new protobuf.Field('url', 5, 'string')); - - const StatusResponse = new protobuf.Type('StatusResponse') - .add(new protobuf.Field('vertexes', 1, 'Vertex', 'repeated')) - .add(new protobuf.Field('statuses', 2, 'VertexStatus', 'repeated')) - .add(new protobuf.Field('logs', 3, 'VertexLog', 'repeated')) - .add(new protobuf.Field('warnings', 4, 'VertexWarning', 'repeated')); - - const googleProtobuf = new protobuf.Namespace('google.protobuf'); - googleProtobuf.add(Timestamp); - - const mobyBuildkit = new protobuf.Namespace('moby.buildkit.v1'); - mobyBuildkit.add(Vertex); - mobyBuildkit.add(VertexStatus); - mobyBuildkit.add(VertexLog); - mobyBuildkit.add(VertexWarning); - mobyBuildkit.add(StatusResponse); - - const root = new protobuf.Root(); - root.add(googleProtobuf); - root.add(mobyBuildkit); - - return root; - } - - describe('formatBuildKitStatus', () => { - it('should return null for empty status', () => { - const result = 
formatBuildKitStatus({ - steps: [], - logs: [], - warnings: [], - errors: [], - }); - expect(result).toBeNull(); - }); - - it('should prioritize errors over other messages', () => { - const result = formatBuildKitStatus({ - steps: ['Step 1'], - logs: ['Log message'], - warnings: ['Warning message'], - errors: ['Error message'], - }); - expect(result).toBe('Error message'); - }); - - it('should return completed steps when no errors', () => { - const result = formatBuildKitStatus({ - steps: ['[1/3] FROM node:18', '[2/3] COPY package.json .'], - logs: [], - warnings: [], - errors: [], - }); - expect(result).toBe('[2/3] COPY package.json .'); - }); - - it('should return logs when no errors or steps', () => { - const result = formatBuildKitStatus({ - steps: [], - logs: ['npm install started', 'npm install complete'], - warnings: [], - errors: [], - }); - expect(result).toBe('npm install complete'); - }); - - it('should return warnings with emoji prefix', () => { - const result = formatBuildKitStatus({ - steps: [], - logs: [], - warnings: ['Deprecated package detected'], - errors: [], - }); - expect(result).toBe('⚠️ Deprecated package detected'); - }); - }); - - describe('decodeBuildKitTrace', () => { - it('should return null for invalid input', () => { - const result = decodeBuildKitTrace(null, mockLogger); - expect(result).toBeNull(); - }); - - it('should return null for non-string input', () => { - const result = decodeBuildKitTrace({ foo: 'bar' }, mockLogger); - expect(result).toBeNull(); - }); - - it('should return null for invalid base64', () => { - const result = decodeBuildKitTrace('not-valid-base64!!!', mockLogger); - expect(result).toBeNull(); - }); - - it('should handle empty protobuf message', () => { - const result = decodeBuildKitTrace('', mockLogger); - expect(result).toBeNull(); - }); - - it('should decode valid protobuf with all data types', () => { - const root = createProtobufRoot(); - const StatusResponseType = 
root.lookupType('moby.buildkit.v1.StatusResponse'); - - const message = StatusResponseType.create({ - vertexes: [ - { - digest: 'sha256:step1', - name: '[1/3] FROM node:18', - started: { seconds: 1000, nanos: 0 }, - completed: { seconds: 1010, nanos: 0 }, - }, - { - digest: 'sha256:step2', - name: '[2/3] COPY package.json .', - started: { seconds: 1020, nanos: 0 }, - completed: { seconds: 1025, nanos: 0 }, - }, - { - digest: 'sha256:error-step', - name: '[3/3] RUN npm install', - error: 'ENOENT: no such file or directory', - }, - ], - logs: [ - { - vertex: 'sha256:step2', - timestamp: { seconds: 1021, nanos: 0 }, - stream: 1, - msg: Buffer.from('Copying files...'), - }, - ], - warnings: [ - { - vertex: 'sha256:step1', - level: 1, - short: Buffer.from('Using latest tag'), - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); - const base64 = Buffer.from(encoded).toString('base64'); - - const result = decodeBuildKitTrace(base64, mockLogger); - - expect(result).not.toBeNull(); - // Verify steps extraction - expect(result?.steps).toHaveLength(2); - expect(result?.steps).toContain('[1/3] FROM node:18'); - expect(result?.steps).toContain('[2/3] COPY package.json .'); - // Verify logs extraction - expect(result?.logs).toHaveLength(1); - expect(result?.logs[0]).toBe('Copying files...'); - // Verify warnings extraction - expect(result?.warnings).toHaveLength(1); - expect(result?.warnings[0]).toBe('Using latest tag'); - // Verify errors extraction - expect(result?.errors).toHaveLength(1); - expect(result?.errors[0]).toBe('[3/3] RUN npm install: ENOENT: no such file or directory'); - }); - - it('should filter out empty and whitespace-only log messages', () => { - const root = createProtobufRoot(); - const StatusResponseType = root.lookupType('moby.buildkit.v1.StatusResponse'); - - const message = StatusResponseType.create({ - logs: [ - { - vertex: 'sha256:abc', - timestamp: { seconds: 1000, nanos: 0 }, - stream: 1, - msg: Buffer.from('Valid log 
message'), - }, - { - vertex: 'sha256:def', - timestamp: { seconds: 1001, nanos: 0 }, - stream: 1, - msg: Buffer.from(' '), // Whitespace only - should be filtered - }, - { - vertex: 'sha256:ghi', - timestamp: { seconds: 1002, nanos: 0 }, - stream: 1, - msg: Buffer.from(''), // Empty - should be filtered - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); - const base64 = Buffer.from(encoded).toString('base64'); - - const result = decodeBuildKitTrace(base64, mockLogger); - - expect(result).not.toBeNull(); - expect(result?.logs).toHaveLength(1); - expect(result?.logs[0]).toBe('Valid log message'); - }); - - it('should only include vertices with both started and completed timestamps', () => { - const root = createProtobufRoot(); - const StatusResponseType = root.lookupType('moby.buildkit.v1.StatusResponse'); - - const message = StatusResponseType.create({ - vertexes: [ - { - digest: 'sha256:completed', - name: '[1/3] FROM node:18', - started: { seconds: 1000, nanos: 0 }, - completed: { seconds: 1010, nanos: 0 }, - }, - { - digest: 'sha256:started-only', - name: '[2/3] COPY package.json .', - started: { seconds: 1020, nanos: 0 }, - }, - { - digest: 'sha256:not-started', - name: '[3/3] RUN npm install', - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); - const base64 = Buffer.from(encoded).toString('base64'); - - const result = decodeBuildKitTrace(base64, mockLogger); - - expect(result).not.toBeNull(); - expect(result?.steps).toHaveLength(1); - expect(result?.steps[0]).toBe('[1/3] FROM node:18'); - }); - - it('should handle warnings with non-Buffer short field', () => { - const root = createProtobufRoot(); - const StatusResponseType = root.lookupType('moby.buildkit.v1.StatusResponse'); - - const message = StatusResponseType.create({ - warnings: [ - { - vertex: 'sha256:warn1', - level: 1, - short: Buffer.from('Warning as string'), - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); 
- const base64 = Buffer.from(encoded).toString('base64'); - - const result = decodeBuildKitTrace(base64, mockLogger); - - expect(result).not.toBeNull(); - expect(result?.warnings).toHaveLength(1); - expect(typeof result?.warnings[0]).toBe('string'); - expect(result?.warnings[0]).toContain('Warning'); - }); - - it('should handle logs with various msg encodings', () => { - const root = createProtobufRoot(); - const StatusResponseType = root.lookupType('moby.buildkit.v1.StatusResponse'); - - const message = StatusResponseType.create({ - logs: [ - { - vertex: 'sha256:log1', - timestamp: { seconds: 1000, nanos: 0 }, - stream: 1, - msg: Buffer.from('String log message'), - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); - const base64 = Buffer.from(encoded).toString('base64'); - - const result = decodeBuildKitTrace(base64, mockLogger); - - expect(result).not.toBeNull(); - expect(result?.logs).toHaveLength(1); - expect(typeof result?.logs[0]).toBe('string'); - expect(result?.logs[0]).toContain('String log'); - }); - }); -}); diff --git a/test/unit/infra/docker/progress.test.ts b/test/unit/infra/docker/progress.test.ts deleted file mode 100644 index e15b78406..000000000 --- a/test/unit/infra/docker/progress.test.ts +++ /dev/null @@ -1,211 +0,0 @@ -/** - * Unit tests for Docker build progress tracking - */ - -import { describe, it, expect, jest, beforeEach } from '@jest/globals'; -import { createProgressTracker, type ProgressCallback } from '@/infra/docker/progress'; -import type { Logger } from 'pino'; -import protobuf from 'protobufjs'; - -// Helper function to create valid BuildKit protobuf -function createValidBuildKitTrace(stepName: string): string { - const Timestamp = new protobuf.Type('Timestamp') - .add(new protobuf.Field('seconds', 1, 'int64')) - .add(new protobuf.Field('nanos', 2, 'int32')); - - const Vertex = new protobuf.Type('Vertex') - .add(new protobuf.Field('digest', 1, 'string')) - .add(new protobuf.Field('name', 3, 
'string')) - .add(new protobuf.Field('started', 5, 'google.protobuf.Timestamp')) - .add(new protobuf.Field('completed', 6, 'google.protobuf.Timestamp')); - - const VertexStatus = new protobuf.Type('VertexStatus') - .add(new protobuf.Field('ID', 1, 'string')); - - const VertexLog = new protobuf.Type('VertexLog') - .add(new protobuf.Field('vertex', 1, 'string')); - - const VertexWarning = new protobuf.Type('VertexWarning') - .add(new protobuf.Field('vertex', 1, 'string')); - - const StatusResponse = new protobuf.Type('StatusResponse') - .add(new protobuf.Field('vertexes', 1, 'Vertex', 'repeated')) - .add(new protobuf.Field('statuses', 2, 'VertexStatus', 'repeated')) - .add(new protobuf.Field('logs', 3, 'VertexLog', 'repeated')) - .add(new protobuf.Field('warnings', 4, 'VertexWarning', 'repeated')); - - const googleProtobuf = new protobuf.Namespace('google.protobuf'); - googleProtobuf.add(Timestamp); - - const mobyBuildkit = new protobuf.Namespace('moby.buildkit.v1'); - mobyBuildkit.add(Vertex); - mobyBuildkit.add(VertexStatus); - mobyBuildkit.add(VertexLog); - mobyBuildkit.add(VertexWarning); - mobyBuildkit.add(StatusResponse); - - const root = new protobuf.Root(); - root.add(googleProtobuf); - root.add(mobyBuildkit); - - const StatusResponseType = root.lookupType('moby.buildkit.v1.StatusResponse'); - const message = StatusResponseType.create({ - vertexes: [ - { - digest: 'sha256:abc123', - name: stepName, - started: { seconds: 1000, nanos: 0 }, - completed: { seconds: 1010, nanos: 0 }, - }, - ], - }); - - const encoded = StatusResponseType.encode(message).finish(); - return Buffer.from(encoded).toString('base64'); -} - -describe('ProgressTracker', () => { - let mockLogger: Logger; - let mockProgressCallback: ProgressCallback; - - beforeEach(() => { - mockLogger = { - debug: jest.fn(), - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - } as unknown as Logger; - - mockProgressCallback = jest.fn(); - }); - - describe('constructor', () => { - it('should create 
a progress tracker without onProgress callback', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - }); - - expect(tracker).toBeDefined(); - }); - - it('should create a progress tracker with onProgress callback', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - expect(tracker).toBeDefined(); - }); - }); - - describe('processBuildKitTrace', () => { - it('should return empty string for invalid protobuf data', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const fakeProtobuf = Buffer.from('fake').toString('base64'); - const result = tracker.processBuildKitTrace(fakeProtobuf); - - expect(result).toBe(''); - }); - - it('should return empty string for null auxData', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const result = tracker.processBuildKitTrace(null); - - expect(result).toBe(''); - expect(mockProgressCallback).not.toHaveBeenCalled(); - }); - - it('should return empty string for undefined auxData', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const result = tracker.processBuildKitTrace(undefined); - - expect(result).toBe(''); - expect(mockProgressCallback).not.toHaveBeenCalled(); - }); - - it('should return empty string for non-string auxData', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const result = tracker.processBuildKitTrace({ some: 'object' }); - - expect(result).toBe(''); - expect(mockProgressCallback).not.toHaveBeenCalled(); - }); - - it('should successfully decode valid protobuf and call onProgress', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const validTrace = createValidBuildKitTrace('[1/3] FROM 
node:18'); - const result = tracker.processBuildKitTrace(validTrace); - - expect(result).toBe('[1/3] FROM node:18'); - expect(mockProgressCallback).toHaveBeenCalledWith('[1/3] FROM node:18'); - expect(mockProgressCallback).toHaveBeenCalledTimes(1); - }); - - it('should successfully decode valid protobuf without callback', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - }); - - const validTrace = createValidBuildKitTrace('[1/3] FROM node:18'); - const result = tracker.processBuildKitTrace(validTrace); - - expect(result).toBe('[1/3] FROM node:18'); - }); - - it('should filter out duplicate messages', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const validTrace = createValidBuildKitTrace('[1/3] FROM node:18'); - - // Process the same message twice - const result1 = tracker.processBuildKitTrace(validTrace); - const result2 = tracker.processBuildKitTrace(validTrace); - - expect(result1).toBe('[1/3] FROM node:18'); - expect(result2).toBe(''); // Second call returns empty because it's a duplicate - expect(mockProgressCallback).toHaveBeenCalledTimes(1); // Callback only called once - }); - - it('should handle different messages sequentially', () => { - const tracker = createProgressTracker({ - logger: mockLogger, - onProgress: mockProgressCallback, - }); - - const trace1 = createValidBuildKitTrace('[1/3] FROM node:18'); - const trace2 = createValidBuildKitTrace('[2/3] COPY package.json .'); - - const result1 = tracker.processBuildKitTrace(trace1); - const result2 = tracker.processBuildKitTrace(trace2); - - expect(result1).toBe('[1/3] FROM node:18'); - expect(result2).toBe('[2/3] COPY package.json .'); - expect(mockProgressCallback).toHaveBeenCalledTimes(2); - expect(mockProgressCallback).toHaveBeenNthCalledWith(1, '[1/3] FROM node:18'); - expect(mockProgressCallback).toHaveBeenNthCalledWith(2, '[2/3] COPY package.json .'); - }); - }); -}); diff --git 
a/test/unit/infrastructure/docker/client.test.ts b/test/unit/infrastructure/docker/client.test.ts index b012afa0f..2bf7686ae 100644 --- a/test/unit/infrastructure/docker/client.test.ts +++ b/test/unit/infrastructure/docker/client.test.ts @@ -10,7 +10,6 @@ describe('Docker Client', () => { expect(content).toContain('createDockerClient'); expect(content).toContain('DockerClient'); - expect(content).toContain('buildImage'); expect(content).toContain('getImage'); expect(content).toContain('tagImage'); expect(content).toContain('pushImage'); @@ -20,10 +19,9 @@ describe('Docker Client', () => { const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); const content = readFileSync(clientPath, 'utf-8'); - expect(content).toContain('DockerBuildOptions'); - expect(content).toContain('DockerBuildResult'); expect(content).toContain('DockerPushResult'); expect(content).toContain('DockerImageInfo'); + expect(content).toContain('DockerContainerInfo'); }); it('should use Result pattern for error handling', () => { @@ -45,14 +43,14 @@ describe('Docker Client', () => { }); describe('Client Configuration', () => { - it('should support build configuration options', () => { + it('should support socket configuration options', () => { const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); const content = readFileSync(clientPath, 'utf-8'); - expect(content).toContain('dockerfile'); - expect(content).toContain('buildargs'); - expect(content).toContain('context'); - expect(content).toContain('platform'); + expect(content).toContain('socketPath'); + expect(content).toContain('host'); + expect(content).toContain('port'); + expect(content).toContain('timeout'); }); it('should support logging integration', () => { @@ -116,26 +114,50 @@ describe('Docker Client', () => { }); }); - describe('Progress Error Handling', () => { - it('should contain enhanced progress error handling for buildImage', () => { + describe('Push Error Handling', () => { + 
it('should contain enhanced progress error handling for pushImage', () => { const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); const content = readFileSync(clientPath, 'utf-8'); // Verify enhanced followProgress callback is implemented - expect(content).toContain('Docker build followProgress error'); - expect(content).toContain('errorDetails: guidance.details'); - expect(content).toContain('hint: guidance.hint'); - expect(content).toContain('resolution: guidance.resolution'); - expect(content).toContain('Docker build error event received'); + expect(content).toContain('Docker push followProgress error'); + expect(content).toContain('Docker push error event (may be intermediate)'); }); + }); - it('should contain enhanced progress error handling for pushImage', () => { + describe('Image Operations', () => { + it('should have getImage operation', () => { const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); const content = readFileSync(clientPath, 'utf-8'); - // Verify enhanced followProgress callback is implemented - expect(content).toContain('Docker push followProgress error'); - expect(content).toContain('Docker push error event (may be intermediate)'); + expect(content).toContain('async getImage'); + expect(content).toContain('Docker get image failed'); + }); + + it('should have tagImage operation', () => { + const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); + const content = readFileSync(clientPath, 'utf-8'); + + expect(content).toContain('async tagImage'); + expect(content).toContain('Image tagged successfully'); + expect(content).toContain('Docker tag image failed'); + }); + + it('should have removeImage operation', () => { + const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); + const content = readFileSync(clientPath, 'utf-8'); + + expect(content).toContain('async removeImage'); + expect(content).toContain('Docker remove image failed'); + }); + + it('should 
have ping operation for daemon health check', () => { + const clientPath = join(__dirname, '../../../../src/infra/docker/client.ts'); + const content = readFileSync(clientPath, 'utf-8'); + + expect(content).toContain('async ping'); + expect(content).toContain('Docker daemon is available'); + expect(content).toContain('Docker ping failed'); }); }); }); diff --git a/test/unit/lib/telemetry-utils.test.ts b/test/unit/lib/telemetry-utils.test.ts index 6cd406e32..7dbd0e000 100644 --- a/test/unit/lib/telemetry-utils.test.ts +++ b/test/unit/lib/telemetry-utils.test.ts @@ -236,8 +236,8 @@ describe('telemetry-utils', () => { expect(metrics).not.toHaveProperty('path'); }); - it('should extract safe metrics from build-image results', () => { - const metrics = extractSafeTelemetryMetrics('build-image' as ToolName, { + it('should extract safe metrics from build-image-context results', () => { + const metrics = extractSafeTelemetryMetrics('build-image-context' as ToolName, { imageId: 'sha256:abc123', size: 245678234, buildTime: 45000, @@ -305,13 +305,13 @@ describe('telemetry-utils', () => { describe('createSafeTelemetryEvent', () => { it('should create complete safe telemetry event for success', () => { const event = createSafeTelemetryEvent( - 'build-image' as ToolName, + 'build-image-context' as ToolName, { path: '/home/user/app', imageName: 'myapp' }, { ok: true, value: { imageId: 'abc', size: 1000, buildTime: 5000, tags: [] } }, 5000, ); - expect(event.toolName).toBe('build-image'); + expect(event.toolName).toBe('build-image-context'); expect(event.success).toBe(true); expect(event.durationMs).toBe(5000); // Path has 3 segments, so all should be hashed @@ -326,13 +326,13 @@ describe('telemetry-utils', () => { it('should create complete safe telemetry event for error', () => { const event = createSafeTelemetryEvent( - 'build-image' as ToolName, + 'build-image-context' as ToolName, { path: '/home/user/app', imageName: 'myapp' }, { ok: false, error: 'Build failed: file not 
found' }, 2000, ); - expect(event.toolName).toBe('build-image'); + expect(event.toolName).toBe('build-image-context'); expect(event.success).toBe(false); expect(event.durationMs).toBe(2000); // Path has 3 segments, so all should be hashed @@ -409,7 +409,10 @@ describe('telemetry-utils', () => { buildTime: 45000, }; - const metrics = extractSafeTelemetryMetrics('build-image' as ToolName, sensitiveResult); + const metrics = extractSafeTelemetryMetrics( + 'build-image-context' as ToolName, + sensitiveResult, + ); // Verify only safe metrics are included const metricsStr = JSON.stringify(metrics); @@ -425,7 +428,7 @@ describe('telemetry-utils', () => { it('should create telemetry events with no customer data leakage', () => { const event = createSafeTelemetryEvent( - 'build-image' as ToolName, + 'build-image-context' as ToolName, { path: '/home/john.doe/acme-corp/payment-service', imageName: 'acme-corp/payment-api:v2.1.0', diff --git a/test/unit/mcp/formatters/natural-language-formatters.test.ts b/test/unit/mcp/formatters/natural-language-formatters.test.ts index 14636af11..0b24bfcd0 100644 --- a/test/unit/mcp/formatters/natural-language-formatters.test.ts +++ b/test/unit/mcp/formatters/natural-language-formatters.test.ts @@ -11,7 +11,7 @@ import { } from '@/mcp/formatters/natural-language-formatters'; import type { ScanImageResult } from '@/tools/scan-image/tool'; import type { DockerfilePlan } from '@/tools/generate-dockerfile/schema'; -import type { BuildImageResult } from '@/tools/build-image/tool'; +import type { BuildImageResult } from '@/tools/build-image-context/schema'; import type { RepositoryAnalysis } from '@/tools/analyze-repo/schema'; describe('natural-language-formatters', () => { @@ -135,7 +135,8 @@ describe('natural-language-formatters', () => { const plan: DockerfilePlan = { nextAction: { action: 'create-files', - instruction: 'Create a new Dockerfile at ./Dockerfile using the base images, security considerations, optimizations, and best practices 
from recommendations.', + instruction: + 'Create a new Dockerfile at ./Dockerfile using the base images, security considerations, optimizations, and best practices from recommendations.', files: [ { path: './Dockerfile', @@ -183,7 +184,8 @@ describe('natural-language-formatters', () => { bestPractices: [], }, confidence: 0.9, - summary: 'πŸ”¨ ACTION REQUIRED: Create Dockerfile\nPath: ./Dockerfile\nLanguage: javascript 18.0.0 (Express)\nStrategy: Multi-stage build\nβœ… Ready to create Dockerfile based on recommendations.', + summary: + 'πŸ”¨ ACTION REQUIRED: Create Dockerfile\nPath: ./Dockerfile\nLanguage: javascript 18.0.0 (Express)\nStrategy: Multi-stage build\nβœ… Ready to create Dockerfile based on recommendations.', }; const narrative = formatDockerfilePlanNarrative(plan); @@ -205,7 +207,8 @@ describe('natural-language-formatters', () => { const plan: DockerfilePlan = { nextAction: { action: 'update-files', - instruction: 'Update the existing Dockerfile at ./Dockerfile by applying the enhancement recommendations.', + instruction: + 'Update the existing Dockerfile at ./Dockerfile by applying the enhancement recommendations.', files: [ { path: './Dockerfile', @@ -229,7 +232,8 @@ describe('natural-language-formatters', () => { bestPractices: [], }, confidence: 0.85, - summary: 'πŸ”¨ ACTION REQUIRED: Update Dockerfile\nPath: ./Dockerfile\nLanguage: python 3.11\nβœ… Ready to update Dockerfile with enhancements.', + summary: + 'πŸ”¨ ACTION REQUIRED: Update Dockerfile\nPath: ./Dockerfile\nLanguage: python 3.11\nβœ… Ready to update Dockerfile with enhancements.', existingDockerfile: { path: '/app/Dockerfile', content: 'FROM python:3.11\nWORKDIR /app', @@ -268,7 +272,8 @@ describe('natural-language-formatters', () => { const plan: DockerfilePlan = { nextAction: { action: 'create-files', - instruction: 'Create a new Dockerfile at ./Dockerfile using the base images and recommendations.', + instruction: + 'Create a new Dockerfile at ./Dockerfile using the base images and 
recommendations.', files: [ { path: './Dockerfile', @@ -291,7 +296,8 @@ describe('natural-language-formatters', () => { bestPractices: [], }, confidence: 0.8, - summary: 'πŸ”¨ ACTION REQUIRED: Create Dockerfile\nPath: ./Dockerfile\nLanguage: java\nβœ… Ready to create Dockerfile based on recommendations.', + summary: + 'πŸ”¨ ACTION REQUIRED: Create Dockerfile\nPath: ./Dockerfile\nLanguage: java\nβœ… Ready to create Dockerfile based on recommendations.', policyValidation: { passed: false, violations: [ @@ -325,7 +331,8 @@ describe('natural-language-formatters', () => { const plan: DockerfilePlan = { nextAction: { action: 'create-files', - instruction: 'Create a new Dockerfile at ./Dockerfile using the base images and recommendations.', + instruction: + 'Create a new Dockerfile at ./Dockerfile using the base images and recommendations.', files: [ { path: './Dockerfile', @@ -366,72 +373,209 @@ describe('natural-language-formatters', () => { expect(narrative).toContain('✨ CREATE DOCKERFILE'); expect(narrative).toContain('node:18-alpine'); expect(narrative).not.toContain('Next Steps:'); - expect(narrative).not.toContain('Build image with build-image tool'); + expect(narrative).not.toContain('Build image with build-image-context tool'); }); }); - describe('formatBuildImageNarrative', () => { it('should format successful build with all details', () => { const result: BuildImageResult = { - success: true, - imageId: 'sha256:abc123def456', - requestedTags: ['myapp:latest', 'myapp:1.0.0', 'myapp:production'], - createdTags: ['myapp:latest', 'myapp:1.0.0', 'myapp:production'], - size: 245000000, - buildTime: 45000, - layers: 12, - logs: [], + summary: 'Build context ready for myapp with 3 tags', + context: { + buildContextPath: '/app', + dockerfilePath: '/app/Dockerfile', + dockerfileRelative: 'Dockerfile', + hasDockerignore: true, + }, + securityAnalysis: { + warnings: [ + { + id: 'ROOT_USER', + severity: 'medium', + message: 'Running as root user', + line: 15, + remediation: 
'Add USER directive', + }, + ], + riskLevel: 'medium', + recommendations: ['Add non-root user'], + }, + buildConfig: { + finalTags: ['myapp:latest', 'myapp:1.0.0', 'myapp:production'], + buildArgs: {}, + platform: 'linux/amd64', + }, + buildKitAnalysis: { + features: { + cacheMount: false, + secretMount: false, + sshMount: false, + multiStage: true, + stageCount: 2, + copyFrom: true, + heredoc: false, + }, + recommended: true, + recommendations: ['Use BuildKit for multi-stage builds'], + }, + dockerfileAnalysis: { + baseImages: ['node:18-alpine'], + exposedPorts: [3000], + finalUser: undefined, + hasHealthcheck: true, + layerCount: 12, + }, + nextAction: { + action: 'execute-build', + preChecks: ['Verify Docker daemon is running'], + buildCommand: { + command: 'docker build -t myapp:latest -t myapp:1.0.0 -t myapp:production .', + parts: { + executable: 'docker', + subcommand: 'build', + flags: ['-t', 'myapp:latest'], + context: '.', + }, + environment: { DOCKER_BUILDKIT: '1' }, + }, + postBuildSteps: ['Scan for vulnerabilities'], + }, }; const narrative = formatBuildImageNarrative(result); - expect(narrative).toContain('βœ… Image Built Successfully'); - expect(narrative).toContain('**Image:** sha256:abc123def456'); - expect(narrative).toContain('**Tags Created:** myapp:latest, myapp:1.0.0, myapp:production'); - expect(narrative).toContain('**Size:** 234MB'); // 245000000 bytes = 234MB - expect(narrative).toContain('**Build Time:** 45s'); - expect(narrative).toContain('**Layers:** 12'); + expect(narrative).toContain('πŸ“¦ Build Context Ready'); + expect(narrative).toContain('**Tags:** myapp:latest, myapp:1.0.0, myapp:production'); + expect(narrative).toContain('**Platform:** linux/amd64'); + expect(narrative).toContain('Estimated Layers: 12'); expect(narrative).toContain('Next Steps:'); - expect(narrative).toContain('Scan image for vulnerabilities'); + expect(narrative).toContain('Scan built image for vulnerabilities'); }); it('should handle minimal build result', 
() => { const result: BuildImageResult = { - success: true, - imageId: 'sha256:minimal', - requestedTags: [], - createdTags: [], - size: 100000000, - buildTime: 30000, - logs: [], + summary: 'Minimal build context ready', + context: { + buildContextPath: '/app', + dockerfilePath: '/app/Dockerfile', + dockerfileRelative: 'Dockerfile', + hasDockerignore: false, + }, + securityAnalysis: { + warnings: [], + riskLevel: 'low', + recommendations: [], + }, + buildConfig: { + finalTags: [], + buildArgs: {}, + platform: 'linux/amd64', + }, + buildKitAnalysis: { + features: { + cacheMount: false, + secretMount: false, + sshMount: false, + multiStage: false, + stageCount: 1, + copyFrom: false, + heredoc: false, + }, + recommended: false, + recommendations: [], + }, + dockerfileAnalysis: { + baseImages: ['alpine:latest'], + exposedPorts: [], + hasHealthcheck: false, + layerCount: 3, + }, + nextAction: { + action: 'execute-build', + preChecks: [], + buildCommand: { + command: 'docker build .', + parts: { + executable: 'docker', + subcommand: 'build', + flags: [], + context: '.', + }, + environment: {}, + }, + postBuildSteps: [], + }, }; const narrative = formatBuildImageNarrative(result); - expect(narrative).toContain('βœ… Image Built Successfully'); - expect(narrative).toContain('**Image:** sha256:minimal'); - expect(narrative).not.toContain('**Tags Created:**'); - expect(narrative).not.toContain('**Layers:**'); + expect(narrative).toContain('πŸ“¦ Build Context Ready'); + expect(narrative).toContain('**Summary:** Minimal build context ready'); + expect(narrative).not.toContain('**Tags:**'); + expect(narrative).toContain('Estimated Layers: 3'); }); it('should omit next steps when chainHintsMode is disabled', () => { const result: BuildImageResult = { - success: true, - imageId: 'sha256:abc123def456', - tags: ['myapp:latest'], - size: 245000000, - buildTime: 45000, - layers: 12, - logs: [], + summary: 'Build context ready', + context: { + buildContextPath: '/app', + 
dockerfilePath: '/app/Dockerfile', + dockerfileRelative: 'Dockerfile', + hasDockerignore: true, + }, + securityAnalysis: { + warnings: [], + riskLevel: 'low', + recommendations: [], + }, + buildConfig: { + finalTags: ['myapp:latest'], + buildArgs: {}, + platform: 'linux/amd64', + }, + buildKitAnalysis: { + features: { + cacheMount: false, + secretMount: false, + sshMount: false, + multiStage: false, + stageCount: 1, + copyFrom: false, + heredoc: false, + }, + recommended: false, + recommendations: [], + }, + dockerfileAnalysis: { + baseImages: ['node:18-alpine'], + exposedPorts: [3000], + hasHealthcheck: false, + layerCount: 8, + }, + nextAction: { + action: 'execute-build', + preChecks: ['Verify Docker daemon'], + buildCommand: { + command: 'docker build -t myapp:latest .', + parts: { + executable: 'docker', + subcommand: 'build', + flags: ['-t', 'myapp:latest'], + context: '.', + }, + environment: {}, + }, + postBuildSteps: [], + }, }; const narrative = formatBuildImageNarrative(result, 'disabled'); - expect(narrative).toContain('βœ… Image Built Successfully'); - expect(narrative).toContain('**Image:** sha256:abc123def456'); + expect(narrative).toContain('πŸ“¦ Build Context Ready'); + expect(narrative).toContain('**Tags:** myapp:latest'); expect(narrative).not.toContain('Next Steps:'); - expect(narrative).not.toContain('Scan image for vulnerabilities'); + expect(narrative).not.toContain('Scan built image for vulnerabilities'); }); }); diff --git a/test/unit/mcp/mcp-server.test.ts b/test/unit/mcp/mcp-server.test.ts index d782d16a9..d0f255d17 100644 --- a/test/unit/mcp/mcp-server.test.ts +++ b/test/unit/mcp/mcp-server.test.ts @@ -146,7 +146,6 @@ describe('registerToolsWithServer', () => { }); }); - describe('formatOutput', () => { it('formats as JSON when format is JSON', () => { const input = { name: 'test', version: 1 }; @@ -295,25 +294,67 @@ describe('formatOutput', () => { expect(result).toContain('Next Steps:'); }); - it('detects and formats build-image 
results', () => { + it('detects and formats build-image-context results', () => { const buildResult = { - summary: 'βœ… Built image', - success: true, - imageId: 'sha256:abc123', - requestedTags: ['myapp:latest', 'myapp:1.0.0'], - createdTags: ['myapp:latest', 'myapp:1.0.0'], - size: 245000000, - buildTime: 45000, - logs: [], + summary: 'Build context ready for myapp', + context: { + buildContextPath: '/app', + dockerfilePath: '/app/Dockerfile', + dockerfileRelative: 'Dockerfile', + hasDockerignore: true, + }, + securityAnalysis: { + warnings: [], + riskLevel: 'low', + recommendations: [], + }, + buildConfig: { + finalTags: ['myapp:latest', 'myapp:1.0.0'], + buildArgs: {}, + platform: 'linux/amd64', + }, + buildKitAnalysis: { + features: { + cacheMount: false, + secretMount: false, + sshMount: false, + multiStage: false, + stageCount: 1, + copyFrom: false, + heredoc: false, + }, + recommended: false, + recommendations: [], + }, + dockerfileAnalysis: { + baseImages: ['node:18-alpine'], + exposedPorts: [3000], + hasHealthcheck: false, + layerCount: 8, + }, + nextAction: { + action: 'execute-build', + preChecks: ['Verify Docker daemon'], + buildCommand: { + command: 'docker build -t myapp:latest -t myapp:1.0.0 .', + parts: { + executable: 'docker', + subcommand: 'build', + flags: ['-t', 'myapp:latest'], + context: '.', + }, + environment: {}, + }, + postBuildSteps: [], + }, }; const result = formatOutput(buildResult, OUTPUTFORMAT.NATURAL_LANGUAGE); - expect(result).toContain('Image Built Successfully'); - expect(result).toContain('**Image:**'); - expect(result).toContain('**Tags Created:**'); - expect(result).toContain('**Size:**'); - expect(result).toContain('**Build Time:**'); + expect(result).toContain('Build Context Ready'); + expect(result).toContain('**Tags:**'); + expect(result).toContain('**Platform:**'); + expect(result).toContain('**Dockerfile Analysis:**'); expect(result).toContain('Next Steps:'); }); diff --git a/test/unit/sdk/index.test.ts 
b/test/unit/sdk/index.test.ts index 6027295c7..5557511ea 100644 --- a/test/unit/sdk/index.test.ts +++ b/test/unit/sdk/index.test.ts @@ -10,7 +10,7 @@ import { analyzeRepo, generateDockerfile, fixDockerfile, - buildImage, + buildImageContext, scanImage, tagImage, pushImage, @@ -39,8 +39,8 @@ describe('SDK Exports', () => { expect(typeof fixDockerfile).toBe('function'); }); - test('exports buildImage function', () => { - expect(typeof buildImage).toBe('function'); + test('exports buildImageContext function', () => { + expect(typeof buildImageContext).toBe('function'); }); test('exports scanImage function', () => { @@ -92,9 +92,9 @@ describe('SDK Exports', () => { expect(tools.fixDockerfile.name).toBe('fix-dockerfile'); }); - test('exports tools.buildImage', () => { - expect(tools.buildImage).toBeDefined(); - expect(tools.buildImage.name).toBe('build-image'); + test('exports tools.buildImageContext', () => { + expect(tools.buildImageContext).toBeDefined(); + expect(tools.buildImageContext.name).toBe('build-image-context'); }); test('exports tools.scanImage', () => { diff --git a/test/unit/sdk/schemas.test.ts b/test/unit/sdk/schemas.test.ts index be2b79148..56801e19f 100644 --- a/test/unit/sdk/schemas.test.ts +++ b/test/unit/sdk/schemas.test.ts @@ -14,15 +14,12 @@ describe('JSON Schema generation', () => { const ajv = new Ajv({ strict: false }); describe('schema validity', () => { - it.each(Object.entries(jsonSchemas))( - '%s schema is valid JSON Schema', - (name, schema) => { - // Compile should not throw for valid schemas - const validate = ajv.compile(schema); - expect(validate).toBeDefined(); - expect(typeof validate).toBe('function'); - }, - ); + it.each(Object.entries(jsonSchemas))('%s schema is valid JSON Schema', (name, schema) => { + // Compile should not throw for valid schemas + const validate = ajv.compile(schema); + expect(validate).toBeDefined(); + expect(typeof validate).toBe('function'); + }); }); describe('analyzeRepo schema', () => { @@ -49,19 +46,20 
@@ describe('JSON Schema generation', () => { }); }); - describe('buildImage schema', () => { + describe('buildImageContext schema', () => { it('validates input with path and imageName', () => { - const validate = ajv.compile(jsonSchemas.buildImage); + const validate = ajv.compile(jsonSchemas.buildImageContext); const valid = validate({ path: '/path/to/app', imageName: 'myapp:latest', + platform: 'linux/amd64', }); expect(valid).toBe(true); expect(validate.errors).toBeNull(); }); it('validates input with optional fields', () => { - const validate = ajv.compile(jsonSchemas.buildImage); + const validate = ajv.compile(jsonSchemas.buildImageContext); const valid = validate({ path: '/path/to/app', imageName: 'myapp:latest', @@ -71,14 +69,14 @@ describe('JSON Schema generation', () => { expect(valid).toBe(true); }); - it('accepts path only (imageName is optional)', () => { - const validate = ajv.compile(jsonSchemas.buildImage); - const valid = validate({ path: '/path/to/app' }); + it('accepts path only with platform (imageName is optional)', () => { + const validate = ajv.compile(jsonSchemas.buildImageContext); + const valid = validate({ path: '/path/to/app', platform: 'linux/amd64' }); expect(valid).toBe(true); }); it('rejects wrong type for path', () => { - const validate = ajv.compile(jsonSchemas.buildImage); + const validate = ajv.compile(jsonSchemas.buildImageContext); const valid = validate({ path: 123 }); expect(valid).toBe(false); }); @@ -176,7 +174,7 @@ describe('JSON Schema generation', () => { 'analyzeRepo', 'generateDockerfile', 'fixDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', @@ -199,9 +197,9 @@ describe('JSON Schema generation', () => { }, ); - it('buildImage.buildArgs preserves additionalProperties for dynamic keys', () => { + it('buildImageContext.buildArgs preserves additionalProperties for dynamic keys', () => { // z.record() schemas should preserve additionalProperties - const buildArgsSchema = 
jsonSchemas.buildImage.properties?.buildArgs; + const buildArgsSchema = jsonSchemas.buildImageContext.properties?.buildArgs; expect(buildArgsSchema).toBeDefined(); expect(buildArgsSchema).toHaveProperty('additionalProperties'); expect(buildArgsSchema.additionalProperties).toEqual({ type: 'string' }); diff --git a/test/unit/sdk/vscode-integration.test.ts b/test/unit/sdk/vscode-integration.test.ts index 1a3e91fb9..e38520dab 100644 --- a/test/unit/sdk/vscode-integration.test.ts +++ b/test/unit/sdk/vscode-integration.test.ts @@ -47,7 +47,7 @@ describe('VS Code Extension Integration Exports', () => { 'analyzeRepo', 'generateDockerfile', 'fixDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', @@ -74,8 +74,8 @@ describe('VS Code Extension Integration Exports', () => { expect(properties).toHaveProperty('repositoryPath'); }); - it('buildImage schema has required properties', () => { - const schema = jsonSchemas.buildImage as Record; + it('buildImageContext schema has required properties', () => { + const schema = jsonSchemas.buildImageContext as Record; const properties = schema.properties as Record; expect(properties).toHaveProperty('path'); expect(properties).toHaveProperty('imageName'); @@ -92,7 +92,7 @@ describe('VS Code Extension Integration Exports', () => { 'analyzeRepo', 'generateDockerfile', 'fixDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', @@ -119,54 +119,55 @@ describe('VS Code Extension Integration Exports', () => { it('metadata has user-friendly display names', () => { expect(toolMetadata.analyzeRepo.displayName).toBe('Analyze Repository'); - expect(toolMetadata.buildImage.displayName).toBe('Build Docker Image'); + expect(toolMetadata.buildImageContext.displayName).toBe('Prepare Docker Build Context'); expect(toolMetadata.scanImage.displayName).toBe('Scan Docker Image'); }); it('metadata includes confirmation config', () => { - const meta = toolMetadata.buildImage; + const meta = 
toolMetadata.buildImageContext; expect(meta.confirmation).toBeDefined(); expect(meta.confirmation.title).toBeDefined(); expect(meta.confirmation.messageTemplate).toBeDefined(); - expect(meta.confirmation.isReadOnly).toBe(false); - expect(meta.confirmation.warning).toBeDefined(); + expect(meta.confirmation.isReadOnly).toBe(true); }); it('read-only operations are marked correctly', () => { expect(toolMetadata.analyzeRepo.confirmation.isReadOnly).toBe(true); expect(toolMetadata.scanImage.confirmation.isReadOnly).toBe(true); - expect(toolMetadata.buildImage.confirmation.isReadOnly).toBe(false); + expect(toolMetadata.buildImageContext.confirmation.isReadOnly).toBe(true); expect(toolMetadata.pushImage.confirmation.isReadOnly).toBe(false); }); it('metadata includes suggested next tools', () => { expect(toolMetadata.analyzeRepo.suggestedNextTools).toContain('generate_dockerfile'); - expect(toolMetadata.buildImage.suggestedNextTools).toContain('scan_image'); + expect(toolMetadata.buildImageContext.suggestedNextTools).toContain('scan_image'); }); it('metadata includes category', () => { expect(toolMetadata.analyzeRepo.category).toBe('analysis'); - expect(toolMetadata.buildImage.category).toBe('image'); + expect(toolMetadata.buildImageContext.category).toBe('image'); expect(toolMetadata.generateK8sManifests.category).toBe('kubernetes'); }); it('metadata includes external dependencies info', () => { expect(toolMetadata.analyzeRepo.requiresExternalDeps).toEqual([]); - // buildImage requires docker (structured format) - expect(toolMetadata.buildImage.requiresExternalDeps).toContainEqual( - expect.objectContaining({ id: 'docker' }) + // buildImageContext doesn't require external deps (context prep only) + expect(toolMetadata.buildImageContext.requiresExternalDeps).toEqual([]); + // scanImage requires docker + expect(toolMetadata.scanImage.requiresExternalDeps).toContainEqual( + expect.objectContaining({ id: 'docker' }), ); }); it('standardWorkflow has correct order', () => { 
expect(standardWorkflow[0]).toBe('analyzeRepo'); - expect(standardWorkflow).toContain('buildImage'); + expect(standardWorkflow).toContain('buildImageContext'); expect(standardWorkflow).toContain('generateK8sManifests'); expect(standardWorkflow.indexOf('analyzeRepo')).toBeLessThan( - standardWorkflow.indexOf('generateDockerfile') + standardWorkflow.indexOf('generateDockerfile'), ); - expect(standardWorkflow.indexOf('buildImage')).toBeLessThan( - standardWorkflow.indexOf('scanImage') + expect(standardWorkflow.indexOf('buildImageContext')).toBeLessThan( + standardWorkflow.indexOf('scanImage'), ); }); @@ -191,7 +192,7 @@ describe('VS Code Extension Integration Exports', () => { 'analyzeRepo', 'generateDockerfile', 'fixDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', @@ -208,7 +209,7 @@ describe('VS Code Extension Integration Exports', () => { it('formatters are functions', () => { expect(typeof resultFormatters.analyzeRepo).toBe('function'); - expect(typeof resultFormatters.buildImage).toBe('function'); + expect(typeof resultFormatters.buildImageContext).toBe('function'); expect(typeof resultFormatters.scanImage).toBe('function'); }); }); @@ -332,7 +333,9 @@ describe('VS Code Extension Integration Exports', () => { it('returns absolute Windows path unchanged', () => { // Only test on Windows - on Unix, Windows paths aren't recognized as absolute if (process.platform === 'win32') { - expect(resolveWorkspacePath('C:\\absolute\\path', '/workspace')).toBe('C:\\absolute\\path'); + expect(resolveWorkspacePath('C:\\absolute\\path', '/workspace')).toBe( + 'C:\\absolute\\path', + ); } }); @@ -425,7 +428,7 @@ describe('VS Code Extension Integration Exports', () => { 'analyzeRepo', 'generateDockerfile', 'fixDockerfile', - 'buildImage', + 'buildImageContext', 'scanImage', 'tagImage', 'pushImage', diff --git a/test/unit/telemetry-wrapper.test.ts b/test/unit/telemetry-wrapper.test.ts index 3a0090451..17a2f85cf 100644 --- 
a/test/unit/telemetry-wrapper.test.ts +++ b/test/unit/telemetry-wrapper.test.ts @@ -5,7 +5,7 @@ */ import { describe, it, expect, jest } from '@jest/globals'; -import { buildImageTool, analyzeRepoTool } from '../../src/tools/index'; +import { buildImageContextTool, analyzeRepoTool } from '../../src/tools/index'; import type { ToolContext } from '../../src/mcp/context'; // Mock tool context for testing @@ -27,24 +27,24 @@ describe('Telemetry Wrapper Pattern', () => { describe('Tool Interface Properties', () => { it('should expose all required properties for telemetry wrapping', () => { // Verify build-image tool has all required properties - expect(buildImageTool).toHaveProperty('name'); - expect(buildImageTool).toHaveProperty('description'); - expect(buildImageTool).toHaveProperty('inputSchema'); - expect(buildImageTool).toHaveProperty('parse'); - expect(buildImageTool).toHaveProperty('handler'); - expect(buildImageTool).toHaveProperty('schema'); - expect(buildImageTool).toHaveProperty('metadata'); + expect(buildImageContextTool).toHaveProperty('name'); + expect(buildImageContextTool).toHaveProperty('description'); + expect(buildImageContextTool).toHaveProperty('inputSchema'); + expect(buildImageContextTool).toHaveProperty('parse'); + expect(buildImageContextTool).toHaveProperty('handler'); + expect(buildImageContextTool).toHaveProperty('schema'); + expect(buildImageContextTool).toHaveProperty('metadata'); // Verify property types - expect(typeof buildImageTool.name).toBe('string'); - expect(typeof buildImageTool.description).toBe('string'); - expect(typeof buildImageTool.inputSchema).toBe('object'); - expect(typeof buildImageTool.parse).toBe('function'); - expect(typeof buildImageTool.handler).toBe('function'); + expect(typeof buildImageContextTool.name).toBe('string'); + expect(typeof buildImageContextTool.description).toBe('string'); + expect(typeof buildImageContextTool.inputSchema).toBe('object'); + expect(typeof buildImageContextTool.parse).toBe('function'); 
+ expect(typeof buildImageContextTool.handler).toBe('function'); }); it('should expose properties for all tools', () => { - const tools = [buildImageTool, analyzeRepoTool]; + const tools = [buildImageContextTool, analyzeRepoTool]; for (const tool of tools) { expect(tool).toHaveProperty('name'); @@ -63,18 +63,18 @@ describe('Telemetry Wrapper Pattern', () => { describe('inputSchema Property', () => { it('should expose ZodRawShape for MCP SDK registration', () => { - expect(buildImageTool.inputSchema).toBeDefined(); - expect(typeof buildImageTool.inputSchema).toBe('object'); + expect(buildImageContextTool.inputSchema).toBeDefined(); + expect(typeof buildImageContextTool.inputSchema).toBe('object'); // inputSchema should have the shape properties - expect(buildImageTool.inputSchema).toHaveProperty('path'); + expect(buildImageContextTool.inputSchema).toHaveProperty('path'); }); it('should be directly usable with MCP server.tool()', () => { // This simulates how App Mod team will use it - const { name, description, inputSchema } = buildImageTool; + const { name, description, inputSchema } = buildImageContextTool; - expect(name).toBe('build-image'); + expect(name).toBe('build-image-context'); expect(typeof description).toBe('string'); expect(typeof inputSchema).toBe('object'); expect(inputSchema).toHaveProperty('path'); @@ -89,7 +89,7 @@ describe('Telemetry Wrapper Pattern', () => { buildArgs: { NODE_ENV: 'production' }, }; - const typedInput = buildImageTool.parse(validParams); + const typedInput = buildImageContextTool.parse(validParams); expect(typedInput).toMatchObject({ path: '/app', @@ -105,7 +105,7 @@ describe('Telemetry Wrapper Pattern', () => { }; expect(() => { - buildImageTool.parse(invalidParams); + buildImageContextTool.parse(invalidParams); }).toThrow(); // Zod will throw ZodError }); @@ -116,20 +116,19 @@ describe('Telemetry Wrapper Pattern', () => { // Should not throw - parse should handle optional params expect(() => { - 
buildImageTool.parse(minimalParams); + buildImageContextTool.parse(minimalParams); }).not.toThrow(); }); it('should throw on invalid input types', () => { expect(() => { - buildImageTool.parse({ path: 123 }); // path should be string + buildImageContextTool.parse({ path: 123 }); // path should be string }).toThrow(); expect(() => { - buildImageTool.parse({ imageName: ['not', 'a', 'string'] }); // imageName should be string + buildImageContextTool.parse({ imageName: ['not', 'a', 'string'] }); // imageName should be string }).toThrow(); }); - }); describe('handler Method', () => { @@ -144,11 +143,11 @@ describe('Telemetry Wrapper Pattern', () => { imageName: 'test:v1', }; - const typedInput = buildImageTool.parse(validParams); + const typedInput = buildImageContextTool.parse(validParams); // Handler should accept the typed input // We expect it to fail due to missing Dockerfile, but that's ok - const result = await buildImageTool.handler(typedInput, mockContext); + const result = await buildImageContextTool.handler(typedInput, mockContext); expect(result).toBeDefined(); expect(result).toHaveProperty('ok'); @@ -166,21 +165,21 @@ describe('Telemetry Wrapper Pattern', () => { try { // Step 1: Parse to strongly-typed input (uses Zod validation) - const typedInput = buildImageTool.parse(args); + const typedInput = buildImageContextTool.parse(args); // Step 2: Record telemetry with typed input properties telemetryData.push({ - toolName: buildImageTool.name, + toolName: buildImageContextTool.name, parameters: typedInput, timestamp: startTime, }); // Step 3: Execute tool handler with typed input - const result = await buildImageTool.handler(typedInput, mockContext); + const result = await buildImageContextTool.handler(typedInput, mockContext); // Step 4: Record result metrics telemetryData.push({ - toolName: buildImageTool.name, + toolName: buildImageContextTool.name, success: result.ok, duration: Date.now() - startTime, }); @@ -188,7 +187,7 @@ describe('Telemetry Wrapper 
Pattern', () => { return result; } catch (error) { telemetryData.push({ - toolName: buildImageTool.name, + toolName: buildImageContextTool.name, error: true, duration: Date.now() - startTime, }); @@ -206,7 +205,7 @@ describe('Telemetry Wrapper Pattern', () => { // Verify telemetry was recorded expect(telemetryData.length).toBeGreaterThanOrEqual(2); - expect(telemetryData[0]).toHaveProperty('toolName', 'build-image'); + expect(telemetryData[0]).toHaveProperty('toolName', 'build-image-context'); expect(telemetryData[0]).toHaveProperty('parameters'); expect(telemetryData[1]).toHaveProperty('success'); expect(telemetryData[1]).toHaveProperty('duration'); @@ -221,7 +220,7 @@ describe('Telemetry Wrapper Pattern', () => { }; // Parse returns typed input - const typedInput = buildImageTool.parse(params); + const typedInput = buildImageContextTool.parse(params); // TypeScript should infer the correct type for typedInput expect(typedInput).toHaveProperty('path'); @@ -242,7 +241,7 @@ describe('Telemetry Wrapper Pattern', () => { platform: 'linux/amd64', }; - const typedInput = buildImageTool.parse(params); + const typedInput = buildImageContextTool.parse(params); // Extract telemetry properties (what App Mod team will do) const telemetryProps = { @@ -268,11 +267,11 @@ describe('Telemetry Wrapper Pattern', () => { const trackErrors = async (args: any) => { try { - const typedInput = buildImageTool.parse(args); - return await buildImageTool.handler(typedInput, mockContext); + const typedInput = buildImageContextTool.parse(args); + return await buildImageContextTool.handler(typedInput, mockContext); } catch (error) { errorLog.push({ - tool: buildImageTool.name, + tool: buildImageContextTool.name, error: error instanceof Error ? 
error.message : String(error), args, }); @@ -288,27 +287,27 @@ describe('Telemetry Wrapper Pattern', () => { } expect(errorLog.length).toBeGreaterThan(0); - expect(errorLog[0]).toHaveProperty('tool', 'build-image'); + expect(errorLog[0]).toHaveProperty('tool', 'build-image-context'); expect(errorLog[0]).toHaveProperty('error'); }); }); describe('Metadata Property', () => { it('should expose metadata for all tools', () => { - expect(buildImageTool.metadata).toBeDefined(); - expect(buildImageTool.metadata).toHaveProperty('knowledgeEnhanced'); - expect(typeof buildImageTool.metadata.knowledgeEnhanced).toBe('boolean'); + expect(buildImageContextTool.metadata).toBeDefined(); + expect(buildImageContextTool.metadata).toHaveProperty('knowledgeEnhanced'); + expect(typeof buildImageContextTool.metadata.knowledgeEnhanced).toBe('boolean'); }); it('should allow telemetry to track tool capabilities', () => { // Telemetry can use metadata to categorize tools const toolCapabilities = { - name: buildImageTool.name, - knowledgeEnhanced: buildImageTool.metadata.knowledgeEnhanced, + name: buildImageContextTool.name, + knowledgeEnhanced: buildImageContextTool.metadata.knowledgeEnhanced, }; expect(toolCapabilities).toMatchObject({ - name: 'build-image', + name: 'build-image-context', knowledgeEnhanced: false, }); }); diff --git a/test/unit/tools/build-image-context.test.ts b/test/unit/tools/build-image-context.test.ts new file mode 100644 index 000000000..03eff1706 --- /dev/null +++ b/test/unit/tools/build-image-context.test.ts @@ -0,0 +1,658 @@ +/** + * Unit Tests: Build Image Context Tool + * Tests the build-image-context tool - context preparation and security analysis + */ + +import { jest } from '@jest/globals'; +import { promises as fs } from 'node:fs'; +import { createMockValidatePath } from '../../__support__/utilities/mocks'; + +function createMockLogger() { + return { + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + trace: jest.fn(), + fatal: 
jest.fn(), + child: jest.fn().mockReturnThis(), + } as any; +} + +// Mock the validation library to bypass path validation in tests +jest.mock('../../../src/lib/validation', () => ({ + validatePath: createMockValidatePath(), + validateImageName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), + validateK8sName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), + validateNamespace: jest.fn().mockImplementation((ns: string) => ({ ok: true, value: ns })), +})); + +// Mock validation-helpers +jest.mock('../../../src/lib/validation-helpers', () => ({ + validatePathOrFail: jest.fn().mockImplementation(async (...args: any[]) => { + const { validatePath } = require('../../../src/lib/validation'); + return validatePath(...args); + }), + parseImageName: jest.fn().mockImplementation((imageName: string) => { + const colonIndex = imageName.lastIndexOf(':'); + if (colonIndex > 0 && !imageName.substring(colonIndex + 1).includes('/')) { + const imagePath = imageName.substring(0, colonIndex); + const tag = imageName.substring(colonIndex + 1); + return { + ok: true, + value: { + repository: imagePath, + tag: tag || 'latest', + }, + }; + } + return { + ok: true, + value: { + repository: imageName, + tag: 'latest', + }, + }; + }), +})); + +// Mock filesystem +jest.mock('node:fs', () => ({ + promises: { + access: jest.fn(), + readFile: jest.fn(), + writeFile: jest.fn(), + stat: jest.fn(), + constants: { + R_OK: 4, + W_OK: 2, + X_OK: 1, + F_OK: 0, + }, + }, + constants: { + R_OK: 4, + W_OK: 2, + X_OK: 1, + F_OK: 0, + }, +})); + +jest.mock('../../../src/lib/logger', () => ({ + createTimer: jest.fn(() => ({ + end: jest.fn(), + error: jest.fn(), + })), + createLogger: jest.fn(() => createMockLogger()), +})); + +function createMockToolContext() { + return { + logger: createMockLogger(), + } as any; +} + +// Import after mocks +import { buildImageContext } from '../../../src/tools/build-image-context/tool'; +import type { 
BuildImageParams } from '../../../src/tools/build-image-context/schema'; + +const mockFs = fs as jest.Mocked<typeof fs>; + +describe('buildImageContext', () => { + let config: BuildImageParams; + + const mockDockerfile = `FROM node:18-alpine +WORKDIR /app +COPY package*.json ./ +RUN npm ci --only=production +COPY . . +EXPOSE 3000 +USER appuser +CMD ["node", "index.js"]`; + + beforeEach(() => { + config = { + path: '/test/repo', + dockerfile: 'Dockerfile', + imageName: 'test-app', + tags: ['latest', 'v1.0'], + buildArgs: {}, + }; + + jest.clearAllMocks(); + + // Default mock implementations + mockFs.access.mockResolvedValue(undefined); + mockFs.stat.mockResolvedValue({ isFile: () => true, isDirectory: () => false } as any); + mockFs.readFile.mockResolvedValue(mockDockerfile); + }); + + describe('Context Preparation', () => { + it('should return build context with validated paths', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.context).toBeDefined(); + expect(result.value.context.buildContextPath).toContain('/test/repo'); + expect(result.value.context.dockerfilePath).toContain('Dockerfile'); + expect(result.value.context.dockerfileRelative).toBe('Dockerfile'); + } + }); + + it('should detect .dockerignore presence', async () => { + // Mock .dockerignore exists + mockFs.access.mockResolvedValue(undefined); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.context.hasDockerignore).toBeDefined(); + } + }); + + it('should compute final tags from imageName and tags', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildConfig.finalTags).toContain('test-app:latest'); + expect(result.value.buildConfig.finalTags).toContain('test-app:v1.0'); + } + }); + + 
it('should handle full tag references in tags array', async () => { + config.tags = ['registry.io/myapp:prod']; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildConfig.finalTags).toContain('registry.io/myapp:prod'); + } + }); + + it('should use default tag when no tags provided', async () => { + config.tags = []; + config.imageName = 'myapp'; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildConfig.finalTags).toContain('myapp:latest'); + } + }); + }); + + describe('Security Analysis', () => { + it('should detect secrets in build args', async () => { + config.buildArgs = { + API_PASSWORD: 'secret123', + DB_TOKEN: 'token456', + }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'secret-in-build-arg')).toBe(true); + expect(warnings.some((w) => w.message.includes('API_PASSWORD'))).toBe(true); + } + }); + + it('should detect sudo usage in Dockerfile', async () => { + const dockerfileWithSudo = `FROM ubuntu:20.04 +RUN sudo apt-get update +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithSudo); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'sudo-usage')).toBe(true); + } + }); + + it('should detect unpinned base images', async () => { + const dockerfileWithLatest = `FROM node:latest +WORKDIR /app +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithLatest); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if 
(result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'unpinned-base-image')).toBe(true); + } + }); + + it('should detect missing USER instruction', async () => { + const dockerfileWithoutUser = `FROM node:18-alpine +WORKDIR /app +CMD ["node", "index.js"]`; + + mockFs.readFile.mockResolvedValue(dockerfileWithoutUser); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'runs-as-root')).toBe(true); + } + }); + + it('should detect root user directive', async () => { + const dockerfileWithRoot = `FROM node:18-alpine +USER root +CMD ["node", "index.js"]`; + + mockFs.readFile.mockResolvedValue(dockerfileWithRoot); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'runs-as-root')).toBe(true); + } + }); + + it('should detect chmod 777', async () => { + const dockerfileWithChmod = `FROM node:18-alpine +RUN chmod 777 /app +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithChmod); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + expect(warnings.some((w) => w.id === 'overly-permissive-chmod')).toBe(true); + } + }); + + it('should compute correct risk level', async () => { + // High risk: multiple high severity warnings + const highRiskDockerfile = `FROM node +RUN sudo apt-get update +USER root`; + + mockFs.readFile.mockResolvedValue(highRiskDockerfile); + config.buildArgs = { SECRET_KEY: 'abc123' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if 
(result.ok) { + expect(['medium', 'high']).toContain(result.value.securityAnalysis.riskLevel); + } + }); + + it('should provide remediation for each warning', async () => { + config.buildArgs = { API_TOKEN: 'secret' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const warnings = result.value.securityAnalysis.warnings; + warnings.forEach((w) => { + expect(w.remediation).toBeDefined(); + expect(w.remediation.length).toBeGreaterThan(0); + }); + } + }); + }); + + describe('BuildKit Analysis', () => { + it('should detect multi-stage builds', async () => { + const multiStageDockerfile = `FROM node:18-alpine AS builder +WORKDIR /app +RUN npm ci +FROM node:18-alpine +COPY --from=builder /app/dist ./dist +USER appuser`; + + mockFs.readFile.mockResolvedValue(multiStageDockerfile); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildKitAnalysis.features.multiStage).toBe(true); + expect(result.value.buildKitAnalysis.features.stageCount).toBe(2); + expect(result.value.buildKitAnalysis.features.copyFrom).toBe(true); + } + }); + + it('should detect cache mount usage', async () => { + const dockerfileWithCache = `FROM node:18-alpine +RUN --mount=type=cache,target=/root/.npm npm ci +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithCache); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildKitAnalysis.features.cacheMount).toBe(true); + expect(result.value.buildKitAnalysis.recommended).toBe(true); + } + }); + + it('should detect secret mount usage', async () => { + const dockerfileWithSecret = `FROM node:18-alpine +RUN --mount=type=secret,id=npmrc npm ci +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithSecret); + + const result = await 
buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildKitAnalysis.features.secretMount).toBe(true); + } + }); + + it('should recommend BuildKit when features are used', async () => { + const dockerfileWithBuildKit = `FROM node:18-alpine AS builder +RUN --mount=type=cache,target=/root/.npm npm ci +FROM node:18-alpine +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithBuildKit); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildKitAnalysis.recommended).toBe(true); + } + }); + + it('should suggest cache mounts for npm', async () => { + const dockerfileWithNpm = `FROM node:18-alpine +RUN npm install +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithNpm); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const recommendations = result.value.buildKitAnalysis.recommendations; + expect(recommendations.some((r) => r.includes('npm') && r.includes('cache'))).toBe(true); + } + }); + }); + + describe('Dockerfile Analysis', () => { + it('should extract base images', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.dockerfileAnalysis.baseImages).toContain('node:18-alpine'); + } + }); + + it('should extract exposed ports', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.dockerfileAnalysis.exposedPorts).toContain(3000); + } + }); + + it('should detect final USER', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + 
expect(result.value.dockerfileAnalysis.finalUser).toBe('appuser'); + } + }); + + it('should detect HEALTHCHECK', async () => { + const dockerfileWithHealthcheck = `FROM node:18-alpine +HEALTHCHECK CMD curl -f http://localhost:3000/ || exit 1 +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithHealthcheck); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.dockerfileAnalysis.hasHealthcheck).toBe(true); + } + }); + + it('should estimate layer count', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + // Our mock Dockerfile has: 2 COPY, 1 RUN = 3 layers + expect(result.value.dockerfileAnalysis.layerCount).toBeGreaterThan(0); + } + }); + }); + + describe('Build Command Generation', () => { + it('should generate valid docker build command', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const cmd = result.value.nextAction.buildCommand; + expect(cmd.command).toContain('docker build'); + expect(cmd.command).toContain('-t test-app:latest'); + expect(cmd.parts.executable).toBe('docker'); + expect(cmd.parts.subcommand).toBe('build'); + } + }); + + it('should include build args in command', async () => { + config.buildArgs = { NODE_ENV: 'production' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const cmd = result.value.nextAction.buildCommand; + expect(cmd.command).toContain('--build-arg NODE_ENV=production'); + } + }); + + it('should include platform in command', async () => { + config.platform = 'linux/arm64'; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const cmd = 
result.value.nextAction.buildCommand; + expect(cmd.command).toContain('--platform linux/arm64'); + } + }); + + it('should set DOCKER_BUILDKIT=1 when BuildKit features detected', async () => { + const dockerfileWithBuildKit = `FROM node:18-alpine +RUN --mount=type=cache,target=/root/.npm npm ci +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithBuildKit); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const cmd = result.value.nextAction.buildCommand; + expect(cmd.environment.DOCKER_BUILDKIT).toBe('1'); + } + }); + + it('should include fallback command when BuildKit is used', async () => { + const dockerfileWithBuildKit = `FROM node:18-alpine AS builder +RUN npm ci +FROM node:18-alpine +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithBuildKit); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.nextAction.fallbackCommand).toBeDefined(); + } + }); + }); + + describe('Next Action Instructions', () => { + it('should include pre-checks', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.nextAction.preChecks.length).toBeGreaterThan(0); + expect(result.value.nextAction.preChecks.some((c) => c.includes('docker'))).toBe(true); + } + }); + + it('should include post-build steps', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.nextAction.postBuildSteps.length).toBeGreaterThan(0); + } + }); + + it('should suggest HEALTHCHECK when missing', async () => { + const dockerfileWithoutHealthcheck = `FROM node:18-alpine +USER appuser +CMD ["node", "index.js"]`; + + mockFs.readFile.mockResolvedValue(dockerfileWithoutHealthcheck); + + const 
result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const postSteps = result.value.nextAction.postBuildSteps; + expect(postSteps.some((s) => s.includes('HEALTHCHECK'))).toBe(true); + } + }); + }); + + describe('Build Args Processing', () => { + it('should merge user args with defaults', async () => { + config.buildArgs = { CUSTOM_ARG: 'value' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + const buildArgs = result.value.buildConfig.buildArgs; + expect(buildArgs.CUSTOM_ARG).toBe('value'); + expect(buildArgs.NODE_ENV).toBeDefined(); + expect(buildArgs.BUILD_DATE).toBeDefined(); + expect(buildArgs.VCS_REF).toBeDefined(); + } + }); + + it('should allow user args to override defaults', async () => { + config.buildArgs = { NODE_ENV: 'development' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.buildConfig.buildArgs.NODE_ENV).toBe('development'); + } + }); + }); + + describe('Error Handling', () => { + it('should fail with invalid parameters', async () => { + const result = await buildImageContext(null as any, createMockToolContext()); + + expect(result.ok).toBe(false); + if (!result.ok) { + expect(result.error).toContain('Invalid parameters'); + } + }); + + it('should fail when Dockerfile does not exist', async () => { + mockFs.access.mockRejectedValue(new Error('ENOENT')); + mockFs.stat.mockRejectedValue(new Error('ENOENT')); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(false); + }); + + it('should fail when Dockerfile is not readable', async () => { + mockFs.readFile.mockRejectedValue(new Error('Permission denied')); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(false); + }); + }); + + 
describe('Summary Generation', () => { + it('should include tag info in summary', async () => { + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.summary).toContain('test-app'); + } + }); + + it('should include security status in summary', async () => { + config.buildArgs = { API_SECRET: 'token' }; + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.summary).toMatch(/security|warning/i); + } + }); + + it('should mention BuildKit when recommended', async () => { + const dockerfileWithBuildKit = `FROM node:18-alpine +RUN --mount=type=cache,target=/root/.npm npm ci +USER appuser`; + + mockFs.readFile.mockResolvedValue(dockerfileWithBuildKit); + + const result = await buildImageContext(config, createMockToolContext()); + + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value.summary).toContain('BuildKit'); + } + }); + }); +}); diff --git a/test/unit/tools/build-image.test.ts b/test/unit/tools/build-image.test.ts deleted file mode 100644 index 8b26b83be..000000000 --- a/test/unit/tools/build-image.test.ts +++ /dev/null @@ -1,896 +0,0 @@ -/** - * Unit Tests: Build Image Tool - * Tests the build-image tool functionality with mock Docker client and filesystem - */ - -import { jest } from '@jest/globals'; -import { promises as fs } from 'node:fs'; -import { createMockValidatePath } from '../../__support__/utilities/mocks'; -import type { ErrorGuidance } from '../../../src/types'; - -// Result Type Helpers for Testing -function createSuccessResult<T>(value: T) { - return { - ok: true as const, - value, - }; -} - -function createFailureResult(error: string) { - return { - ok: false as const, - error, - }; -} - -function createMockLogger() { - return { - info: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - debug: jest.fn(), - trace: jest.fn(), - fatal: 
jest.fn(), - child: jest.fn().mockReturnThis(), - } as any; -} - -// Mock the validation library to bypass path validation in tests -jest.mock('../../../src/lib/validation', () => ({ - validatePath: createMockValidatePath(), - validateImageName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), - validateK8sName: jest.fn().mockImplementation((name: string) => ({ ok: true, value: name })), - validateNamespace: jest.fn().mockImplementation((ns: string) => ({ ok: true, value: ns })), -})); - -// Mock validation-helpers to use the mocked validation -jest.mock('../../../src/lib/validation-helpers', () => ({ - validatePathOrFail: jest.fn().mockImplementation(async (...args: any[]) => { - const { validatePath } = require('../../../src/lib/validation'); - return validatePath(...args); - }), - parseImageName: jest.fn().mockImplementation((imageName: string) => { - // Simple mock that handles basic image name parsing - const colonIndex = imageName.lastIndexOf(':'); - if (colonIndex > 0 && !imageName.substring(colonIndex + 1).includes('/')) { - const imagePath = imageName.substring(0, colonIndex); - const tag = imageName.substring(colonIndex + 1); - const parts = imagePath.split('/'); - const hasRegistry = - parts.length > 1 && - /^([a-zA-Z0-9.-]+\.[a-zA-Z]{2,}|localhost|(\d{1,3}\.){3}\d{1,3})(:\d+)?$/.test(parts[0]); - - return { - ok: true, - value: { - registry: hasRegistry ? parts[0] : undefined, - repository: hasRegistry ? parts.slice(1).join('/') : imagePath, - tag: tag || 'latest', - }, - }; - } - - const parts = imageName.split('/'); - const hasRegistry = parts.length > 1 && parts[0]?.includes('.'); - - return { - ok: true, - value: { - registry: hasRegistry ? parts[0] : undefined, - repository: hasRegistry ? 
parts.slice(1).join('/') : imageName, - tag: 'latest', - }, - }; - }), -})); - -// Mock filesystem functions with proper structure -jest.mock('node:fs', () => ({ - promises: { - access: jest.fn(), - readFile: jest.fn(), - writeFile: jest.fn(), - stat: jest.fn(), - constants: { - R_OK: 4, - W_OK: 2, - X_OK: 1, - F_OK: 0, - }, - }, - constants: { - R_OK: 4, - W_OK: 2, - X_OK: 1, - F_OK: 0, - }, -})); - -// Mock lib modules -const mockDockerClient = { - ping: jest.fn() as jest.MockedFunction< - () => Promise<{ - ok: boolean; - value?: any; - error?: string; - }> - >, - buildImage: jest.fn() as jest.MockedFunction< - (options: any) => Promise<{ - ok: boolean; - value?: any; - error?: string; - guidance?: ErrorGuidance; - }> - >, - tagImage: jest.fn() as jest.MockedFunction< - ( - imageId: string, - repo: string, - tag: string, - ) => Promise<{ - ok: boolean; - value?: void; - error?: string; - }> - >, -}; - -jest.mock('../../../src/infra/docker/client', () => ({ - createDockerClient: jest.fn(() => mockDockerClient), -})); - -jest.mock('../../../src/lib/logger', () => ({ - createTimer: jest.fn(() => ({ - end: jest.fn(), - error: jest.fn(), - })), - createLogger: jest.fn(() => createMockLogger()), -})); - -function createMockToolContext() { - return { - logger: createMockLogger(), - } as any; -} - -// Import these after mocks -import { buildImage } from '../../../src/tools/build-image/tool'; -import type { BuildImageParams as BuildImageConfig } from '../../../src/tools/build-image/schema'; - -const mockFs = fs as jest.Mocked<typeof fs>; - -describe('buildImage', () => { - let mockLogger: ReturnType<typeof createMockLogger>; - let config: BuildImageConfig; - - const mockDockerfile = `FROM node:18-alpine -WORKDIR /app -COPY package*.json ./ -RUN npm ci --only=production -COPY . . 
-EXPOSE 3000 -USER appuser -CMD ["node", "index.js"]`; - - beforeEach(() => { - mockLogger = createMockLogger(); - config = { - path: '/test/repo', - dockerfile: 'Dockerfile', - imageName: 'test-app:latest', - tags: ['myapp:latest', 'myapp:v1.0'], - buildArgs: {}, - }; - - // Reset all mocks - jest.clearAllMocks(); - - // Default mock implementations - mockFs.access.mockResolvedValue(undefined); - mockFs.stat.mockResolvedValue({ isFile: () => true, isDirectory: () => false } as any); - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockFs.writeFile.mockResolvedValue(undefined); - - mockDockerClient.ping.mockResolvedValue(createSuccessResult(undefined)); - - // Default successful Docker build - mockDockerClient.buildImage.mockResolvedValue( - createSuccessResult({ - imageId: 'sha256:mock-image-id', - digest: 'sha256:abcdef1234567890', - tags: ['myapp:latest', 'myapp:v1.0'], - size: 123456789, - layers: 8, - buildTime: 5000, - logs: ['Step 1/8 : FROM node:18-alpine', 'Successfully built mock-image-id'], - warnings: [], - }), - ); - mockDockerClient.tagImage.mockResolvedValue(createSuccessResult(undefined)); - }); - - describe('Successful Build', () => { - it('should successfully build Docker image with default settings', async () => { - const mockContext = createMockToolContext(); - const result = await buildImage(config, mockContext); - - if (!result.ok) { - console.error('Build failed:', result.error); - } - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.success).toBe(true); - expect(result.value.imageId).toBe('sha256:mock-image-id'); - expect(result.value.createdTags).toEqual(['myapp:latest', 'myapp:v1.0']); - expect(result.value.size).toBe(123456789); - expect(result.value.layers).toBe(8); - expect(result.value.logs).toContain('Successfully built mock-image-id'); - expect(result.value.buildTime).toBeGreaterThanOrEqual(0); - } - }); - - it('should pass build arguments to Docker client', async () => { - config.buildArgs = { - NODE_ENV: 
'development', - API_URL: 'https://api.example.com', - }; - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - buildargs: expect.objectContaining({ - NODE_ENV: 'development', - API_URL: 'https://api.example.com', - BUILD_DATE: expect.any(String), - VCS_REF: expect.any(String), - }), - }), - ); - }); - - it('should include default build arguments', async () => { - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - buildargs: expect.objectContaining({ - NODE_ENV: expect.any(String), - BUILD_DATE: expect.any(String), - VCS_REF: expect.any(String), - }), - }), - ); - }); - - it('should verify build result structure', async () => { - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value).toHaveProperty('imageId'); - expect(result.value).toHaveProperty('requestedTags'); - expect(result.value).toHaveProperty('createdTags'); - } - }); - it('should include build logs in result', async () => { - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - // Verify logs are included - expect(result.value).toHaveProperty('logs'); - expect(Array.isArray(result.value.logs)).toBe(true); - expect(result.value.logs.length).toBeGreaterThan(0); - - // Verify logs contain expected content from mock - expect(result.value.logs).toContain('Step 1/8 : FROM node:18-alpine'); - expect(result.value.logs).toContain('Successfully built mock-image-id'); - } - }); - - it('should apply multiple tags to built image', async () => { - const configWithMultipleTags = { - ...config, - tags: ['myapp:latest', 'myapp:v1.0.0', 'registry.io/myapp:prod'], - 
}; - - const result = await buildImage(configWithMultipleTags, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - // First tag is applied during build - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - t: 'myapp:latest', - }), - ); - - // Additional tags are applied via tagImage - expect(mockDockerClient.tagImage).toHaveBeenCalledTimes(2); - expect(mockDockerClient.tagImage).toHaveBeenCalledWith( - 'sha256:mock-image-id', - 'myapp', - 'v1.0.0', - ); - expect(mockDockerClient.tagImage).toHaveBeenCalledWith( - 'sha256:mock-image-id', - 'registry.io/myapp', - 'prod', - ); - - // Result includes all requested tags - expect(result.value.createdTags).toEqual([ - 'myapp:latest', - 'myapp:v1.0.0', - 'registry.io/myapp:prod', - ]); - } - }); - }); - - describe('Dockerfile Resolution', () => { - it('should fail when Dockerfile does not exist', async () => { - // Mock access to simulate file doesn't exist (for validation) - mockFs.access.mockRejectedValue(new Error('ENOENT: no such file or directory')); - mockFs.stat.mockRejectedValue(new Error('ENOENT: no such file or directory')); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('does not exist'); - } - }); - - it('should use dockerfilePath when provided', async () => { - const customConfig = { - ...config, - dockerfilePath: 'custom/Dockerfile', - }; - - mockFs.readFile.mockResolvedValue(mockDockerfile); - - const result = await buildImage(customConfig, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockFs.readFile).toHaveBeenCalledWith('/test/repo/custom/Dockerfile', 'utf-8'); - }); - }); - - describe('Security Analysis', () => { - it('should detect security warnings in build args', async () => { - config.buildArgs = { - API_PASSWORD: 'secret123', - DB_TOKEN: 'token456', - }; - - const result = await buildImage(config, 
createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.securityWarnings).toEqual( - expect.arrayContaining([ - 'Potential secret in build arg: API_PASSWORD', - 'Potential secret in build arg: DB_TOKEN', - ]), - ); - } - }); - - it('should detect sudo usage in Dockerfile', async () => { - const dockerfileWithSudo = `FROM ubuntu:20.04 -RUN sudo apt-get update -USER appuser`; - - mockFs.readFile.mockResolvedValue(dockerfileWithSudo); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.securityWarnings).toContain( - 'Using sudo in Dockerfile - consider running as non-root', - ); - } - }); - - it('should detect :latest tags in Dockerfile', async () => { - const dockerfileWithLatest = `FROM node:latest -WORKDIR /app -USER appuser`; - - mockFs.readFile.mockResolvedValue(dockerfileWithLatest); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.securityWarnings).toContain( - 'Using :latest tag - consider pinning versions for reproducibility', - ); - } - }); - - it('should detect missing USER instruction', async () => { - const dockerfileWithoutUser = `FROM node:18-alpine -WORKDIR /app -COPY package*.json ./ -RUN npm ci -COPY . . -CMD ["node", "index.js"]`; - - mockFs.readFile.mockResolvedValue(dockerfileWithoutUser); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.securityWarnings).toContain( - 'Container may run as root - consider adding a non-root USER', - ); - } - }); - - it('should detect root user', async () => { - const dockerfileWithRootUser = `FROM node:18-alpine -WORKDIR /app -COPY . . 
-USER root -CMD ["node", "index.js"]`; - - mockFs.readFile.mockResolvedValue(dockerfileWithRootUser); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.securityWarnings).toContain( - 'Container may run as root - consider adding a non-root USER', - ); - } - }); - }); - - describe('Error Handling', () => { - it('should succeed with valid Dockerfile', async () => { - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - if (result.ok) { - expect(result.value.imageId).toBe('sha256:mock-image-id'); - } - }); - - it('should return error when Docker build fails', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockResolvedValue( - createFailureResult('Docker build failed: syntax error'), - ); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('Docker build failed: syntax error'); - } - }); - - it('should include build logs in error when build fails', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - - // Mock a failure with build logs in guidance - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'RUN command failed', - guidance: { - message: 'RUN command failed', - hint: 'npm install failed', - resolution: 'Check package.json dependencies', - details: { - buildLogs: [ - 'Step 1/5 : FROM node:18-alpine', - 'Step 2/5 : WORKDIR /app', - 'Step 3/5 : RUN npm install', - 'npm ERR! Cannot find module "express"', - 'npm ERR! 
A complete log of this run can be found in: /root/.npm/_logs', - "The command '/bin/sh -c npm install' returned a non-zero code: 1", - ], - }, - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - // Verify error message contains main error - expect(result.error).toContain('RUN command failed'); - - // Verify build logs are preserved in guidance details - expect(result.guidance?.details?.buildLogs).toBeDefined(); - const buildLogs = result.guidance?.details?.buildLogs as string[]; - expect(buildLogs.some((log: string) => log.includes('FROM node:18-alpine'))).toBe(true); - expect(buildLogs.some((log: string) => log.includes('npm ERR! Cannot find module "express"'))).toBe(true); - expect(buildLogs.some((log: string) => log.includes('returned a non-zero code: 1'))).toBe(true); - - // Verify guidance is preserved - expect(result.guidance?.hint).toBe('npm install failed'); - expect(result.guidance?.resolution).toBe('Check package.json dependencies'); - } - }); - - it('should handle filesystem errors', async () => { - mockFs.readFile.mockRejectedValue(new Error('Permission denied')); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('Permission denied'); - } - }); - - it('should handle Docker client errors', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockRejectedValue(new Error('Docker daemon not running')); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toBe('Docker daemon not running'); - } - }); - }); - - describe('Error Scenarios - Infrastructure', () => { - it('should fail gracefully when Docker daemon is not running', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - 
mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'Cannot connect to the Docker daemon', - guidance: { - hint: 'The Docker daemon must be running to build images', - resolution: 'Start Docker Desktop or run: sudo systemctl start docker', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('Cannot connect to the Docker daemon'); - expect(result.guidance).toBeDefined(); - expect(result.guidance?.hint).toContain('Docker daemon'); - expect(result.guidance?.resolution).toBeDefined(); - } - }); - - it('should fail when Docker socket is not accessible', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'EACCES: permission denied, connect /var/run/docker.sock', - guidance: { - hint: 'Current user does not have permission to access Docker socket', - resolution: 'Add user to docker group: sudo usermod -aG docker $USER', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('permission denied'); - expect(result.guidance).toBeDefined(); - expect(result.guidance?.hint).toBeDefined(); - expect(result.guidance?.resolution).toBeDefined(); - } - }); - - it('should fail when network is unreachable during base image pull', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'Error pulling image: network unreachable', - guidance: { - hint: 'Cannot pull base image due to network issues', - resolution: 'Check your internet connection and Docker registry configuration', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('network unreachable'); - 
expect(result.guidance).toBeDefined(); - } - }); - }); - - describe('Error Scenarios - File System', () => { - it('should fail when build context directory does not exist', async () => { - mockFs.access.mockRejectedValue(new Error('ENOENT: no such file or directory')); - mockFs.stat.mockRejectedValue(new Error('ENOENT')); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('does not exist'); - } - }); - - it('should fail when Dockerfile is not readable', async () => { - mockFs.access.mockRejectedValue(new Error('EACCES: permission denied')); - mockFs.stat.mockResolvedValue({ isFile: () => true, isDirectory: () => false } as any); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toMatch(/permission denied|does not exist|not accessible/i); - } - }); - - it('should fail when Dockerfile path points to a directory', async () => { - mockFs.access.mockResolvedValue(undefined); - mockFs.stat.mockResolvedValue({ isFile: () => false, isDirectory: () => true } as any); - mockFs.readFile.mockRejectedValue(Object.assign(new Error('EISDIR'), { code: 'EISDIR' })); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toMatch(/not a file|directory|EISDIR/i); - } - }); - - it('should handle EISDIR error when reading Dockerfile', async () => { - mockFs.access.mockResolvedValue(undefined); - mockFs.stat.mockResolvedValue({ isFile: () => true, isDirectory: () => false } as any); - const error = new Error('EISDIR: illegal operation on a directory') as any; - error.code = 'EISDIR'; - mockFs.readFile.mockRejectedValue(error); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('directory'); 
- } - }); - }); - - describe('Error Scenarios - Input Validation', () => { - it('should fail with invalid parameters object', async () => { - const result = await buildImage(null as any, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('Invalid parameters'); - } - }); - - it('should fail when imageName and tags are both empty', async () => { - const invalidConfig = { - ...config, - imageName: undefined, - tags: [], - }; - - const result = await buildImage(invalidConfig, createMockToolContext()); - - // Should still succeed but with no tags (implementation allows this) - // Or fail depending on validation logic - expect(result).toBeDefined(); - }); - }); - - describe('Error Scenarios - Docker Build Failures', () => { - it('should fail when Dockerfile has syntax errors', async () => { - mockFs.readFile.mockResolvedValue('INVALID DOCKERFILE SYNTAX'); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'Dockerfile parse error: unknown instruction: INVALID', - guidance: { - hint: 'Dockerfile contains syntax errors', - resolution: - 'Check Dockerfile syntax and fix errors. 
Use "docker build" locally to debug.', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('parse error'); - } - }); - - it('should fail when base image is not found', async () => { - mockFs.readFile.mockResolvedValue('FROM nonexistent:image\nCMD ["echo", "hello"]'); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'manifest for nonexistent:image not found', - guidance: { - hint: 'Base image does not exist in registry', - resolution: 'Verify the image name and tag, or use a different base image', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('not found'); - expect(result.guidance).toBeDefined(); - } - }); - - it('should fail when build step fails', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'RUN command failed: npm ci exited with code 1', - guidance: { - hint: 'Build step failed during execution', - resolution: 'Check the command in your Dockerfile and ensure dependencies are available', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if (!result.ok) { - expect(result.error).toContain('RUN command failed'); - expect(result.guidance).toBeDefined(); - } - }); - - it('should fail when disk space is insufficient', async () => { - mockFs.readFile.mockResolvedValue(mockDockerfile); - mockDockerClient.buildImage.mockResolvedValue({ - ok: false, - error: 'no space left on device', - guidance: { - hint: 'Insufficient disk space to complete build', - resolution: 'Free up disk space or prune unused Docker images: docker system prune', - }, - }); - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(false); - if 
(!result.ok) { - expect(result.error).toContain('no space left'); - expect(result.guidance).toBeDefined(); - } - }); - }); - - describe('Build Arguments', () => { - beforeEach(() => { - // Setup filesystem mocks - mockFs.access.mockResolvedValue(undefined); - mockFs.readFile.mockResolvedValue(mockDockerfile); - - // Setup docker build mock - mockDockerClient.buildImage.mockResolvedValue( - createSuccessResult({ - imageId: 'sha256:mock-image-id', - digest: 'sha256:abcdef1234567890', - tags: ['myapp:latest', 'myapp:v1.0'], - size: 123456789, - layers: 8, - buildTime: 5000, - logs: ['Step 1/8 : FROM node:18-alpine', 'Successfully built mock-image-id'], - warnings: [], - }), - ); - }); - - it('should override default arguments with custom ones', async () => { - config.buildArgs = { - NODE_ENV: 'development', - BUILD_DATE: '2023-01-01', - }; - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - buildargs: expect.objectContaining({ - NODE_ENV: 'development', - BUILD_DATE: '2023-01-01', - VCS_REF: expect.any(String), - }), - }), - ); - }); - }); - - describe('Platform Support', () => { - beforeEach(() => { - mockFs.access.mockResolvedValue(undefined); - mockFs.readFile.mockResolvedValue(mockDockerfile); - }); - - it('should pass platform parameter to Docker client', async () => { - const configWithPlatform = { - ...config, - platform: 'linux/arm64', - }; - - const result = await buildImage(configWithPlatform, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - platform: 'linux/arm64', - }), - ); - }); - - it('should not include platform if not specified', async () => { - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - const buildOptions = mockDockerClient.buildImage.mock.calls[0]?.[0]; 
- expect(buildOptions).toBeDefined(); - expect(buildOptions?.platform).toBeUndefined(); - }); - }); - - describe('Environment Variables', () => { - beforeEach(() => { - mockFs.access.mockResolvedValue(undefined); - mockFs.readFile.mockResolvedValue(mockDockerfile); - }); - - it('should use NODE_ENV from environment', async () => { - const originalNodeEnv = process.env.NODE_ENV; - process.env.NODE_ENV = 'staging'; - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - buildargs: expect.objectContaining({ - NODE_ENV: 'staging', - }), - }), - ); - - // Restore original NODE_ENV - process.env.NODE_ENV = originalNodeEnv; - }); - - it('should use GIT_COMMIT from environment', async () => { - const originalGitCommit = process.env.GIT_COMMIT; - process.env.GIT_COMMIT = 'abc123def456'; - - const result = await buildImage(config, createMockToolContext()); - - expect(result.ok).toBe(true); - expect(mockDockerClient.buildImage).toHaveBeenCalledWith( - expect.objectContaining({ - buildargs: expect.objectContaining({ - VCS_REF: 'abc123def456', - }), - }), - ); - - // Restore original GIT_COMMIT - process.env.GIT_COMMIT = originalGitCommit; - }); - }); -}); diff --git a/test/unit/tools/standardized-logging.test.ts b/test/unit/tools/standardized-logging.test.ts index 42e83f0e0..f0d177fbd 100644 --- a/test/unit/tools/standardized-logging.test.ts +++ b/test/unit/tools/standardized-logging.test.ts @@ -16,7 +16,7 @@ import type { Logger } from 'pino'; */ const ALL_TOOLS = [ 'analyze-repo', - 'build-image', + 'build-image-context', 'fix-dockerfile', 'generate-dockerfile', 'generate-k8s-manifests', @@ -51,7 +51,7 @@ describe('Standardized Logging Regression Tests', () => { describe('Tool logging format validation', () => { it('should define all expected tools', () => { expect(ALL_TOOLS.length).toBeGreaterThan(8); - 
expect(ALL_TOOLS).toContain('build-image'); + expect(ALL_TOOLS).toContain('build-image-context'); expect(ALL_TOOLS).toContain('verify-deploy'); }); @@ -201,7 +201,7 @@ describe('Standardized Logging Regression Tests', () => { // Tools should use their directory name as the tool identifier const expectedToolNames = [ 'analyze-repo', - 'build-image', + 'build-image-context', 'fix-dockerfile', 'push-image', 'scan-image', @@ -220,7 +220,7 @@ describe('Standardized Logging Regression Tests', () => { const criticalTools = [ 'analyze-repo', 'fix-dockerfile', - 'build-image', + 'build-image-context', 'scan-image', 'tag-image', 'push-image',