forked from tosanjay/BinCodeQL
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
59 lines (50 loc) · 2.76 KB
/
.env.example
File metadata and controls
59 lines (50 loc) · 2.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# =============================================================================
# BinCodeQL Configuration
# =============================================================================
# Copy this file to .env and fill in your values:
# cp .env.example .env
#
# .env is gitignored — your secrets and local paths stay private.
# =============================================================================
# =============================================================================
# Model Configuration
# =============================================================================
# LiteLLM model ID. The matching provider API key is picked automatically
# from the model-name prefix (see the API Keys section below).
# Examples: anthropic/claude-sonnet-4-6, openai/gpt-5, openai/gpt-5.2
MODEL_NAME="anthropic/claude-sonnet-4-6"
# =============================================================================
# API Keys — set the key(s) for your chosen provider(s)
# =============================================================================
# API_KEY overrides provider-specific keys (use if you want one key for all):
#API_KEY=""
# Provider-specific keys (auto-selected based on MODEL_NAME prefix):
OPENAI_API_KEY=""
ANTHROPIC_API_KEY=""
#GOOGLE_API_KEY=""
#OPENROUTER_API_KEY=""
# =============================================================================
# Binary Ninja MCP Bridge
# =============================================================================
# Python interpreter inside the MCP bridge venv (must have the mcp package installed)
MCP_PYTHON_PATH="/path/to/binary_ninja_mcp/.venv/bin/python3"
# Path to the MCP bridge script
MCP_BRIDGE_PATH="/path/to/binary_ninja_mcp/bridge/binja_mcp_bridge.py"
# =============================================================================
# Binary Ninja Headless (for batch fact extraction)
# =============================================================================
# Option A: Full path to a Python interpreter with binaryninja installed
#BN_PYTHON="/path/to/python3"
# Option B: Path to binaryninja Python package directory
# (added to PYTHONPATH when running headless extraction)
#BN_PYTHON_PATH="/path/to/binaryninja/python"
# =============================================================================
# Pre-analyzed Binary Ninja Database (optional, speeds up extraction)
# =============================================================================
# If set, tool_extract_facts_batch uses this as the default binary_path.
# .bndb files load in milliseconds (vs seconds/minutes for raw binaries)
# and include user-refined analysis (renamed functions, custom types).
#BNDB_PATH="/path/to/target.bndb"
# =============================================================================
# ADK / Vertex AI (optional)
# =============================================================================
GOOGLE_GENAI_USE_VERTEXAI=FALSE