Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 63 additions & 18 deletions frontend/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

192 changes: 192 additions & 0 deletions run_dev.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,192 @@
#!/usr/bin/env bash
set -euo pipefail

###############################################################################
# run_dev.sh – one-command bootstrap for the EYS366 dev environment
#
# Starts: Ollama (+ pulls model) | Django backend | Next.js frontend
# Stop: Ctrl-C (all child processes are cleaned up automatically)
###############################################################################

# Resolve all paths relative to this script so it works from any CWD.
ROOT_DIR="$(cd "$(dirname "$0")" && pwd)"
BACKEND_DIR="$ROOT_DIR/backend"
FRONTEND_DIR="$ROOT_DIR/frontend"
VENV_DIR="$BACKEND_DIR/.venv"
readonly ROOT_DIR BACKEND_DIR FRONTEND_DIR VENV_DIR

# ── Ollama / LLM configuration (overridable via LLM_MODEL / OLLAMA_HOST) ────
OLLAMA_MODEL="${LLM_MODEL:-hf.co/unsloth/Qwen3.5-2B-GGUF:Qwen3.5-2B-Q4_K_M.gguf}"
OLLAMA_HOST="${OLLAMA_HOST:-http://localhost:11434}"
readonly OLLAMA_MODEL OLLAMA_HOST

# ── Colours for log lines ───────────────────────────────────────────────────
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[0;33m'
BLUE='\033[0;34m'; CYAN='\033[0;36m'; NC='\033[0m'
readonly RED GREEN YELLOW BLUE CYAN NC

# Log helpers. %b expands the colour escape sequences; %s prints the message
# verbatim (unlike `echo -e`, backslashes in the message are NOT interpreted).
# err() writes to stderr so diagnostics survive stdout redirection.
log()  { printf '%b[run_dev]%b %s\n' "$GREEN"  "$NC" "$*"; }
warn() { printf '%b[run_dev]%b %s\n' "$YELLOW" "$NC" "$*"; }
err()  { printf '%b[run_dev]%b %s\n' "$RED"    "$NC" "$*" >&2; }

# ── Cleanup on exit ─────────────────────────────────────────────────────────
# PIDs of every background child we start; cleanup() reaps them all.
PIDS=()

# Terminate all tracked children. The traps are reset FIRST so that the
# implicit EXIT trap after an INT/TERM (or a signal arriving mid-cleanup)
# cannot run this function a second time.
cleanup() {
  trap - EXIT INT TERM
  log "Shutting down…"
  if (( ${#PIDS[@]} )); then  # empty-array guard: "${PIDS[@]}" trips set -u on bash < 4.4
    for pid in "${PIDS[@]}"; do
      if kill "$pid" 2>/dev/null; then
        wait "$pid" 2>/dev/null || true  # reap; a killed job's non-zero status is expected
      fi
    done
  fi
  log "All processes stopped."
}
trap cleanup EXIT INT TERM

###############################################################################
# 1. Pre-flight checks
###############################################################################
log "Checking prerequisites…"

# Each guard prints a pointed install hint and aborts when a tool is missing.

# Python
command -v python3 &>/dev/null \
  || { err "python3 is not installed. Please install Python 3.10+."; exit 1; }

# Node / npm — both binaries must be on PATH
{ command -v node &>/dev/null && command -v npm &>/dev/null; } \
  || { err "node/npm is not installed. Please install Node.js 18+."; exit 1; }

# Ollama
command -v ollama &>/dev/null \
  || { err "ollama is not installed. Install it from https://ollama.com"; exit 1; }

###############################################################################
# 2. Ollama – start server & pull model
###############################################################################
log "Starting Ollama server…"

# Start `ollama serve` in the background. If another instance already owns
# the port this one exits immediately, which is harmless.
ollama serve &>/dev/null &
PIDS+=($!)

# Poll the HTTP API until it answers, up to 30 s.
log "Waiting for Ollama API…"
for ((i = 1; i <= 30; i++)); do
  if curl -sf "$OLLAMA_HOST/api/tags" &>/dev/null; then
    break
  fi
  if (( i == 30 )); then
    err "Ollama API did not become reachable within 30 s."
    exit 1
  fi
  sleep 1
done
log "Ollama API is up."

# Pull the model only when absent. -F matches the name as a fixed string
# (it contains '.' and '/', and '.' is a regex metacharacter under plain
# grep, which could false-positive against a similarly named model);
# '--' protects against a name starting with '-'.
if ollama list | grep -qF -- "${OLLAMA_MODEL%%:*}"; then
  log "Model '$OLLAMA_MODEL' already pulled."
else
  log "Pulling model '$OLLAMA_MODEL' (this may take a while on first run)…"
  ollama pull "$OLLAMA_MODEL"
fi

###############################################################################
# 3. Backend – Python venv, dependencies, migrations, runserver
###############################################################################
log "Setting up backend…"

# Bootstrap the virtualenv on first run only.
if [[ ! -d "$VENV_DIR" ]]; then
  log "Creating Python virtual environment…"
  python3 -m venv "$VENV_DIR"
fi

# Activate the venv so pip3/python3 below resolve to its interpreter.
# shellcheck disable=SC1091 — the activate script only exists at runtime
source "$VENV_DIR/bin/activate"

# Bring pip itself and the project requirements up to date.
log "Installing Python dependencies…"
pip3 install --quiet --upgrade pip
pip3 install --quiet -r "$BACKEND_DIR/requirements.txt"

# Apply database migrations non-interactively.
log "Running Django migrations…"
python3 "$BACKEND_DIR/manage.py" migrate --run-syncdb --no-input

# ── Superuser bootstrap ─────────────────────────────────────────────────────
# Ask Django whether any superuser exists; tail -1 keeps only the printed
# boolean in case the shell command emits extra lines.
HAS_SUPERUSER=$(python3 "$BACKEND_DIR/manage.py" shell -c \
  "from users.models import User; print(User.objects.filter(is_superuser=True).exists())" 2>/dev/null | tail -1)

if [ "$HAS_SUPERUSER" = "False" ]; then
  warn "No superuser found. Let's create one."
  echo ""
  read -rp " Superuser username: " SU_USERNAME
  read -rp " Superuser email: " SU_EMAIL
  while true; do
    read -rsp " Superuser password: " SU_PASSWORD
    echo ""
    read -rsp " Confirm password: " SU_PASSWORD2
    echo ""
    if [ -z "$SU_PASSWORD" ]; then
      warn "Password must not be empty. Try again."
      continue
    fi
    if [ "$SU_PASSWORD" = "$SU_PASSWORD2" ]; then
      break
    fi
    warn "Passwords do not match. Try again."
  done
  # Pass the credentials through the environment instead of interpolating
  # them into generated Python source: a quote or backslash in any field
  # (especially the password) would otherwise break the script — and is
  # effectively arbitrary code injection into the Django shell.
  SU_USERNAME="$SU_USERNAME" SU_EMAIL="$SU_EMAIL" SU_PASSWORD="$SU_PASSWORD" \
    python3 "$BACKEND_DIR/manage.py" shell -c "
import os
from users.models import User
User.objects.create_superuser(
    username=os.environ['SU_USERNAME'],
    email=os.environ['SU_EMAIL'],
    password=os.environ['SU_PASSWORD'],
    role='admin',
)
print('Superuser created successfully.')
"
  echo ""
else
  log "Superuser already exists – skipping creation."
fi

# ── LLM environment for Django ──────────────────────────────────────────────
# Point the backend at the local Ollama OpenAI-compatible endpoint.
export LLM_API_URL="http://localhost:11434/v1"
export LLM_MODEL="$OLLAMA_MODEL"
export LLM_API_KEY="ollama"

# ── Django dev server (background, output silenced) ─────────────────────────
log "Starting Django backend on http://127.0.0.1:8000 …"
python3 "$BACKEND_DIR/manage.py" runserver 127.0.0.1:8000 >/dev/null 2>&1 &
PIDS+=($!)

###############################################################################
# 4. Frontend – npm install & dev server
###############################################################################
log "Setting up frontend…"

# npm must run from the frontend directory; return to the repo root after.
cd "$FRONTEND_DIR"

log "Installing Node dependencies…"
npm install --silent

log "Starting Next.js frontend on http://localhost:3000 …"
npm run dev >/dev/null 2>&1 &
PIDS+=($!)

cd "$ROOT_DIR"

###############################################################################
# 5. Ready
###############################################################################
# Final summary banner. The heavy separator is built once and reused; echo -e
# expands the colour escape sequences embedded in the variables.
rule="${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""
echo -e "$rule"
echo -e "${GREEN} EYS366 dev environment is running${NC}"
echo -e "$rule"
echo -e " Frontend : ${BLUE}http://localhost:3000${NC}"
echo -e " Backend : ${BLUE}http://127.0.0.1:8000${NC}"
echo -e " Admin : ${BLUE}http://127.0.0.1:8000/admin/${NC}"
echo -e " Ollama : ${BLUE}$OLLAMA_HOST${NC} (model: ${YELLOW}$OLLAMA_MODEL${NC})"
echo -e "$rule"
echo -e " Press ${RED}Ctrl-C${NC} to stop all services."
echo ""

# Block until Ctrl-C; the EXIT trap then tears down every child process.
wait
Loading