Files
jr-sql-ai/scripts/selftest.sh

213 lines
6.0 KiB
Bash
Executable File

#!/usr/bin/env bash
# Self-test for the jr-sql-ai stack: checks docker/ollama availability,
# model presence in /api/tags, and a round-trip query via bin/sqlai.
# All output is mirrored into a timestamped log under logs/.
set -euo pipefail
# Resolve the repo root relative to this script so it works from any CWD.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
ENV_FILE="${ROOT}/.env"
# Timestamp helper (ISO-8601) used by every log line below.
ts(){ date -Is; }
log_dir="${ROOT}/logs"
mkdir -p "$log_dir"
# log_file is referenced again in the summary at the bottom of the script.
log_file="${log_dir}/selftest-$(date -Iseconds).log"
# Duplicate stdout+stderr into the log file while still printing to console.
exec > >(tee -a "$log_file") 2>&1
echo "================================================================================"
echo "[$(ts)] selftest: START ROOT=$ROOT"
# -------- helpers --------
# Log a FAIL record plus the closing END banner, then terminate the script.
#   $1 - human-readable failure message
#   $2 - exit code (optional, default 1)
fail() {
  local message="$1"
  local rc="${2:-1}"
  printf '%s\n' "[$(ts)] selftest: FAIL code=$rc msg=$message"
  printf '%s\n' "[$(ts)] selftest: END status=FAIL"
  printf '%s\n' "================================================================================"
  exit "$rc"
}
# Log a non-fatal WARN line; all arguments are joined into the message.
warn() {
  printf '%s\n' "[$(ts)] selftest: WARN $*"
}
# Log an OK progress line; all arguments are joined into the message.
ok() {
  printf '%s\n' "[$(ts)] selftest: OK $*"
}
# Abort the run (exit code 10 via fail) unless the named command is on PATH.
need_cmd() {
  if ! command -v "$1" >/dev/null 2>&1; then
    fail "missing command: $1" 10
  fi
}
# robust JSON check
# Succeeds iff stdin parses as JSON.
# Prefer python3: many current distros no longer ship a bare `python` binary,
# so falling back keeps older environments working.
is_json() {
  if command -v python3 >/dev/null 2>&1; then
    python3 -c 'import json,sys; json.load(sys.stdin)' >/dev/null 2>&1
  else
    python -c 'import json,sys; json.load(sys.stdin)' >/dev/null 2>&1
  fi
}
# Extract model names from /api/tags JSON and check membership
# Check whether a model appears in an Ollama /api/tags payload.
#   $1 - model name (with or without a ":latest" suffix)
#   $2 - raw JSON string from GET /api/tags
# "name" and "name:latest" are treated as the same model.
# Prefers python3 (bare `python` is absent on many current distros).
model_in_tags() {
  local model="$1"
  local tags="$2"
  local py="python"
  command -v python3 >/dev/null 2>&1 && py="python3"
  printf '%s' "$tags" | "$py" -c '
import json,sys
m=sys.argv[1]
obj=json.load(sys.stdin)
names=set()
for it in obj.get("models", []):
    n=it.get("name")
    if n:
        names.add(n)
ok = (m in names) or (m + ":latest" in names) or (m.endswith(":latest") and m[:-7] in names)
sys.exit(0 if ok else 1)
' "$model" >/dev/null 2>&1
}
# -------- preflight --------
# Required external tools; each missing one aborts with exit code 10.
need_cmd docker
need_cmd curl
need_cmd python
need_cmd grep
need_cmd sed
if [[ ! -f "$ENV_FILE" ]]; then
  warn ".env not found, creating from .env.example"
  # -n: never clobber an existing .env; tolerate a failed copy here...
  cp -n "${ROOT}/.env.example" "${ROOT}/.env" || true
  # ...but fail loudly if no .env exists afterwards (e.g. .env.example is
  # missing). Without this guard the 'source' below dies with an opaque
  # error under set -e.
  [[ -f "$ENV_FILE" ]] || fail ".env missing and could not be created from .env.example" 15
fi
# shellcheck disable=SC1090
source "${ROOT}/.env"
# Defaults for values the .env may omit.
: "${OLLAMA_URL:=http://127.0.0.1:11434}"
: "${EXPERT_MODEL:=jr-sql-expert}"
: "${BASE_MODEL:=}"
ok "loaded env: OLLAMA_URL=$OLLAMA_URL EXPERT_MODEL=$EXPERT_MODEL BASE_MODEL=${BASE_MODEL:-<empty>}"
if [[ ! -x "${ROOT}/bin/sqlai" ]]; then
  fail "bin/sqlai missing or not executable at ${ROOT}/bin/sqlai" 11
fi
# docker compose availability (plugin or legacy)
if docker compose version >/dev/null 2>&1; then
  ok "docker compose available"
else
  fail "docker compose not available (install docker compose plugin)" 12
fi
# container state
if docker ps --format '{{.Names}}' | grep -qx 'ollama'; then
  ok "container 'ollama' is running"
else
  warn "container 'ollama' not running; attempting 'docker compose up -d'"
  docker compose -f "${ROOT}/docker-compose.yml" up -d || fail "docker compose up failed" 13
  # Poll for a few seconds instead of checking exactly once: a container can
  # take a moment to reach the running state after 'up -d' returns.
  started="0"
  for _ in {1..10}; do
    if docker ps --format '{{.Names}}' | grep -qx 'ollama'; then
      started="1"
      break
    fi
    sleep 1
  done
  [[ "$started" == "1" ]] || fail "container 'ollama' still not running after compose up" 14
  ok "container 'ollama' is running after compose up"
fi
# Wait for the Ollama HTTP API to answer, up to 120 seconds.
ok "waiting for Ollama API at $OLLAMA_URL ..."
api_ok="0"
for _ in {1..120}; do
  if curl -sS "${OLLAMA_URL}/api/tags" >/dev/null 2>&1; then
    api_ok="1"
    break
  fi
  sleep 1
done
if [[ "$api_ok" != "1" ]]; then
  fail "Ollama API not reachable at $OLLAMA_URL after waiting" 20
fi
ok "Ollama API reachable"
# Pull the tag list once and make sure it is usable JSON before parsing it.
# (tags is consumed by the model-availability checks below.)
tags="$(curl -sS "${OLLAMA_URL}/api/tags" || true)"
if [[ -z "$tags" ]]; then
  fail "/api/tags returned empty body" 21
fi
if printf '%s' "$tags" | is_json; then
  ok "/api/tags returned valid JSON"
else
  echo "[$(ts)] selftest: /api/tags RAW_BEGIN"
  printf '%s\n' "$tags" | head -n 200
  echo "[$(ts)] selftest: /api/tags RAW_END"
  fail "/api/tags did not return JSON" 22
fi
# Model availability checks: record whether the expert and/or base model
# is already present in the /api/tags listing fetched above.
expert_present="0"
base_present="0"
if model_in_tags "$EXPERT_MODEL" "$tags"; then
  expert_present="1"
  ok "EXPERT_MODEL present: $EXPERT_MODEL"
else
  warn "EXPERT_MODEL not present in tags: $EXPERT_MODEL"
fi
if [[ -n "${BASE_MODEL:-}" ]] && model_in_tags "$BASE_MODEL" "$tags"; then
  base_present="1"
  ok "BASE_MODEL present: $BASE_MODEL"
else
  if [[ -n "${BASE_MODEL:-}" ]]; then
    warn "BASE_MODEL not present in tags: $BASE_MODEL"
  else
    warn "BASE_MODEL empty in .env"
  fi
fi
# With neither model available the later queries cannot work; try pulling
# the base model as a last resort.
if [[ "$expert_present" != "1" && "$base_present" != "1" ]]; then
  warn "No expert/base model found. Attempting to pull BASE_MODEL if set..."
  if [[ -n "${BASE_MODEL:-}" ]]; then
    # No -it here: stdout is already a tee pipe and the script may run from
    # cron/CI, where 'docker exec -t' fails with "the input device is not a
    # TTY". A plain exec streams pull progress just fine.
    docker exec ollama ollama pull "${BASE_MODEL}" || warn "ollama pull BASE_MODEL failed"
    tags2="$(curl -sS "${OLLAMA_URL}/api/tags" || true)"
    if printf '%s' "$tags2" | is_json && model_in_tags "$BASE_MODEL" "$tags2"; then
      ok "BASE_MODEL now present after pull: $BASE_MODEL"
      base_present="1"
    else
      fail "Neither EXPERT_MODEL nor BASE_MODEL available after attempted pull" 23
    fi
  else
    fail "Neither EXPERT_MODEL nor BASE_MODEL available and BASE_MODEL is empty" 24
  fi
fi
# Warmup request: prime the model; a failure here is tolerated because the
# real query below is the actual check.
ok "warmup request via sqlai (no-metrics)"
warm_rc=0
"${ROOT}/bin/sqlai" ask --text "Warmup: reply with exactly 'OK'." --no-metrics || warm_rc=$?
if [[ "$warm_rc" -eq 0 ]]; then
  ok "warmup succeeded"
else
  warn "warmup failed rc=$warm_rc (continuing to real query)"
fi
# Real query (slightly longer); capture combined output for diagnostics.
ok "real query via sqlai (ask)"
query_rc=0
out="$("${ROOT}/bin/sqlai" ask --text "Give a concise checklist to troubleshoot parameter sniffing in SQL Server 2022. Keep it technical." --no-metrics 2>&1)" || query_rc=$?
if [[ "$query_rc" -ne 0 ]]; then
  echo "[$(ts)] selftest: sqlai output BEGIN"
  printf '%s\n' "$out" | tail -n 200
  echo "[$(ts)] selftest: sqlai output END"
  fail "real query failed rc=$query_rc" 30
fi
# Basic sanity: strip all whitespace; anything left counts as real output.
if [[ -z "${out//[[:space:]]/}" ]]; then
  fail "real query returned empty output" 31
fi
ok "real query returned non-empty output"
# Final summary block: one line per checked fact, then the END banner.
printf '%s\n' "--------------------------------------------------------------------------------"
printf '%s\n' "[$(ts)] selftest: SUMMARY"
printf '%s\n' "[$(ts)] selftest: API=OK"
printf '%s\n' "[$(ts)] selftest: EXPERT_MODEL_PRESENT=$expert_present"
printf '%s\n' "[$(ts)] selftest: BASE_MODEL_PRESENT=$base_present"
printf '%s\n' "[$(ts)] selftest: LOG_FILE=$log_file"
printf '%s\n' "--------------------------------------------------------------------------------"
printf '%s\n' "[$(ts)] selftest: END status=OK"
printf '%s\n' "================================================================================"