Add AI functionality; overhaul the UI (known issues remain).

This commit is contained in:
Ned Halksworth
2026-05-04 20:00:31 +01:00
parent e0f2eedcd9
commit 0fe063fed5
10 changed files with 1528 additions and 405 deletions
+3
View File
@@ -0,0 +1,3 @@
OLLAMA_API_KEY=
OLLAMA_DEFAULT_MODEL=gpt-oss:120b
OLLAMA_API_BASE=https://ollama.com
+1
View File
@@ -14,3 +14,4 @@ backend/venv/
.env
.env.*
!.env.example
+30 -1
View File
@@ -1,6 +1,6 @@
# sFetch
sFetch is a full-stack search engine prototype with a lightweight Google/DDG-inspired frontend, a FastAPI search API, and an async crawler that indexes pages into a local SQLite FTS5 database.
sFetch is a full-stack search engine prototype with a serious search interface, a FastAPI search API, Ollama Cloud-powered AI answers, and an async crawler that indexes pages into a local SQLite FTS5 database.
On first backend launch, sFetch downloads the latest Tranco top-site list, filters pornographic/adult domains, and seeds up to 1,000 non-adult sites if that seed has not already been recorded in the database.
@@ -11,6 +11,7 @@ sFetch/
├── backend/
│ ├── main.py
│ ├── crawler.py
│ ├── ollama_cloud.py
│ ├── top_sites.py
│ ├── content_filter.py
│ ├── indexer.py
@@ -21,6 +22,7 @@ sFetch/
│ └── requirements.txt
├── frontend/
│ ├── index.html
│ ├── ai.html
│ └── results.html
└── README.md
```
@@ -46,6 +48,23 @@ sFetch/
The frontend uses `const API_BASE = "http://localhost:8000";` at the top of each page script.
## Ollama Cloud AI
sFetch reads Ollama Cloud credentials from environment variables. Do not hardcode API keys into source files.
```bash
export OLLAMA_API_KEY=your_api_key
export OLLAMA_DEFAULT_MODEL=gpt-oss:120b
```
AI features:
- `GET /ai/models` loads all models currently returned by Ollama Cloud's `/api/tags`.
- `POST /ai/search` generates an AI answer for search results using local indexed results and optional Ollama web search context.
- `POST /ai/search/stream` streams a search-grounded answer as server-sent events.
- `POST /ai/chat` powers the dedicated AI chat page at `frontend/ai.html`, with model selection and optional web search context.
- `POST /ai/chat/stream` streams chat responses as server-sent events.
## Crawling
The home page has index controls for:
@@ -92,6 +111,12 @@ The crawler:
| `POST` | `/crawl/top-sites` | Queue the top-site seed crawl |
| `GET` | `/crawl/top-sites/status` | Check top-site seed state |
| `GET` | `/stats` | Total indexed pages and latest index time |
| `GET` | `/ai/config` | Check Ollama Cloud configuration |
| `GET` | `/ai/models` | List available Ollama Cloud models |
| `POST` | `/ai/search` | Generate an AI answer for a search query |
| `POST` | `/ai/search/stream` | Stream an AI answer for a search query |
| `POST` | `/ai/chat` | Generate an AI chat response |
| `POST` | `/ai/chat/stream` | Stream an AI chat response |
## Configuration
@@ -107,6 +132,9 @@ sFetch's crawl and storage behavior lives in `backend/config.py`:
| `TOP_SITE_SOURCE_URL` | Top-site list source |
| `TOP_SITE_SEED_LIMIT` | Number of safe top sites to seed |
| `USER_AGENT` | User agent sent by `sFetchBot` |
| `OLLAMA_API_BASE` | Ollama Cloud API base URL |
| `OLLAMA_API_KEY` | API key used for authenticated Ollama Cloud calls |
| `OLLAMA_DEFAULT_MODEL` | Default model selected in AI features |
## Tech Stack
@@ -114,6 +142,7 @@ sFetch's crawl and storage behavior lives in `backend/config.py`:
| --- | --- |
| Frontend | HTML, TailwindCSS CDN, Vanilla JavaScript |
| Backend | Python, FastAPI |
| AI | Ollama Cloud API |
| Crawler | Python, `httpx`, `BeautifulSoup4`, `asyncio` |
| Search Index | SQLite FTS5 via `aiosqlite` |
| Top Sites | Tranco daily top-site ZIP with bundled fallback |
+13
View File
@@ -1,9 +1,18 @@
"""Application-wide configuration for sFetch."""
import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent
try:
from dotenv import load_dotenv
load_dotenv(BASE_DIR.parent / ".env")
load_dotenv(BASE_DIR / ".env")
except ImportError:
pass
MAX_CRAWL_DEPTH = 2
MAX_PAGES_PER_DOMAIN = 50
CRAWL_DELAY_SECONDS = 1.0
@@ -14,6 +23,10 @@ TOP_SITE_SOURCE_URL = "https://tranco-list.eu/top-1m.csv.zip"
TOP_SITE_SEED_LIMIT = 1000
TOP_SITE_DOWNLOAD_TIMEOUT_SECONDS = 30.0
TOP_SITE_SEED_META_KEY = "top_site_seed_v1"
OLLAMA_API_BASE = os.getenv("OLLAMA_API_BASE", "https://ollama.com").rstrip("/")
OLLAMA_API_KEY = os.getenv("OLLAMA_API_KEY", "")
OLLAMA_DEFAULT_MODEL = os.getenv("OLLAMA_DEFAULT_MODEL", "gpt-oss:120b")
OLLAMA_REQUEST_TIMEOUT_SECONDS = 90.0
ADULT_DOMAINS = {
"pornhub.com", "xvideos.com", "xnxx.com", "xhamster.com", "redtube.com",
+302 -1
View File
@@ -3,10 +3,13 @@
from __future__ import annotations
import asyncio
import json
from collections.abc import AsyncIterator
from datetime import UTC, datetime
from fastapi import FastAPI, HTTPException, Query, BackgroundTasks
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from crawler import sFetchBot
from config import TOP_SITE_SEED_LIMIT, TOP_SITE_SEED_META_KEY
@@ -19,7 +22,16 @@ from database import (
init_db,
set_meta_value,
)
from models import CrawlRequest, SearchResponse
from models import AIAnswerResponse, AIChatRequest, AISearchRequest, AISource, CrawlRequest, SearchResponse
from ollama_cloud import (
OllamaCloudError,
chat as ollama_chat,
default_model,
is_ollama_configured,
list_models as list_ollama_models,
stream_chat as ollama_stream_chat,
web_search as ollama_web_search,
)
from searcher import search, search_images_api, search_videos_api
from top_sites import load_top_site_seed_urls
@@ -205,3 +217,292 @@ async def crawl_top_sites_status_endpoint() -> dict[str, object]:
async def stats_endpoint() -> dict[str, object]:
stats = await get_stats()
return stats
@app.get("/ai/config")
async def ai_config_endpoint() -> dict[str, object]:
return {
"configured": is_ollama_configured(),
"default_model": default_model(),
"provider": "Ollama Cloud",
}
@app.get("/ai/models")
async def ai_models_endpoint() -> dict[str, object]:
try:
models = await list_ollama_models()
except OllamaCloudError as exc:
raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc
return {
"default_model": default_model(),
"models": models,
}
@app.post("/ai/chat", response_model=AIAnswerResponse)
async def ai_chat_endpoint(request: AIChatRequest) -> AIAnswerResponse:
model = (request.model or default_model()).strip()
if not model:
raise HTTPException(status_code=400, detail="Model is required.")
if not request.messages:
raise HTTPException(status_code=400, detail="At least one message is required.")
try:
messages, sources = await _build_chat_messages_and_sources(request)
response = await ollama_chat(model=model, messages=messages, think=request.think)
except OllamaCloudError as exc:
raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc
message = response.get("message") or {}
return AIAnswerResponse(
model=response.get("model") or model,
content=message.get("content") or "",
thinking=message.get("thinking"),
sources=sources,
configured=is_ollama_configured(),
)
def _sse(event: str, data: object) -> str:
return f"event: {event}\ndata: {json.dumps(data, ensure_ascii=False)}\n\n"
async def _build_chat_messages_and_sources(request: AIChatRequest) -> tuple[list[dict[str, object]], list[AISource]]:
    """Convert an AIChatRequest into Ollama-format messages plus grounding sources.

    Drops messages with neither text content nor tool calls. When web search is
    requested, queries Ollama web search with the latest non-empty user message
    and prepends a system message containing the numbered source context.

    Raises:
        OllamaCloudError: (status 400) when no usable message remains after filtering.
    """
    messages = [
        message.model_dump(exclude_none=True)
        for message in request.messages
        if message.content.strip() or message.tool_calls
    ]
    if not messages:
        raise OllamaCloudError("At least one message is required.", status_code=400)
    sources: list[AISource] = []
    if request.use_web_search:
        # Search on the most recent user turn; older turns are only conversation history.
        latest_user_message = next(
            (message.content for message in reversed(request.messages) if message.role == "user" and message.content.strip()),
            "",
        )
        if latest_user_message:
            web_results = await ollama_web_search(latest_user_message, max_results=request.web_result_limit)
            sources = [
                AISource(
                    title=result.get("title") or result.get("url") or "Web result",
                    url=result.get("url") or "",
                    source_type="web",
                    content=result.get("content") or "",
                )
                for result in web_results
                if result.get("url")
            ]
    if sources:
        # Inject context as the first system message so the model sees it before the conversation.
        context = "\n".join(_source_text(source, index) for index, source in enumerate(sources, start=1))
        messages.insert(
            0,
            {
                "role": "system",
                "content": (
                    "Use the following web search context when it is relevant. "
                    "Cite sources inline using bracket numbers such as [1].\n\n"
                    f"{context}"
                ),
            },
        )
    return messages, sources
async def _stream_ollama_events(
    model: str,
    messages: list[dict[str, object]],
    think: bool | str | None,
    sources: list[AISource],
) -> AsyncIterator[str]:
    """Yield SSE frames for a streamed Ollama chat: meta, thinking/content deltas, done/error.

    Event protocol consumed by the frontend:
      - "meta": model, configured flag, and sources (sent first, before any tokens).
      - "thinking"/"content": incremental deltas.
      - "done": full accumulated content/thinking plus sources.
      - "error": failure detail with a status code; the stream ends afterwards.
    """
    content = ""
    thinking = ""
    yield _sse(
        "meta",
        {
            "model": model,
            "configured": is_ollama_configured(),
            "sources": [source.model_dump() for source in sources],
        },
    )
    try:
        async for chunk in ollama_stream_chat(model=model, messages=messages, think=think):
            message = chunk.get("message") or {}
            thinking_delta = message.get("thinking") or ""
            content_delta = message.get("content") or ""
            if thinking_delta:
                thinking += thinking_delta
                yield _sse("thinking", {"delta": thinking_delta})
            if content_delta:
                content += content_delta
                yield _sse("content", {"delta": content_delta})
            if chunk.get("done"):
                # Final chunk: emit the accumulated response and stop consuming.
                yield _sse(
                    "done",
                    {
                        "model": chunk.get("model") or model,
                        "content": content,
                        "thinking": thinking,
                        "sources": [source.model_dump() for source in sources],
                    },
                )
                return
    except OllamaCloudError as exc:
        yield _sse("error", {"detail": str(exc), "status_code": exc.status_code})
    except Exception as exc:
        # Errors cannot be raised mid-stream (headers already sent), so report in-band.
        yield _sse("error", {"detail": f"Streaming failed: {exc}", "status_code": 502})
@app.post("/ai/chat/stream")
async def ai_chat_stream_endpoint(request: AIChatRequest) -> StreamingResponse:
model = (request.model or default_model()).strip()
if not model:
raise HTTPException(status_code=400, detail="Model is required.")
if not is_ollama_configured():
raise HTTPException(status_code=503, detail="Ollama Cloud is not configured. Set OLLAMA_API_KEY.")
try:
messages, sources = await _build_chat_messages_and_sources(request)
except OllamaCloudError as exc:
raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc
return StreamingResponse(
_stream_ollama_events(model=model, messages=messages, think=request.think, sources=sources),
media_type="text/event-stream",
headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"},
)
def _source_text(source: AISource, index: int) -> str:
return (
f"[{index}] {source.title}\n"
f"Type: {source.source_type}\n"
f"URL: {source.url}\n"
f"Excerpt: {source.content[:1200]}\n"
)
async def _build_ai_search_sources(request: AISearchRequest) -> list[AISource]:
    """Collect grounding sources for an AI search answer.

    Always queries the local FTS index; additionally queries Ollama web search
    when `include_web` is set. Web results without a URL are dropped.
    """
    local_results = await search(query=request.query, limit=request.local_result_limit, offset=0)
    sources = [
        AISource(
            title=result["title"],
            url=result["url"],
            source_type="local",
            content=result["snippet"],
        )
        for result in local_results
    ]
    if request.include_web:
        web_results = await ollama_web_search(request.query, max_results=request.web_result_limit)
        sources.extend(
            AISource(
                title=result.get("title") or result.get("url") or "Web result",
                url=result.get("url") or "",
                source_type="web",
                content=result.get("content") or "",
            )
            for result in web_results
            if result.get("url")
        )
    return sources
@app.post("/ai/search", response_model=AIAnswerResponse)
async def ai_search_endpoint(request: AISearchRequest) -> AIAnswerResponse:
model = (request.model or default_model()).strip()
query = request.query.strip()
if not model:
raise HTTPException(status_code=400, detail="Model is required.")
if not query:
raise HTTPException(status_code=400, detail="Query is required.")
try:
sources = await _build_ai_search_sources(request)
source_context = "\n".join(_source_text(source, index) for index, source in enumerate(sources, start=1))
if not source_context:
source_context = "No search sources were found for this query."
messages = [
{
"role": "system",
"content": (
"You are sFetch AI, a precise search assistant. Answer only from the provided sources. "
"Write in a neutral, professional tone. Keep the response concise. "
"Cite sources inline using bracket numbers such as [1]. "
"If the sources are insufficient, say what is missing rather than guessing."
),
},
{
"role": "user",
"content": f"Search query: {query}\n\nSources:\n{source_context}",
},
]
response = await ollama_chat(model=model, messages=messages, think=request.think)
except OllamaCloudError as exc:
raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc
message = response.get("message") or {}
return AIAnswerResponse(
model=response.get("model") or model,
content=message.get("content") or "",
thinking=message.get("thinking"),
sources=sources,
configured=is_ollama_configured(),
)
async def _build_ai_search_messages(request: AISearchRequest) -> tuple[list[dict[str, str]], list[AISource]]:
    """Build the system+user prompt and source list for an AI search answer.

    Returns the message list to send to Ollama and the sources used, so callers
    can surface citations alongside the generated answer.
    """
    sources = await _build_ai_search_sources(request)
    source_context = "\n".join(_source_text(source, index) for index, source in enumerate(sources, start=1))
    if not source_context:
        # Tell the model explicitly that grounding is unavailable rather than sending an empty block.
        source_context = "No search sources were found for this query."
    messages = [
        {
            "role": "system",
            "content": (
                "You are sFetch AI, a precise search assistant. Answer only from the provided sources. "
                "Write in a neutral, useful tone with direct synthesis. "
                "Cite sources inline using bracket numbers such as [1]. "
                "If sources are insufficient, say what is missing rather than guessing."
            ),
        },
        {
            "role": "user",
            "content": f"Search query: {request.query.strip()}\n\nSources:\n{source_context}",
        },
    ]
    return messages, sources
@app.post("/ai/search/stream")
async def ai_search_stream_endpoint(request: AISearchRequest) -> StreamingResponse:
model = (request.model or default_model()).strip()
query = request.query.strip()
if not model:
raise HTTPException(status_code=400, detail="Model is required.")
if not query:
raise HTTPException(status_code=400, detail="Query is required.")
if not is_ollama_configured():
raise HTTPException(status_code=503, detail="Ollama Cloud is not configured. Set OLLAMA_API_KEY.")
try:
messages, sources = await _build_ai_search_messages(request)
except OllamaCloudError as exc:
raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc
return StreamingResponse(
_stream_ollama_events(model=model, messages=messages, think=request.think, sources=sources),
media_type="text/event-stream",
headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"},
)
+42
View File
@@ -2,6 +2,8 @@
from __future__ import annotations
from typing import Any, Literal
from pydantic import BaseModel, Field
@@ -41,3 +43,43 @@ class CrawlRequest(BaseModel):
max_depth: int = Field(default=2, ge=0, le=5)
max_pages_per_domain: int = Field(default=50, ge=1, le=500)
same_domain_only: bool = True
class AIMessage(BaseModel):
    """One chat message in the Ollama chat format."""

    # Roles mirror Ollama's chat schema; "tool" carries tool output back to the model.
    role: Literal["system", "user", "assistant", "tool"]
    content: str = ""
    thinking: str | None = None
    tool_name: str | None = None
    tool_calls: list[dict[str, Any]] | None = None


class AIChatRequest(BaseModel):
    """Request body for the /ai/chat and /ai/chat/stream endpoints."""

    # When model is None the server falls back to its configured default.
    model: str | None = None
    messages: list[AIMessage] = Field(min_length=1)
    think: bool | str | None = None
    use_web_search: bool = False
    web_result_limit: int = Field(default=5, ge=1, le=10)


class AISearchRequest(BaseModel):
    """Request body for the /ai/search and /ai/search/stream endpoints."""

    query: str = Field(min_length=1)
    model: str | None = None
    include_web: bool = True
    local_result_limit: int = Field(default=5, ge=1, le=10)
    web_result_limit: int = Field(default=5, ge=1, le=10)
    think: bool | str | None = None


class AISource(BaseModel):
    """A single grounding source: a local index hit or a web search result."""

    title: str
    url: str
    source_type: Literal["local", "web"]
    content: str = ""


class AIAnswerResponse(BaseModel):
    """AI answer returned by the chat and search endpoints."""

    model: str
    content: str
    thinking: str | None = None
    sources: list[AISource] = Field(default_factory=list)
    configured: bool = True
+138
View File
@@ -0,0 +1,138 @@
"""Ollama Cloud API client helpers for AI search and chat."""
from __future__ import annotations
import json
from collections.abc import AsyncIterator
from typing import Any
import httpx
from config import (
OLLAMA_API_BASE,
OLLAMA_API_KEY,
OLLAMA_DEFAULT_MODEL,
OLLAMA_REQUEST_TIMEOUT_SECONDS,
)
class OllamaCloudError(RuntimeError):
    """Raised when Ollama Cloud is unavailable or rejects a request."""

    def __init__(self, message: str, status_code: int = 502) -> None:
        super().__init__(message)
        # HTTP status propagated to API handlers; 502 marks a generic upstream failure.
        self.status_code = status_code


def is_ollama_configured() -> bool:
    """Return True when an Ollama Cloud API key is configured."""
    return bool(OLLAMA_API_KEY)


def default_model() -> str:
    """Return the configured default model name."""
    return OLLAMA_DEFAULT_MODEL


def _headers(require_auth: bool = False) -> dict[str, str]:
    """Build request headers for Ollama Cloud calls.

    Raises:
        OllamaCloudError: (status 503) when auth is required but no key is set.
    """
    headers = {"Content-Type": "application/json"}
    if OLLAMA_API_KEY:
        headers["Authorization"] = f"Bearer {OLLAMA_API_KEY}"
    elif require_auth:
        raise OllamaCloudError(
            "Ollama Cloud is not configured. Set OLLAMA_API_KEY before using AI responses.",
            status_code=503,
        )
    return headers
def _normalise_error(response: httpx.Response) -> str:
try:
payload = response.json()
except ValueError:
return response.text.strip() or response.reason_phrase
detail = payload.get("error") or payload.get("detail") or payload
return str(detail)
async def list_models() -> list[dict[str, Any]]:
    """Fetch the available models from Ollama Cloud's /api/tags, sorted by name.

    Auth is optional here: the endpoint is queried even without a key.

    Raises:
        OllamaCloudError: on any HTTP error response.
    """
    timeout = httpx.Timeout(OLLAMA_REQUEST_TIMEOUT_SECONDS)
    async with httpx.AsyncClient(timeout=timeout) as client:
        response = await client.get(f"{OLLAMA_API_BASE}/api/tags", headers=_headers(require_auth=False))
    if response.status_code >= 400:
        raise OllamaCloudError(_normalise_error(response), status_code=response.status_code)
    payload = response.json()
    models = payload.get("models") or []
    # Sort by whichever name field is present so the UI gets a stable ordering.
    return sorted(models, key=lambda item: item.get("name") or item.get("model") or "")


async def chat(
    model: str,
    messages: list[dict[str, Any]],
    think: bool | str | None = None,
) -> dict[str, Any]:
    """Run one non-streaming chat completion against Ollama Cloud.

    Raises:
        OllamaCloudError: when unconfigured (503) or on any HTTP error response.
    """
    payload: dict[str, Any] = {
        "model": model,
        "messages": messages,
        "stream": False,
    }
    # "off" is the UI sentinel for "do not send a think flag at all".
    if think is not None and think != "off":
        payload["think"] = think
    timeout = httpx.Timeout(OLLAMA_REQUEST_TIMEOUT_SECONDS)
    async with httpx.AsyncClient(timeout=timeout) as client:
        response = await client.post(
            f"{OLLAMA_API_BASE}/api/chat",
            headers=_headers(require_auth=True),
            json=payload,
        )
    if response.status_code >= 400:
        raise OllamaCloudError(_normalise_error(response), status_code=response.status_code)
    return response.json()
async def stream_chat(
    model: str,
    messages: list[dict[str, Any]],
    think: bool | str | None = None,
) -> AsyncIterator[dict[str, Any]]:
    """Stream chat chunks from Ollama Cloud as parsed JSON objects.

    Yields one dict per newline-delimited JSON line from the upstream stream.
    Lines that fail to parse as JSON are passed through wrapped as plain
    message content rather than aborting the stream.

    Raises:
        OllamaCloudError: when unconfigured (503) or on an HTTP error response.
    """
    payload: dict[str, Any] = {
        "model": model,
        "messages": messages,
        "stream": True,
    }
    # "off" is the UI sentinel for "do not send a think flag at all".
    if think is not None and think != "off":
        payload["think"] = think
    timeout = httpx.Timeout(OLLAMA_REQUEST_TIMEOUT_SECONDS)
    async with httpx.AsyncClient(timeout=timeout) as client:
        async with client.stream(
            "POST",
            f"{OLLAMA_API_BASE}/api/chat",
            headers=_headers(require_auth=True),
            json=payload,
        ) as response:
            if response.status_code >= 400:
                # The error body must be read explicitly in streaming mode.
                body = (await response.aread()).decode("utf-8", errors="replace")
                raise OllamaCloudError(body or response.reason_phrase, status_code=response.status_code)
            async for line in response.aiter_lines():
                clean_line = line.strip()
                if not clean_line:
                    continue
                try:
                    yield json.loads(clean_line)
                except json.JSONDecodeError:
                    yield {"message": {"content": clean_line}}
async def web_search(query: str, max_results: int = 5) -> list[dict[str, Any]]:
    """Run an Ollama Cloud web search and return its result dicts.

    `max_results` is clamped to the 1..10 range before being sent upstream.

    Raises:
        OllamaCloudError: when unconfigured (503) or on any HTTP error response.
    """
    timeout = httpx.Timeout(OLLAMA_REQUEST_TIMEOUT_SECONDS)
    async with httpx.AsyncClient(timeout=timeout) as client:
        response = await client.post(
            f"{OLLAMA_API_BASE}/api/web_search",
            headers=_headers(require_auth=True),
            json={"query": query, "max_results": max(1, min(max_results, 10))},
        )
    if response.status_code >= 400:
        raise OllamaCloudError(_normalise_error(response), status_code=response.status_code)
    payload = response.json()
    return payload.get("results") or []
+446
View File
@@ -0,0 +1,446 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>sFetch AI</title>
<script src="https://cdn.tailwindcss.com"></script>
<script>
tailwind.config = {
theme: {
extend: {
colors: {
shell: {
bg: "#f7f7f4",
panel: "#ffffff",
raised: "#fbfbf8",
ink: "#171717",
muted: "#6f6f68",
line: "#deded6",
soft: "#eeeeE8",
accent: "#315f95",
accentDark: "#244a75",
warm: "#8a5a20",
},
},
boxShadow: {
lift: "0 18px 55px rgba(23, 23, 23, 0.08)",
focus: "0 0 0 3px rgba(49, 95, 149, 0.16)",
},
},
},
};
</script>
<style>
:root { color-scheme: light; }
body {
background: #f7f7f4;
color: #171717;
font-family: Inter, ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
}
.message-text p + p { margin-top: 0.8rem; }
.pulse-dot {
animation: pulse 1.1s ease-in-out infinite;
}
@keyframes pulse {
0%, 100% { opacity: 0.35; transform: scale(0.9); }
50% { opacity: 1; transform: scale(1); }
}
</style>
</head>
<body class="min-h-screen overflow-hidden">
<div class="grid h-screen grid-cols-1 lg:grid-cols-[280px_1fr]">
<aside class="hidden border-r border-shell-line bg-shell-raised lg:flex lg:flex-col">
<div class="border-b border-shell-line px-5 py-5">
<a href="./index.html" class="text-2xl font-semibold tracking-tight text-shell-ink">sFetch</a>
<p id="configText" class="mt-2 text-xs text-shell-muted">Checking Ollama Cloud...</p>
</div>
<nav class="space-y-1 p-3 text-sm">
<a href="./index.html" class="flex items-center justify-between rounded-lg px-3 py-2 font-medium text-shell-muted hover:bg-shell-soft hover:text-shell-ink">
Search
<span class="text-xs">/</span>
</a>
<a href="./ai.html" class="flex items-center justify-between rounded-lg bg-shell-soft px-3 py-2 font-medium text-shell-ink">
AI Chat
<span class="text-xs">active</span>
</a>
<a href="./results.html" class="flex items-center justify-between rounded-lg px-3 py-2 font-medium text-shell-muted hover:bg-shell-soft hover:text-shell-ink">
Results
<span class="text-xs">index</span>
</a>
</nav>
<div class="mt-auto border-t border-shell-line p-4">
<button id="newChatSidebar" class="w-full rounded-lg border border-shell-line bg-white px-4 py-2 text-sm font-semibold text-shell-ink transition hover:border-shell-accent hover:text-shell-accent">
New chat
</button>
</div>
</aside>
<main class="flex min-h-0 flex-col">
<header class="flex flex-col gap-3 border-b border-shell-line bg-shell-panel px-4 py-3 md:flex-row md:items-center md:justify-between">
<div class="flex items-center justify-between gap-3 lg:hidden">
<a href="./index.html" class="text-xl font-semibold tracking-tight text-shell-ink">sFetch</a>
<a href="./index.html" class="rounded-lg border border-shell-line px-3 py-2 text-sm font-medium text-shell-muted">Search</a>
</div>
<div class="grid gap-3 md:grid-cols-[minmax(220px,360px)_150px_170px] md:items-end">
<div>
<label for="modelSelect" class="mb-1 block text-xs font-semibold uppercase tracking-wide text-shell-muted">Model</label>
<select id="modelSelect" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus">
<option value="">Loading models...</option>
</select>
</div>
<div>
<label for="thinkSelect" class="mb-1 block text-xs font-semibold uppercase tracking-wide text-shell-muted">Think</label>
<select id="thinkSelect" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus">
<option value="off">Default</option>
<option value="low">Low</option>
<option value="medium">Medium</option>
<option value="high">High</option>
</select>
</div>
<label class="flex h-10 items-center gap-2 rounded-lg border border-shell-line bg-white px-3 text-sm font-medium text-shell-ink">
<input id="useWebSearch" type="checkbox" class="h-4 w-4 rounded border-shell-line text-shell-accent" />
Web search
</label>
</div>
<div class="hidden items-center gap-2 text-xs text-shell-muted md:flex">
<span id="streamStateDot" class="h-2 w-2 rounded-full bg-shell-muted"></span>
<span id="chatStatus">Ready</span>
</div>
</header>
<section id="messages" class="min-h-0 flex-1 overflow-y-auto px-4 py-6">
<div class="mx-auto flex max-w-4xl flex-col gap-5">
<div class="rounded-2xl border border-shell-line bg-shell-panel p-5 shadow-lift">
<p class="text-sm font-semibold uppercase tracking-wide text-shell-accent">sFetch AI</p>
<h1 class="mt-3 text-3xl font-semibold tracking-tight text-shell-ink">Ask, search, and synthesize.</h1>
<div class="mt-5 grid gap-3 md:grid-cols-3">
<button data-prompt="Summarize the strongest search results for cloud model APIs." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">
Summarize indexed evidence
</button>
<button data-prompt="Compare the top sources and tell me what disagrees." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">
Compare conflicting sources
</button>
<button data-prompt="Research the latest context, cite sources, and give me the answer." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">
Research with web context
</button>
</div>
</div>
</div>
</section>
<footer class="border-t border-shell-line bg-shell-panel p-4">
<form id="chatForm" class="mx-auto max-w-4xl">
<div class="rounded-2xl border border-shell-line bg-white p-3 shadow-lift focus-within:border-shell-accent focus-within:shadow-focus">
<textarea id="messageInput" rows="3" placeholder="Message sFetch..." class="max-h-44 w-full resize-none bg-transparent px-2 py-2 text-base text-shell-ink outline-none placeholder:text-shell-muted"></textarea>
<div class="flex flex-col gap-3 border-t border-shell-line pt-3 sm:flex-row sm:items-center sm:justify-between">
<div class="flex flex-wrap gap-2 text-xs text-shell-muted">
<span id="modelHint" class="rounded-full bg-shell-soft px-3 py-1">Model loading</span>
<span id="streamHint" class="rounded-full bg-shell-soft px-3 py-1">Streaming on</span>
</div>
<div class="flex gap-2">
<button type="button" id="clearChat" class="rounded-lg border border-shell-line bg-white px-4 py-2 text-sm font-semibold text-shell-ink transition hover:bg-shell-soft">
Clear
</button>
<button type="submit" id="sendButton" class="rounded-lg bg-shell-accent px-5 py-2 text-sm font-semibold text-white transition hover:bg-shell-accentDark">
Send
</button>
</div>
</div>
</div>
</form>
</footer>
</main>
</div>
<script>
const API_BASE = "http://localhost:8000";
const messagesContainer = document.getElementById("messages");
const chatForm = document.getElementById("chatForm");
const messageInput = document.getElementById("messageInput");
const chatStatus = document.getElementById("chatStatus");
const sendButton = document.getElementById("sendButton");
const clearChat = document.getElementById("clearChat");
const newChatSidebar = document.getElementById("newChatSidebar");
const modelSelect = document.getElementById("modelSelect");
const thinkSelect = document.getElementById("thinkSelect");
const useWebSearch = document.getElementById("useWebSearch");
const configText = document.getElementById("configText");
const modelHint = document.getElementById("modelHint");
const streamStateDot = document.getElementById("streamStateDot");
let messages = [];
let currentAssistant = null;
function escapeHTML(value) {
  // Escape HTML-special characters; null/undefined/falsy values become "".
  const text = String(value || "");
  const replacements = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    '"': "&quot;",
    "'": "&#039;",
  };
  // Single-pass regex replace is equivalent to chained replaceAll with "&" first.
  return text.replace(/[&<>"']/g, (ch) => replacements[ch]);
}
function scrollToBottom() {
  // Pin the message pane to its newest content.
  messagesContainer.scrollTop = messagesContainer.scrollHeight;
}
function setStatus(text, active = false) {
  // Update the header status label; pulse the indicator dot while streaming.
  chatStatus.textContent = text;
  streamStateDot.className = `h-2 w-2 rounded-full ${active ? "pulse-dot bg-shell-accent" : "bg-shell-muted"}`;
}
function formatContent(text) {
  // Escape first, then turn blank-line-separated chunks into <p> blocks
  // with <br> for single newlines.
  const paragraphs = escapeHTML(text).split(/\n{2,}/);
  let html = "";
  for (const paragraph of paragraphs) {
    html += `<p>${paragraph.replaceAll("\n", "<br>")}</p>`;
  }
  return html;
}
function messageShell(role) {
  // Create and append an empty message bubble (user right-aligned, assistant
  // left-aligned with an avatar) and return its text container for filling.
  const isUser = role === "user";
  const article = document.createElement("article");
  article.className = `mx-auto flex w-full max-w-4xl gap-3 ${isUser ? "justify-end" : "justify-start"}`;
  article.innerHTML = `
    ${isUser ? "" : '<div class="mt-1 flex h-8 w-8 shrink-0 items-center justify-center rounded-lg bg-shell-ink text-xs font-bold text-white">s</div>'}
    <div class="${isUser ? "max-w-2xl rounded-2xl bg-shell-ink px-4 py-3 text-white" : "w-full max-w-3xl rounded-2xl border border-shell-line bg-shell-panel px-4 py-4 shadow-lift"}">
      <div class="message-text text-sm leading-7 ${isUser ? "text-white" : "text-shell-ink"}"></div>
    </div>
  `;
  messagesContainer.querySelector(".max-w-4xl")?.appendChild(article);
  return article.querySelector(".message-text");
}
function renderUserMessage(content) {
  // Render a user turn into a fresh bubble and keep the view scrolled down.
  const target = messageShell("user");
  target.innerHTML = formatContent(content);
  scrollToBottom();
}
function createAssistantMessage() {
  // Start a new assistant bubble with a placeholder; streamed deltas fill it
  // in via the module-level currentAssistant accumulator.
  const target = messageShell("assistant");
  target.innerHTML = '<span class="text-shell-muted">Thinking...</span>';
  currentAssistant = {
    content: "",
    thinking: "",
    sources: [],
    target,
    details: null,
    sourcesNode: null,
  };
  scrollToBottom();
}
function updateAssistantContent(delta) {
  // Append a streamed content delta and re-render the assistant bubble.
  if (!currentAssistant) return;
  currentAssistant.content += delta;
  currentAssistant.target.innerHTML = formatContent(currentAssistant.content);
  scrollToBottom();
}
function updateThinking(delta) {
  // Accumulate reasoning-trace deltas into a collapsible <details> panel,
  // created lazily on the first thinking chunk.
  if (!currentAssistant) return;
  currentAssistant.thinking += delta;
  if (!currentAssistant.details) {
    currentAssistant.details = document.createElement("details");
    currentAssistant.details.className = "mx-auto mt-2 w-full max-w-4xl rounded-xl border border-shell-line bg-shell-raised p-3";
    currentAssistant.details.innerHTML = `
      <summary class="cursor-pointer text-sm font-medium text-shell-muted">Reasoning trace</summary>
      <pre class="mt-3 whitespace-pre-wrap text-xs leading-5 text-shell-muted"></pre>
    `;
    messagesContainer.querySelector(".max-w-4xl")?.appendChild(currentAssistant.details);
  }
  currentAssistant.details.querySelector("pre").textContent = currentAssistant.thinking;
}
function renderSources(sources) {
  // Render up to 8 numbered source cards below the current assistant message.
  if (!currentAssistant || !sources.length) return;
  currentAssistant.sources = sources;
  const wrapper = document.createElement("div");
  wrapper.className = "mx-auto mt-2 grid w-full max-w-4xl gap-2 md:grid-cols-2";
  sources.slice(0, 8).forEach((source, index) => {
    const link = document.createElement("a");
    link.href = source.url;
    link.target = "_blank";
    link.rel = "noreferrer noopener";
    link.className = "rounded-xl border border-shell-line bg-shell-panel p-3 text-sm transition hover:border-shell-accent";
    link.innerHTML = `
      <span class="font-semibold text-shell-ink">[${index + 1}] ${escapeHTML(source.title)}</span>
      <span class="mt-1 block truncate text-xs text-shell-muted">${escapeHTML(source.url)}</span>
    `;
    wrapper.appendChild(link);
  });
  messagesContainer.querySelector(".max-w-4xl")?.appendChild(wrapper);
  currentAssistant.sourcesNode = wrapper;
}
async function streamSSE(url, payload, handlers) {
  // POST `payload` as JSON and consume the response as server-sent events,
  // dispatching each parsed event to handlers[eventName](data).
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
  if (!response.ok || !response.body) {
    const data = await response.json().catch(() => ({}));
    throw new Error(data.detail || "Model stream failed.");
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // SSE events are separated by a blank line; keep any trailing partial
    // event in the buffer for the next chunk.
    const events = buffer.split("\n\n");
    buffer = events.pop() || "";
    for (const rawEvent of events) {
      const eventName = (rawEvent.match(/^event: (.+)$/m) || [])[1] || "message";
      // Join multiple data: lines per the SSE spec before parsing as JSON.
      const dataLine = rawEvent
        .split("\n")
        .filter((line) => line.startsWith("data: "))
        .map((line) => line.slice(6))
        .join("\n");
      const data = dataLine ? JSON.parse(dataLine) : {};
      handlers[eventName]?.(data);
    }
  }
}
async function loadModels() {
  // Populate the model <select> from /ai/models and show the configuration
  // state from /ai/config; falls back to the hard-coded default on failure.
  try {
    const [configResponse, modelsResponse] = await Promise.all([
      fetch(`${API_BASE}/ai/config`),
      fetch(`${API_BASE}/ai/models`),
    ]);
    const config = await configResponse.json();
    const payload = await modelsResponse.json();
    if (!modelsResponse.ok) throw new Error(payload.detail || "Unable to load models.");
    modelSelect.innerHTML = "";
    (payload.models || []).forEach((model) => {
      // Ollama responses may use either "name" or "model" for the identifier.
      const name = model.name || model.model;
      if (!name) return;
      const option = document.createElement("option");
      option.value = name;
      option.textContent = name;
      if (name === payload.default_model) option.selected = true;
      modelSelect.appendChild(option);
    });
    if (!modelSelect.options.length) {
      modelSelect.innerHTML = `<option value="${payload.default_model || "gpt-oss:120b"}">${payload.default_model || "gpt-oss:120b"}</option>`;
    }
    configText.textContent = config.configured ? "Ollama Cloud connected" : "Ollama key missing";
    modelHint.textContent = modelSelect.value || payload.default_model || "No model";
  } catch (error) {
    // Keep the UI usable with the default model even when the backend is down.
    modelSelect.innerHTML = '<option value="gpt-oss:120b">gpt-oss:120b</option>';
    configText.textContent = error.message || "Model loading failed";
    modelHint.textContent = "gpt-oss:120b";
  }
}
async function sendMessage(content) {
  // Push the user's turn into the conversation, stream the assistant's reply
  // over SSE, then record the completed reply back into `messages`.
  messages.push({ role: "user", content });
  renderUserMessage(content);
  createAssistantMessage();
  setStatus("Streaming response", true);
  sendButton.disabled = true;
  try {
    // "off" disables thinking mode; any other selector value is passed through.
    let thinkMode;
    if (thinkSelect.value === "off") {
      thinkMode = null;
    } else {
      thinkMode = thinkSelect.value;
    }
    let streamedText = "";
    let streamedSources = [];
    const requestBody = {
      model: modelSelect.value,
      messages,
      think: thinkMode,
      use_web_search: useWebSearch.checked,
      web_result_limit: 5,
    };
    const streamHandlers = {
      meta(data) {
        streamedSources = data.sources || [];
        if (streamedSources.length) renderSources(streamedSources);
      },
      thinking(data) {
        updateThinking(data.delta || "");
      },
      content(data) {
        streamedText += data.delta || "";
        updateAssistantContent(data.delta || "");
      },
      done(data) {
        // Prefer the server's authoritative final content over our own tally.
        streamedText = data.content || streamedText;
        setStatus(`Response from ${data.model || modelSelect.value}`, false);
      },
      error(data) {
        // Re-throwing routes backend-reported errors through the catch below.
        throw new Error(data.detail || "Model stream failed.");
      },
    };
    await streamSSE(`${API_BASE}/ai/chat/stream`, requestBody, streamHandlers);
    messages.push({ role: "assistant", content: streamedText || currentAssistant?.content || "" });
  } catch (error) {
    updateAssistantContent(`\n\n${error.message || "Model stream failed."}`);
    setStatus("Stream failed", false);
  } finally {
    sendButton.disabled = false;
    messageInput.focus();
  }
}
// Reset the chat: empty the in-memory history and restore the welcome panel,
// then rebind the prompt-card shortcuts inserted by the fresh markup.
function clearConversation() {
  messages = [];
  // Welcome markup mirrors the page's initial state; kept inline so a reset
  // works without a round trip.
  messagesContainer.innerHTML = `
    <div class="mx-auto flex max-w-4xl flex-col gap-5">
      <div class="rounded-2xl border border-shell-line bg-shell-panel p-5 shadow-lift">
        <p class="text-sm font-semibold uppercase tracking-wide text-shell-accent">sFetch AI</p>
        <h1 class="mt-3 text-3xl font-semibold tracking-tight text-shell-ink">Ask, search, and synthesize.</h1>
        <div class="mt-5 grid gap-3 md:grid-cols-3">
          <button data-prompt="Summarize the strongest search results for cloud model APIs." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">Summarize indexed evidence</button>
          <button data-prompt="Compare the top sources and tell me what disagrees." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">Compare conflicting sources</button>
          <button data-prompt="Research the latest context, cite sources, and give me the answer." class="prompt-card rounded-xl border border-shell-line bg-shell-raised p-4 text-left text-sm leading-6 text-shell-muted transition hover:border-shell-accent hover:text-shell-ink">Research with web context</button>
        </div>
      </div>
    </div>
  `;
  // Fresh innerHTML discards old listeners, so the prompt cards must be rebound.
  bindPromptCards();
  setStatus("Ready", false);
}
// Attach click handlers to every prompt card so a click copies its canned
// prompt into the message input and focuses it.
function bindPromptCards() {
  const cards = document.querySelectorAll(".prompt-card");
  for (const card of cards) {
    card.addEventListener("click", () => {
      messageInput.value = card.dataset.prompt || "";
      messageInput.focus();
    });
  }
}
// --- Top-level UI wiring and initial data loading ---
// Submit handler: ignore whitespace-only input, otherwise clear the box and
// hand the trimmed message to the streaming pipeline.
function handleChatSubmit(event) {
  event.preventDefault();
  const trimmed = messageInput.value.trim();
  if (!trimmed) {
    messageInput.focus();
    return;
  }
  messageInput.value = "";
  sendMessage(trimmed);
}
// Keep the header hint in sync with the currently selected model.
function handleModelChange() {
  modelHint.textContent = modelSelect.value || "No model";
}
chatForm.addEventListener("submit", handleChatSubmit);
clearChat.addEventListener("click", clearConversation);
newChatSidebar?.addEventListener("click", clearConversation);
modelSelect.addEventListener("change", handleModelChange);
bindPromptCards();
loadModels();
</script>
</body>
</html>
+214 -216
View File
@@ -10,221 +10,180 @@
theme: {
extend: {
colors: {
sfetch: {
bg: "#f8fafc",
surface: "#ffffff",
surfaceSoft: "#f1f5f9",
ink: "#202124",
muted: "#5f6368",
border: "#dadce0",
blue: "#1a73e8",
orange: "#de5833",
green: "#0b8043",
shell: {
bg: "#f7f7f4",
panel: "#ffffff",
raised: "#fbfbf8",
ink: "#171717",
muted: "#6f6f68",
line: "#deded6",
soft: "#eeeeE8",
accent: "#315f95",
accentDark: "#244a75",
warm: "#8a5a20",
},
},
boxShadow: {
search: "0 2px 8px rgba(60, 64, 67, 0.14), 0 1px 3px rgba(60, 64, 67, 0.12)",
panel: "0 16px 40px rgba(15, 23, 42, 0.08)",
lift: "0 18px 55px rgba(23, 23, 23, 0.08)",
focus: "0 0 0 3px rgba(49, 95, 149, 0.16)",
},
},
},
};
</script>
<style>
:root {
color-scheme: light;
}
:root { color-scheme: light; }
body {
background: #f8fafc;
color: #202124;
font-family: Arial, Helvetica, sans-serif;
}
.brand {
font-family: Arial, Helvetica, sans-serif;
font-weight: 700;
letter-spacing: 0;
}
.brand span:nth-child(1) { color: #de5833; }
.brand span:nth-child(2) { color: #1a73e8; }
.brand span:nth-child(3) { color: #188038; }
.brand span:nth-child(4) { color: #fbbc04; }
.brand span:nth-child(5) { color: #1a73e8; }
.brand span:nth-child(6) { color: #de5833; }
.modal-open {
overflow: hidden;
background: #f7f7f4;
color: #171717;
font-family: Inter, ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
}
.modal-open { overflow: hidden; }
</style>
</head>
<body class="min-h-screen">
<main class="flex min-h-screen flex-col">
<header class="flex items-center justify-between px-5 py-4 text-sm text-sfetch-muted sm:px-8">
<a href="./index.html" class="brand text-2xl" aria-label="sFetch home">
<span>s</span><span>F</span><span>e</span><span>t</span><span>c</span><span>h</span>
<body class="min-h-screen overflow-hidden">
<div class="grid h-screen grid-cols-1 lg:grid-cols-[280px_1fr]">
<aside class="hidden border-r border-shell-line bg-shell-raised lg:flex lg:flex-col">
<div class="border-b border-shell-line px-5 py-5">
<a href="./index.html" class="text-2xl font-semibold tracking-tight text-shell-ink">sFetch</a>
<p id="aiConfigText" class="mt-2 text-xs text-shell-muted">Checking Ollama Cloud...</p>
</div>
<nav class="space-y-1 p-3 text-sm">
<a href="./index.html" class="flex items-center justify-between rounded-lg bg-shell-soft px-3 py-2 font-medium text-shell-ink">
AI Search
<span class="text-xs">active</span>
</a>
<button
id="openCrawlerModal"
class="rounded-full border border-sfetch-border bg-white px-4 py-2 font-medium text-sfetch-ink transition hover:border-sfetch-orange hover:text-sfetch-orange"
>
Index tools
<a href="./ai.html" class="flex items-center justify-between rounded-lg px-3 py-2 font-medium text-shell-muted hover:bg-shell-soft hover:text-shell-ink">
AI Chat
<span class="text-xs">stream</span>
</a>
<button id="openCrawlerModal" class="flex w-full items-center justify-between rounded-lg px-3 py-2 font-medium text-shell-muted hover:bg-shell-soft hover:text-shell-ink">
Index Admin
<span class="text-xs">crawl</span>
</button>
</header>
</nav>
<section class="mx-auto flex w-full max-w-5xl flex-1 flex-col items-center justify-center px-5 pb-24 pt-10">
<h1 class="brand text-center text-6xl leading-none sm:text-7xl">
<span>s</span><span>F</span><span>e</span><span>t</span><span>c</span><span>h</span>
</h1>
<form id="searchForm" class="mt-9 w-full max-w-2xl">
<label
for="searchInput"
class="flex min-h-14 items-center gap-3 rounded-full border border-sfetch-border bg-white px-5 transition focus-within:border-transparent focus-within:shadow-search"
>
<svg class="h-5 w-5 shrink-0 text-sfetch-muted" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
<circle cx="11" cy="11" r="6"></circle>
<path d="M20 20L16.65 16.65"></path>
</svg>
<input
id="searchInput"
type="text"
autocomplete="off"
placeholder="Search sFetch"
class="w-full bg-transparent text-base text-sfetch-ink outline-none placeholder:text-sfetch-muted sm:text-lg"
/>
</label>
<div class="mt-6 flex flex-wrap items-center justify-center gap-3">
<button
type="submit"
class="rounded-md bg-sfetch-blue px-5 py-2.5 text-sm font-medium text-white transition hover:bg-[#1558b0]"
>
sFetch Search
</button>
<button
type="button"
data-search-type="image"
class="rounded-md border border-sfetch-border bg-white px-5 py-2.5 text-sm font-medium text-sfetch-ink transition hover:border-sfetch-blue hover:text-sfetch-blue"
>
Images
</button>
<button
type="button"
data-search-type="video"
class="rounded-md border border-sfetch-border bg-white px-5 py-2.5 text-sm font-medium text-sfetch-ink transition hover:border-sfetch-blue hover:text-sfetch-blue"
>
Videos
</button>
<div class="mt-auto border-t border-shell-line p-4">
<section class="rounded-2xl border border-shell-line bg-white p-4">
<p class="text-xs font-semibold uppercase tracking-wide text-shell-muted">Index</p>
<p id="statsSummary" class="mt-2 text-sm leading-5 text-shell-muted">Checking index...</p>
<div class="mt-3 h-2 overflow-hidden rounded-full bg-shell-soft">
<div id="seedProgress" class="h-full w-0 bg-shell-accent transition-all duration-300"></div>
</div>
</form>
<section class="mt-12 w-full max-w-3xl rounded-lg border border-sfetch-border bg-white p-4 shadow-panel" aria-label="Index controls">
<div class="flex flex-col gap-4 sm:flex-row sm:items-center sm:justify-between">
<div>
<p class="text-xs font-semibold uppercase text-sfetch-orange">Index</p>
<p id="statsSummary" class="mt-1 text-sm text-sfetch-muted">Checking index...</p>
</div>
<div class="flex flex-wrap gap-2">
<button
id="seedTopSites"
class="rounded-md bg-sfetch-orange px-4 py-2 text-sm font-medium text-white transition hover:bg-[#c44724]"
>
<p id="seedStatus" class="mt-3 text-xs leading-5 text-shell-muted">Seed status unavailable.</p>
<button id="seedTopSites" class="mt-3 w-full rounded-lg bg-shell-ink px-3 py-2 text-sm font-semibold text-white transition hover:bg-black">
Seed top 1000
</button>
<button
id="openCrawlerModalSecondary"
class="rounded-md border border-sfetch-border bg-white px-4 py-2 text-sm font-medium text-sfetch-ink transition hover:border-sfetch-orange hover:text-sfetch-orange"
>
Custom crawl
</section>
</div>
</aside>
<main class="flex min-h-0 flex-col">
<header class="flex flex-col gap-3 border-b border-shell-line bg-shell-panel px-4 py-3 md:flex-row md:items-center md:justify-between">
<div class="flex items-center justify-between gap-3 lg:hidden">
<a href="./index.html" class="text-xl font-semibold tracking-tight text-shell-ink">sFetch</a>
<a href="./ai.html" class="rounded-lg border border-shell-line px-3 py-2 text-sm font-medium text-shell-muted">AI Chat</a>
</div>
<div class="grid gap-3 md:grid-cols-[minmax(220px,360px)_150px] md:items-end">
<div>
<label for="modelSelect" class="mb-1 block text-xs font-semibold uppercase tracking-wide text-shell-muted">Model</label>
<select id="modelSelect" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus">
<option value="">Loading models...</option>
</select>
</div>
<label class="flex h-10 items-center gap-2 rounded-lg border border-shell-line bg-white px-3 text-sm font-medium text-shell-ink">
<input id="includeAI" type="checkbox" checked class="h-4 w-4 rounded border-shell-line text-shell-accent" />
AI answer
</label>
</div>
<div class="hidden items-center gap-2 text-xs text-shell-muted md:flex">
<span class="h-2 w-2 rounded-full bg-shell-accent"></span>
<span id="modelHint">Ready</span>
</div>
</header>
<section class="min-h-0 flex-1 overflow-y-auto px-4 py-8">
<div class="mx-auto flex max-w-4xl flex-col gap-6">
<section class="rounded-3xl border border-shell-line bg-shell-panel p-6 shadow-lift">
<p class="text-sm font-semibold uppercase tracking-wide text-shell-accent">AI Search Workspace</p>
<h1 class="mt-4 max-w-3xl text-4xl font-semibold tracking-tight text-shell-ink md:text-5xl">
Search the index. Stream the answer.
</h1>
<form id="searchForm" class="mt-8">
<div class="rounded-2xl border border-shell-line bg-white p-3 shadow-lift focus-within:border-shell-accent focus-within:shadow-focus">
<textarea id="searchInput" rows="4" placeholder="Ask a question or enter a search query..." class="max-h-44 w-full resize-none bg-transparent px-2 py-2 text-lg text-shell-ink outline-none placeholder:text-shell-muted"></textarea>
<div class="flex flex-col gap-3 border-t border-shell-line pt-3 sm:flex-row sm:items-center sm:justify-between">
<div class="flex flex-wrap gap-2 text-xs text-shell-muted">
<button type="button" data-search-type="all" class="mode-btn rounded-full bg-shell-soft px-3 py-1 font-medium text-shell-ink">All</button>
<button type="button" data-search-type="image" class="mode-btn rounded-full px-3 py-1 font-medium hover:bg-shell-soft hover:text-shell-ink">Images</button>
<button type="button" data-search-type="video" class="mode-btn rounded-full px-3 py-1 font-medium hover:bg-shell-soft hover:text-shell-ink">Videos</button>
</div>
<button type="submit" class="rounded-lg bg-shell-accent px-5 py-2 text-sm font-semibold text-white transition hover:bg-shell-accentDark">
Search
</button>
</div>
</div>
<div class="mt-4 h-2 overflow-hidden rounded-full bg-sfetch-surfaceSoft">
<div id="seedProgress" class="h-full w-0 bg-sfetch-orange transition-all duration-300"></div>
</form>
</section>
<div class="grid gap-4 md:grid-cols-3">
<button data-query="What are the latest results in the local index about AI search?" class="query-card rounded-2xl border border-shell-line bg-shell-panel p-4 text-left text-sm leading-6 text-shell-muted shadow-lift transition hover:border-shell-accent hover:text-shell-ink">
AI search status
</button>
<button data-query="Compare the best indexed sources for cloud model APIs." class="query-card rounded-2xl border border-shell-line bg-shell-panel p-4 text-left text-sm leading-6 text-shell-muted shadow-lift transition hover:border-shell-accent hover:text-shell-ink">
Compare sources
</button>
<button data-query="Find indexed pages about Python and summarize the useful sources." class="query-card rounded-2xl border border-shell-line bg-shell-panel p-4 text-left text-sm leading-6 text-shell-muted shadow-lift transition hover:border-shell-accent hover:text-shell-ink">
Summarize Python sources
</button>
</div>
</div>
<p id="seedStatus" class="mt-3 min-h-5 text-sm text-sfetch-muted">Top-site seed status unavailable.</p>
</section>
</section>
<footer class="border-t border-sfetch-border bg-white px-5 py-4 text-center text-xs text-sfetch-muted">
&copy; 2026 sFetch
</footer>
</main>
</div>
<div
id="crawlerModal"
class="pointer-events-none fixed inset-0 z-30 flex items-center justify-center bg-slate-900/35 px-4 opacity-0 transition"
aria-hidden="true"
>
<div class="w-full max-w-xl rounded-lg border border-sfetch-border bg-white p-5 shadow-panel">
<div class="flex items-center justify-between gap-4 border-b border-sfetch-border pb-4">
<h2 class="text-lg font-semibold text-sfetch-ink">Custom crawl</h2>
<button
id="closeCrawlerModal"
class="flex h-9 w-9 items-center justify-center rounded-full text-sfetch-muted transition hover:bg-sfetch-surfaceSoft hover:text-sfetch-ink"
aria-label="Close crawler modal"
>
<div id="crawlerModal" class="pointer-events-none fixed inset-0 z-30 flex items-center justify-center bg-neutral-950/40 px-4 opacity-0 transition" aria-hidden="true">
<div class="w-full max-w-xl rounded-2xl border border-shell-line bg-white p-5 shadow-lift">
<div class="flex items-center justify-between gap-4 border-b border-shell-line pb-4">
<h2 class="text-lg font-semibold text-shell-ink">Index Admin</h2>
<button id="closeCrawlerModal" class="flex h-9 w-9 items-center justify-center rounded-lg text-shell-muted transition hover:bg-shell-soft hover:text-shell-ink" aria-label="Close crawler modal">
X
</button>
</div>
<form id="crawlerForm" class="mt-5 space-y-4">
<div>
<label for="seedUrls" class="mb-2 block text-sm font-medium text-sfetch-ink">Seed URLs</label>
<textarea
id="seedUrls"
rows="6"
placeholder="https://example.com&#10;https://docs.python.org/"
class="w-full rounded-md border border-sfetch-border bg-white px-3 py-2 text-sm text-sfetch-ink outline-none transition focus:border-sfetch-blue focus:ring-2 focus:ring-blue-100"
></textarea>
<label for="seedUrls" class="mb-2 block text-sm font-medium text-shell-ink">Seed URLs</label>
<textarea id="seedUrls" rows="6" placeholder="https://example.com&#10;https://docs.python.org/" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus"></textarea>
</div>
<div class="grid gap-4 sm:grid-cols-2">
<div>
<label for="crawlDepth" class="mb-2 block text-sm font-medium text-sfetch-ink">Max depth</label>
<input
id="crawlDepth"
type="number"
min="0"
max="5"
value="2"
class="w-full rounded-md border border-sfetch-border bg-white px-3 py-2 text-sm text-sfetch-ink outline-none transition focus:border-sfetch-blue focus:ring-2 focus:ring-blue-100"
/>
<label for="crawlDepth" class="mb-2 block text-sm font-medium text-shell-ink">Max depth</label>
<input id="crawlDepth" type="number" min="0" max="5" value="2" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus" />
</div>
<div>
<label for="maxPagesPerDomain" class="mb-2 block text-sm font-medium text-sfetch-ink">Pages per domain</label>
<input
id="maxPagesPerDomain"
type="number"
min="1"
max="500"
value="50"
class="w-full rounded-md border border-sfetch-border bg-white px-3 py-2 text-sm text-sfetch-ink outline-none transition focus:border-sfetch-blue focus:ring-2 focus:ring-blue-100"
/>
<label for="maxPagesPerDomain" class="mb-2 block text-sm font-medium text-shell-ink">Pages per domain</label>
<input id="maxPagesPerDomain" type="number" min="1" max="500" value="50" class="w-full rounded-lg border border-shell-line bg-white px-3 py-2 text-sm text-shell-ink outline-none transition focus:border-shell-accent focus:shadow-focus" />
</div>
</div>
<label class="flex items-center gap-3 text-sm text-sfetch-ink">
<input id="sameDomainOnly" type="checkbox" checked class="h-4 w-4 rounded border-sfetch-border text-sfetch-blue" />
<label class="flex items-center gap-3 text-sm text-shell-ink">
<input id="sameDomainOnly" type="checkbox" checked class="h-4 w-4 rounded border-shell-line text-shell-accent" />
Same domain only
</label>
<p id="crawlerStatus" class="min-h-5 text-sm text-sfetch-muted"></p>
<p id="crawlerStatus" class="min-h-5 text-sm text-shell-muted"></p>
<div class="flex flex-col-reverse gap-3 sm:flex-row sm:justify-end">
<button
type="button"
id="cancelCrawler"
class="rounded-md border border-sfetch-border bg-white px-4 py-2 text-sm font-medium text-sfetch-ink transition hover:bg-sfetch-surfaceSoft"
>
<button type="button" id="cancelCrawler" class="rounded-lg border border-shell-line bg-white px-4 py-2 text-sm font-semibold text-shell-ink transition hover:bg-shell-soft">
Cancel
</button>
<button
type="submit"
class="rounded-md bg-sfetch-blue px-4 py-2 text-sm font-medium text-white transition hover:bg-[#1558b0]"
>
<button type="submit" class="rounded-lg bg-shell-accent px-4 py-2 text-sm font-semibold text-white transition hover:bg-shell-accentDark">
Launch crawl
</button>
</div>
@@ -237,8 +196,11 @@
const searchForm = document.getElementById("searchForm");
const searchInput = document.getElementById("searchInput");
const modelSelect = document.getElementById("modelSelect");
const includeAI = document.getElementById("includeAI");
const aiConfigText = document.getElementById("aiConfigText");
const modelHint = document.getElementById("modelHint");
const openCrawlerModal = document.getElementById("openCrawlerModal");
const openCrawlerModalSecondary = document.getElementById("openCrawlerModalSecondary");
const closeCrawlerModal = document.getElementById("closeCrawlerModal");
const cancelCrawler = document.getElementById("cancelCrawler");
const crawlerModal = document.getElementById("crawlerModal");
@@ -253,28 +215,69 @@
const seedProgress = document.getElementById("seedProgress");
const seedTopSites = document.getElementById("seedTopSites");
function runSearch(type = "all") {
let selectedType = "all";
function runSearch(type = selectedType) {
const query = searchInput.value.trim();
if (!query) {
searchInput.focus();
return;
}
const params = new URLSearchParams({ q: query });
if (type !== "all") {
params.set("type", type);
if (type !== "all") params.set("type", type);
if (includeAI.checked && type === "all") {
params.set("ai", "1");
if (modelSelect.value) params.set("model", modelSelect.value);
}
window.location.href = `results.html?${params.toString()}`;
}
function setType(type) {
selectedType = type;
document.querySelectorAll(".mode-btn").forEach((button) => {
const active = button.dataset.searchType === type;
button.classList.toggle("bg-shell-soft", active);
button.classList.toggle("text-shell-ink", active);
});
}
function setModalOpen(isOpen) {
crawlerModal.classList.toggle("opacity-0", !isOpen);
crawlerModal.classList.toggle("pointer-events-none", !isOpen);
crawlerModal.setAttribute("aria-hidden", String(!isOpen));
document.body.classList.toggle("modal-open", isOpen);
if (isOpen) {
seedUrlsField.focus();
} else {
crawlerStatus.textContent = "";
if (isOpen) seedUrlsField.focus();
else crawlerStatus.textContent = "";
}
async function loadModels() {
try {
const [configResponse, modelsResponse] = await Promise.all([
fetch(`${API_BASE}/ai/config`),
fetch(`${API_BASE}/ai/models`),
]);
const config = await configResponse.json();
const payload = await modelsResponse.json();
const models = payload.models || [];
modelSelect.innerHTML = "";
models.forEach((model) => {
const name = model.name || model.model;
if (!name) return;
const option = document.createElement("option");
option.value = name;
option.textContent = name;
if (name === payload.default_model) option.selected = true;
modelSelect.appendChild(option);
});
if (!models.length) {
modelSelect.innerHTML = `<option value="${payload.default_model || "gpt-oss:120b"}">${payload.default_model || "gpt-oss:120b"}</option>`;
}
aiConfigText.textContent = config.configured ? "Ollama Cloud connected" : "Ollama key missing";
modelHint.textContent = modelSelect.value || payload.default_model || "Ready";
} catch {
modelSelect.innerHTML = '<option value="gpt-oss:120b">gpt-oss:120b</option>';
aiConfigText.textContent = "Model loading failed";
modelHint.textContent = "Model fallback";
}
}
@@ -282,11 +285,9 @@
try {
const response = await fetch(`${API_BASE}/stats`);
const stats = await response.json();
if (!response.ok) {
throw new Error();
}
const lastIndexed = stats.last_indexed_at ? `, last indexed ${stats.last_indexed_at}` : "";
statsSummary.textContent = `${stats.total_pages.toLocaleString()} pages${lastIndexed}`;
if (!response.ok) throw new Error();
const lastIndexed = stats.last_indexed_at ? `Last indexed ${stats.last_indexed_at}` : "No timestamp";
statsSummary.textContent = `${stats.total_pages.toLocaleString()} pages. ${lastIndexed}.`;
} catch {
statsSummary.textContent = "Backend unavailable";
}
@@ -296,17 +297,15 @@
try {
const response = await fetch(`${API_BASE}/crawl/top-sites/status`);
const status = await response.json();
if (!response.ok) {
throw new Error();
}
if (!response.ok) throw new Error();
const total = Number(status.total || 0);
const indexed = Number(status.indexed || 0);
const percent = total > 0 && status.state === "complete" ? 100 : total > 0 ? Math.min(96, (indexed / total) * 100) : 0;
seedProgress.style.width = `${percent}%`;
seedStatus.textContent = `${status.message || "Idle"}${status.source ? ` Source: ${status.source}` : ""}`;
seedStatus.textContent = status.message || "Idle";
} catch {
seedProgress.style.width = "0%";
seedStatus.textContent = "Top-site seed status unavailable.";
seedStatus.textContent = "Seed status unavailable.";
}
}
@@ -316,13 +315,11 @@
try {
const response = await fetch(`${API_BASE}/crawl/top-sites`, { method: "POST" });
const data = await response.json().catch(() => ({}));
if (!response.ok) {
throw new Error(data.detail || "Unable to queue top-site seed.");
}
if (!response.ok) throw new Error(data.detail || "Unable to queue seed.");
seedStatus.textContent = "Top-site seed queued.";
await refreshSeedStatus();
} catch (error) {
seedStatus.textContent = error.message || "Unable to queue top-site seed.";
seedStatus.textContent = error.message || "Unable to queue seed.";
} finally {
setTimeout(() => {
seedTopSites.disabled = false;
@@ -333,66 +330,67 @@
async function handleCrawlerSubmit(event) {
event.preventDefault();
const seedUrls = seedUrlsField.value
.split("\n")
.map((value) => value.trim())
.filter(Boolean);
const seedUrls = seedUrlsField.value.split("\n").map((value) => value.trim()).filter(Boolean);
if (!seedUrls.length) {
crawlerStatus.textContent = "Add at least one seed URL.";
return;
}
const payload = {
seed_urls: seedUrls,
max_depth: Number.parseInt(crawlDepthField.value, 10) || 0,
max_pages_per_domain: Number.parseInt(maxPagesPerDomainField.value, 10) || 1,
same_domain_only: sameDomainOnlyField.checked,
};
crawlerStatus.textContent = "Starting crawl...";
try {
const response = await fetch(`${API_BASE}/crawl`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
body: JSON.stringify({
seed_urls: seedUrls,
max_depth: Number.parseInt(crawlDepthField.value, 10) || 0,
max_pages_per_domain: Number.parseInt(maxPagesPerDomainField.value, 10) || 1,
same_domain_only: sameDomainOnlyField.checked,
}),
});
const data = await response.json().catch(() => ({}));
if (!response.ok) {
throw new Error(data.detail || "Unable to start the crawler.");
}
if (!response.ok) throw new Error(data.detail || "Unable to start crawler.");
crawlerStatus.textContent = `Crawl started for ${seedUrls.length} seed URL${seedUrls.length === 1 ? "" : "s"}.`;
setTimeout(() => {
setModalOpen(false);
refreshStats();
}, 900);
} catch (error) {
crawlerStatus.textContent = error.message || "Unable to start the crawler.";
crawlerStatus.textContent = error.message || "Unable to start crawler.";
}
}
searchForm.addEventListener("submit", (event) => {
event.preventDefault();
runSearch("all");
runSearch();
});
document.querySelectorAll("[data-search-type]").forEach((button) => {
button.addEventListener("click", () => runSearch(button.dataset.searchType || "all"));
document.querySelectorAll(".mode-btn").forEach((button) => {
button.addEventListener("click", () => {
setType(button.dataset.searchType || "all");
});
});
openCrawlerModal.addEventListener("click", () => setModalOpen(true));
openCrawlerModalSecondary.addEventListener("click", () => setModalOpen(true));
document.querySelectorAll(".query-card").forEach((button) => {
button.addEventListener("click", () => {
searchInput.value = button.dataset.query || "";
searchInput.focus();
});
});
openCrawlerModal?.addEventListener("click", () => setModalOpen(true));
closeCrawlerModal.addEventListener("click", () => setModalOpen(false));
cancelCrawler.addEventListener("click", () => setModalOpen(false));
crawlerModal.addEventListener("click", (event) => {
if (event.target === crawlerModal) {
setModalOpen(false);
}
if (event.target === crawlerModal) setModalOpen(false);
});
seedTopSites.addEventListener("click", seedTopSitesNow);
crawlerForm.addEventListener("submit", handleCrawlerSubmit);
modelSelect.addEventListener("change", () => {
modelHint.textContent = modelSelect.value || "Ready";
});
setType("all");
loadModels();
refreshStats();
refreshSeedStatus();
setInterval(refreshStats, 10000);
+312 -160
View File
@@ -10,92 +10,62 @@
theme: {
extend: {
colors: {
sfetch: {
bg: "#f8fafc",
app: {
bg: "#f6f8fb",
surface: "#ffffff",
surfaceSoft: "#f1f5f9",
ink: "#202124",
muted: "#5f6368",
border: "#dadce0",
blue: "#1a73e8",
orange: "#de5833",
green: "#0b8043",
ink: "#111827",
muted: "#5f6b7a",
border: "#d8dee8",
soft: "#eef2f7",
primary: "#174ea6",
primaryDark: "#123b7d",
success: "#137333",
warning: "#b06000",
},
},
boxShadow: {
search: "0 2px 8px rgba(60, 64, 67, 0.14), 0 1px 3px rgba(60, 64, 67, 0.12)",
panel: "0 16px 40px rgba(15, 23, 42, 0.08)",
panel: "0 18px 45px rgba(15, 23, 42, 0.08)",
focus: "0 0 0 3px rgba(23, 78, 166, 0.14)",
},
},
},
};
</script>
<style>
:root {
color-scheme: light;
}
:root { color-scheme: light; }
body {
background: #ffffff;
color: #202124;
font-family: Arial, Helvetica, sans-serif;
color: #111827;
font-family: Inter, ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
}
.brand {
font-family: Arial, Helvetica, sans-serif;
font-weight: 700;
letter-spacing: 0;
}
.brand span:nth-child(1) { color: #de5833; }
.brand span:nth-child(2) { color: #1a73e8; }
.brand span:nth-child(3) { color: #188038; }
.brand span:nth-child(4) { color: #fbbc04; }
.brand span:nth-child(5) { color: #1a73e8; }
.brand span:nth-child(6) { color: #de5833; }
.skeleton {
background: linear-gradient(90deg, #eef2f7 25%, #f8fafc 37%, #eef2f7 63%);
background-size: 400% 100%;
animation: shimmer 1.4s ease infinite;
}
mark {
background: rgba(251, 188, 4, 0.28);
color: #202124;
background: rgba(23, 78, 166, 0.12);
color: #111827;
padding: 0 0.12rem;
border-radius: 0.2rem;
}
@keyframes shimmer {
0% { background-position: 100% 50%; }
100% { background-position: 0 50%; }
}
@keyframes barrel-roll {
0% { transform: rotateZ(0deg); }
100% { transform: rotateZ(360deg); }
}
.barrel-roll {
animation: barrel-roll 1.2s cubic-bezier(0.25, 0.46, 0.45, 0.94) forwards;
}
</style>
</head>
<body class="min-h-screen">
<div class="min-h-screen">
<header class="sticky top-0 z-20 border-b border-sfetch-border bg-white/95 backdrop-blur">
<div class="mx-auto flex max-w-6xl flex-col gap-4 px-5 py-4 sm:flex-row sm:items-center">
<a href="./index.html" class="brand text-3xl leading-none" aria-label="sFetch home">
<span>s</span><span>F</span><span>e</span><span>t</span><span>c</span><span>h</span>
</a>
<header class="sticky top-0 z-20 border-b border-app-border bg-white/95 backdrop-blur">
<div class="mx-auto flex max-w-6xl flex-col gap-4 px-5 py-4 lg:flex-row lg:items-center">
<a href="./index.html" class="text-2xl font-semibold tracking-tight text-app-ink">sFetch</a>
<form id="searchForm" class="flex flex-1 items-center gap-3">
<label
for="searchInput"
class="flex min-h-12 flex-1 items-center gap-3 rounded-full border border-sfetch-border bg-white px-4 transition focus-within:border-transparent focus-within:shadow-search"
class="flex min-h-12 flex-1 items-center gap-3 rounded-lg border border-app-border bg-white px-4 transition focus-within:border-app-primary focus-within:shadow-focus"
>
<svg class="h-5 w-5 shrink-0 text-sfetch-muted" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
<svg class="h-5 w-5 shrink-0 text-app-muted" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
<circle cx="11" cy="11" r="6"></circle>
<path d="M20 20L16.65 16.65"></path>
</svg>
@@ -103,54 +73,78 @@
id="searchInput"
type="text"
autocomplete="off"
class="w-full bg-transparent text-base text-sfetch-ink outline-none placeholder:text-sfetch-muted"
class="w-full bg-transparent text-base text-app-ink outline-none placeholder:text-app-muted"
placeholder="Search sFetch"
/>
</label>
<button
id="searchButton"
type="submit"
class="rounded-md bg-sfetch-blue px-5 py-3 text-sm font-medium text-white transition hover:bg-[#1558b0]"
>
<button type="submit" class="rounded-md bg-app-primary px-5 py-3 text-sm font-semibold text-white transition hover:bg-app-primaryDark">
Search
</button>
</form>
<a
href="./index.html"
class="rounded-md border border-sfetch-border bg-white px-4 py-2 text-sm font-medium text-sfetch-ink transition hover:border-sfetch-orange hover:text-sfetch-orange"
>
Index tools
</a>
<nav class="flex flex-wrap gap-2 text-sm">
<a href="./index.html" class="rounded-md px-3 py-2 font-medium text-app-muted transition hover:bg-app-soft hover:text-app-ink">Search Home</a>
<a href="./ai.html" class="rounded-md px-3 py-2 font-medium text-app-muted transition hover:bg-app-soft hover:text-app-ink">AI Chat</a>
</nav>
</div>
<nav class="mx-auto flex max-w-6xl gap-7 px-5 text-sm" aria-label="Search verticals">
<button id="tabAll" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-sfetch-muted">All</button>
<button id="tabImages" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-sfetch-muted">Images</button>
<button id="tabVideos" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-sfetch-muted">Videos</button>
<button id="tabAll" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-app-muted">All</button>
<button id="tabImages" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-app-muted">Images</button>
<button id="tabVideos" class="tab-btn border-b-2 border-transparent pb-3 font-medium text-app-muted">Videos</button>
</nav>
</header>
<main class="mx-auto max-w-6xl px-5 py-8">
<p id="metaText" class="text-sm text-sfetch-muted"></p>
<p id="metaText" class="text-sm text-app-muted"></p>
<section id="aiPanel" class="mt-5 hidden max-w-4xl rounded-lg border border-app-border bg-app-bg p-5">
<div class="flex flex-col gap-4 lg:flex-row lg:items-start lg:justify-between">
<div>
<p class="text-sm font-semibold uppercase tracking-wide text-app-primary">AI answer</p>
<p id="aiStatus" class="mt-1 text-sm text-app-muted">Preparing answer...</p>
</div>
<div class="grid gap-3 sm:grid-cols-[220px_150px_auto] sm:items-end">
<div>
<label for="aiModelSelect" class="mb-1 block text-xs font-medium text-app-muted">Model</label>
<select id="aiModelSelect" class="w-full rounded-md border border-app-border bg-white px-3 py-2 text-sm text-app-ink outline-none focus:border-app-primary focus:shadow-focus">
<option value="">Loading models...</option>
</select>
</div>
<label class="flex items-center gap-2 rounded-md border border-app-border bg-white px-3 py-2 text-sm text-app-ink">
<input id="aiUseWeb" type="checkbox" checked class="h-4 w-4 rounded border-app-border text-app-primary" />
Web context
</label>
<button id="aiRegenerate" class="rounded-md bg-app-ink px-4 py-2 text-sm font-semibold text-white transition hover:bg-black">
Generate
</button>
</div>
</div>
<div id="aiAnswer" class="mt-5 whitespace-pre-wrap text-sm leading-7 text-app-ink"></div>
<details id="aiThinkingWrap" class="mt-4 hidden rounded-md border border-app-border bg-white p-3">
<summary class="cursor-pointer text-sm font-medium text-app-muted">Reasoning trace</summary>
<pre id="aiThinking" class="mt-3 whitespace-pre-wrap text-xs leading-5 text-app-muted"></pre>
</details>
<div id="aiSources" class="mt-5 grid gap-2"></div>
</section>
<section id="resultsContainer" class="mt-6"></section>
<nav id="pagination" class="mt-10 flex items-center justify-start gap-2" aria-label="Pagination"></nav>
</main>
</div>
<div id="imageModal" class="fixed inset-0 z-50 hidden bg-slate-950/60">
<div class="absolute inset-y-0 right-0 w-full max-w-4xl border-l border-sfetch-border bg-white shadow-panel">
<div class="flex items-center justify-between border-b border-sfetch-border px-6 py-4">
<h3 id="modalTitle" class="truncate text-base font-medium text-sfetch-ink">Image preview</h3>
<button id="closeModal" class="flex h-9 w-9 items-center justify-center rounded-full text-sfetch-muted transition hover:bg-sfetch-surfaceSoft hover:text-sfetch-ink">
<div class="absolute inset-y-0 right-0 w-full max-w-4xl border-l border-app-border bg-white shadow-panel">
<div class="flex items-center justify-between border-b border-app-border px-6 py-4">
<h3 id="modalTitle" class="truncate text-base font-medium text-app-ink">Image preview</h3>
<button id="closeModal" class="flex h-9 w-9 items-center justify-center rounded-md text-app-muted transition hover:bg-app-soft hover:text-app-ink">
X
</button>
</div>
<div class="h-[calc(100vh-73px)] overflow-y-auto px-6 py-5">
<div class="overflow-hidden rounded-lg bg-sfetch-surfaceSoft">
<div class="overflow-hidden rounded-lg bg-app-soft">
<img id="modalImage" class="max-h-[62vh] w-full object-contain" alt="Preview" />
</div>
<div class="mt-6">
<h4 class="mb-3 text-sm font-medium text-sfetch-muted">Related images</h4>
<h4 class="mb-3 text-sm font-medium text-app-muted">Related images</h4>
<div id="relatedImages" class="grid grid-cols-2 gap-3 sm:grid-cols-3"></div>
</div>
</div>
@@ -169,6 +163,15 @@
// Cached DOM handles, grouped by feature area.
// Vertical tabs (All / Images / Videos).
const tabAll = document.getElementById("tabAll");
const tabImages = document.getElementById("tabImages");
const tabVideos = document.getElementById("tabVideos");
// AI answer panel: status line, streamed answer, sources, model picker,
// web-context toggle, regenerate button, and the collapsible reasoning trace.
const aiPanel = document.getElementById("aiPanel");
const aiStatus = document.getElementById("aiStatus");
const aiAnswer = document.getElementById("aiAnswer");
const aiSources = document.getElementById("aiSources");
const aiModelSelect = document.getElementById("aiModelSelect");
const aiUseWeb = document.getElementById("aiUseWeb");
const aiRegenerate = document.getElementById("aiRegenerate");
const aiThinkingWrap = document.getElementById("aiThinkingWrap");
const aiThinking = document.getElementById("aiThinking");
// Image preview modal.
const imageModal = document.getElementById("imageModal");
const closeModalBtn = document.getElementById("closeModal");
const modalImage = document.getElementById("modalImage");
@@ -186,29 +189,35 @@
.replaceAll("'", "&#039;");
}
// Fresh view of the current page's query string.
function getParams() {
  return new URLSearchParams(window.location.search);
}
// Active search vertical from the URL; anything unrecognized means "all".
// (Fix: diff-merge residue had the old and new function bodies interleaved,
// leaving getParams nested inside a broken getTypeFromUrl.)
function getTypeFromUrl() {
  const typeValue = getParams().get("type");
  return ["image", "video", "all"].includes(typeValue) ? typeValue : "all";
}
// Trimmed search query from the URL; empty string when absent.
// (Fix: merge residue left two return statements, the second unreachable.)
function getQueryFromUrl() {
  return (getParams().get("q") || "").trim();
}
// 1-based page number from the URL; missing, non-numeric, or sub-1 values
// fall back to page 1.
// (Fix: merge residue declared `const page` twice — a SyntaxError.)
function getPageFromUrl() {
  const page = Number.parseInt(getParams().get("page") || "1", 10);
  return Number.isNaN(page) || page < 1 ? 1 : page;
}
// Mirror the current search state (query, page, vertical, AI params) into
// the address bar without pushing a history entry.
// (Fix: merge residue declared `const params` twice — a SyntaxError.)
function updateUrl(query, page) {
  const params = getParams();
  params.set("q", query);
  if (page > 1) {
    params.set("page", String(page));
  } else {
    params.delete("page");
  }
  if (currentType === "all") {
    params.delete("type");
    // Only the "all" vertical shows the AI panel, so persist the AI flag
    // and the chosen model to keep URLs shareable.
    params.set("ai", "1");
    if (aiModelSelect.value) {
      params.set("model", aiModelSelect.value);
    }
  } else {
    params.set("type", currentType);
  }
  window.history.replaceState({}, "", `${window.location.pathname}?${params.toString()}`);
}
@@ -219,13 +228,43 @@
[tabVideos, currentType === "video"],
];
tabs.forEach(([tab, active]) => {
tab.classList.toggle("border-sfetch-orange", active);
tab.classList.toggle("text-sfetch-ink", active);
tab.classList.toggle("border-app-primary", active);
tab.classList.toggle("text-app-ink", active);
tab.classList.toggle("border-transparent", !active);
tab.classList.toggle("text-sfetch-muted", !active);
tab.classList.toggle("text-app-muted", !active);
});
}
// Populate the AI model <select> from GET /ai/models. Preselects the model
// named in the URL (?model=...), else the backend's default_model. Falls
// back to a single hardcoded option when the endpoint is unreachable.
async function loadModels() {
  const FALLBACK_MODEL = "gpt-oss:120b";
  // Build an <option> via DOM APIs so model names from the server can never
  // inject markup (the previous fallback interpolated default_model into
  // innerHTML unescaped).
  const makeOption = (name) => {
    const option = document.createElement("option");
    option.value = name;
    option.textContent = name;
    return option;
  };
  try {
    const response = await fetch(`${API_BASE}/ai/models`);
    const payload = await response.json();
    if (!response.ok) {
      throw new Error(payload.detail || "Unable to load models.");
    }
    const selectedFromUrl = getParams().get("model");
    aiModelSelect.innerHTML = "";
    (payload.models || []).forEach((model) => {
      // Ollama payloads may carry the identifier as `name` or `model`.
      const name = model.name || model.model;
      if (!name) {
        return;
      }
      const option = makeOption(name);
      if (name === selectedFromUrl || (!selectedFromUrl && name === payload.default_model)) {
        option.selected = true;
      }
      aiModelSelect.appendChild(option);
    });
    if (!aiModelSelect.options.length) {
      aiModelSelect.appendChild(makeOption(payload.default_model || FALLBACK_MODEL));
    }
  } catch {
    // Network/parse failure: keep the UI usable with the known default.
    aiModelSelect.innerHTML = "";
    aiModelSelect.appendChild(makeOption(FALLBACK_MODEL));
  }
}
async function fetchSearch(type, query, limit, offset) {
const response = await fetch(
`${API_BASE}/search?q=${encodeURIComponent(query)}&type=${type}&limit=${limit}&offset=${offset}`
@@ -237,6 +276,114 @@
return data;
}
// POST `payload` as JSON to `url` and consume the server-sent-event response,
// dispatching each event's JSON data to handlers[eventName]. Throws when the
// response is not OK or has no body.
async function streamSSE(url, payload, handlers) {
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
  if (!response.ok || !response.body) {
    const data = await response.json().catch(() => ({}));
    throw new Error(data.detail || "Model stream failed.");
  }
  // Parse one raw SSE event block ("event:" / "data:" lines) and invoke the
  // matching handler; unnamed events dispatch as "message".
  const dispatch = (rawEvent) => {
    const eventName = (rawEvent.match(/^event: (.+)$/m) || [])[1] || "message";
    const dataLine = rawEvent
      .split("\n")
      .filter((line) => line.startsWith("data: "))
      .map((line) => line.slice(6))
      .join("\n");
    const data = dataLine ? JSON.parse(dataLine) : {};
    handlers[eventName]?.(data);
  };
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  while (true) {
    const { value, done } = await reader.read();
    if (done) {
      break;
    }
    buffer += decoder.decode(value, { stream: true });
    // Events are separated by a blank line; keep the trailing partial event.
    const events = buffer.split("\n\n");
    buffer = events.pop() || "";
    for (const rawEvent of events) {
      dispatch(rawEvent);
    }
  }
  // Fix: flush any bytes still buffered in the decoder and dispatch a final
  // event that arrived without a terminating blank line — previously both
  // were silently dropped when the stream closed.
  buffer += decoder.decode();
  if (buffer.trim()) {
    dispatch(buffer);
  }
}
// Reset the AI panel and stream a fresh grounded answer for `query` from the
// backend, updating status, sources, reasoning trace, and answer text as
// events arrive. The regenerate button is disabled for the duration.
async function fetchAIAnswer(query) {
  aiPanel.classList.remove("hidden");
  aiStatus.textContent = "Streaming answer with Ollama Cloud...";
  aiAnswer.textContent = "";
  aiSources.innerHTML = "";
  aiThinkingWrap.classList.add("hidden");
  aiThinking.textContent = "";
  aiRegenerate.disabled = true;
  let streamedText = "";
  const requestBody = {
    query,
    model: aiModelSelect.value,
    include_web: aiUseWeb.checked,
    local_result_limit: 5,
    web_result_limit: 5,
  };
  const handlers = {
    // First event: announces the model in use and carries the source list.
    meta: (data) => {
      aiStatus.textContent = `Streaming from ${data.model || aiModelSelect.value}`;
      renderAISources(data.sources || []);
    },
    // Incremental reasoning-trace text; reveal the collapsible wrapper.
    thinking: (data) => {
      aiThinkingWrap.classList.remove("hidden");
      aiThinking.textContent += data.delta || "";
    },
    // Incremental answer text; accumulate and re-render.
    content: (data) => {
      streamedText += data.delta || "";
      aiAnswer.textContent = streamedText;
    },
    // Terminal event: prefer the server's final content over the accumulator.
    done: (data) => {
      aiStatus.textContent = `Generated by ${data.model || aiModelSelect.value}`;
      aiAnswer.textContent = data.content || streamedText || "No answer returned.";
    },
    // Server-signalled failure: surface via the catch block below.
    error: (data) => {
      throw new Error(data.detail || "Unable to generate AI answer.");
    },
  };
  try {
    await streamSSE(`${API_BASE}/ai/search/stream`, requestBody, handlers);
  } catch (error) {
    aiStatus.textContent = "AI answer unavailable";
    aiAnswer.textContent = error.message || "Unable to generate AI answer.";
  } finally {
    aiRegenerate.disabled = false;
  }
}
// Rebuild the numbered source-link list shown beneath the AI answer.
// Shows at most eight sources; clears the container when the list is empty.
function renderAISources(sources) {
  aiSources.innerHTML = "";
  if (!sources.length) {
    return;
  }
  const heading = document.createElement("p");
  heading.className = "text-xs font-semibold uppercase tracking-wide text-app-muted";
  heading.textContent = "Sources";
  aiSources.appendChild(heading);
  for (const [index, source] of sources.slice(0, 8).entries()) {
    const link = document.createElement("a");
    link.href = source.url;
    link.target = "_blank";
    link.rel = "noreferrer noopener";
    link.className = "block rounded-md border border-app-border bg-white p-3 text-sm transition hover:border-app-primary";
    // Title, type, and URL come straight from the API, so escape them all.
    link.innerHTML = `
        <span class="font-semibold text-app-ink">[${index + 1}] ${escapeHTML(source.title)}</span>
        <span class="ml-2 rounded bg-app-soft px-2 py-0.5 text-xs uppercase text-app-muted">${escapeHTML(source.source_type)}</span>
        <span class="mt-1 block truncate text-xs text-app-muted">${escapeHTML(source.url)}</span>
      `;
    aiSources.appendChild(link);
  }
}
function extractHost(url) {
try {
return new URL(url).hostname.replace(/^www\./, "");
@@ -272,11 +419,12 @@
function renderError(message) {
metaText.textContent = "Search unavailable";
aiPanel.classList.add("hidden");
resultsContainer.className = "mt-6";
resultsContainer.innerHTML = `
<section class="max-w-2xl rounded-lg border border-sfetch-border bg-sfetch-bg px-5 py-6">
<p class="text-lg text-sfetch-ink">Unable to load results.</p>
<p class="mt-2 text-sm text-sfetch-muted">${escapeHTML(message)}</p>
<section class="max-w-2xl rounded-lg border border-app-border bg-app-bg px-5 py-6">
<p class="text-lg text-app-ink">Unable to load results.</p>
<p class="mt-2 text-sm text-app-muted">${escapeHTML(message)}</p>
</section>
`;
paginationNav.innerHTML = "";
@@ -286,10 +434,9 @@
metaText.textContent = "About 0 results";
resultsContainer.className = "mt-6";
resultsContainer.innerHTML = `
<section class="max-w-2xl rounded-lg border border-sfetch-border bg-sfetch-bg px-5 py-8">
<div class="flex h-12 w-12 items-center justify-center rounded-full bg-sfetch-surfaceSoft text-lg font-bold text-sfetch-orange">s</div>
<h2 class="mt-4 text-xl text-sfetch-ink">No results found</h2>
<p class="mt-2 text-sm text-sfetch-muted">No indexed pages matched "${escapeHTML(query)}".</p>
<section class="max-w-2xl rounded-lg border border-app-border bg-app-bg px-5 py-8">
<h2 class="text-xl font-semibold text-app-ink">No results found</h2>
<p class="mt-2 text-sm text-app-muted">No indexed pages matched "${escapeHTML(query)}".</p>
</section>
`;
paginationNav.innerHTML = "";
@@ -310,10 +457,10 @@
btn.disabled = disabled;
btn.className = `flex h-10 min-w-10 items-center justify-center rounded-md border px-3 text-sm transition ${
active
? "border-sfetch-blue bg-sfetch-blue text-white"
? "border-app-primary bg-app-primary text-white"
: disabled
? "cursor-not-allowed border-sfetch-border text-sfetch-muted/50"
: "border-sfetch-border text-sfetch-ink hover:border-sfetch-blue hover:text-sfetch-blue"
? "cursor-not-allowed border-app-border text-app-muted/50"
: "border-app-border text-app-ink hover:border-app-primary hover:text-app-primary"
}`;
if (!disabled && !active) {
btn.addEventListener("click", () => runSearch(query, page));
@@ -322,18 +469,15 @@
};
paginationNav.appendChild(button("<", currentPage - 1, currentPage === 1));
const maxVisiblePages = 5;
let start = Math.max(1, currentPage - 2);
let end = Math.min(totalPages, start + maxVisiblePages - 1);
if (end - start < maxVisiblePages - 1) {
start = Math.max(1, end - maxVisiblePages + 1);
}
for (let i = start; i <= end; i += 1) {
paginationNav.appendChild(button(String(i), i, false, i === currentPage));
}
paginationNav.appendChild(button(">", currentPage + 1, currentPage === totalPages));
}
@@ -348,14 +492,9 @@
.slice(0, 8)
.forEach((item) => {
const thumb = document.createElement("button");
thumb.className = "overflow-hidden rounded-md border border-sfetch-border transition hover:border-sfetch-orange";
thumb.className = "overflow-hidden rounded-md border border-app-border transition hover:border-app-primary";
thumb.innerHTML = `
<img
src="${escapeHTML(item.url)}"
alt="${escapeHTML(item.alt_text || "Related image")}"
class="h-24 w-full object-cover"
loading="lazy"
/>
<img src="${escapeHTML(item.url)}" alt="${escapeHTML(item.alt_text || "Related image")}" class="h-24 w-full object-cover" loading="lazy" />
`;
thumb.addEventListener("click", () => {
const realIndex = relatedPool.findIndex((candidate) => candidate.id === item.id);
@@ -372,22 +511,18 @@
}
function renderImageGrid(results) {
aiPanel.classList.add("hidden");
resultsContainer.className = "mt-6 grid grid-cols-2 gap-4 sm:grid-cols-3 lg:grid-cols-4";
resultsContainer.innerHTML = "";
results.forEach((result, index) => {
const card = document.createElement("article");
card.className = "group cursor-pointer overflow-hidden rounded-lg border border-sfetch-border bg-white transition hover:border-sfetch-orange";
card.className = "group cursor-pointer overflow-hidden rounded-lg border border-app-border bg-white transition hover:border-app-primary";
card.innerHTML = `
<div class="aspect-square overflow-hidden bg-sfetch-surfaceSoft">
<img
src="${escapeHTML(result.url)}"
alt="${escapeHTML(result.alt_text || "Image result")}"
class="h-full w-full object-cover transition duration-200 group-hover:scale-105"
loading="lazy"
/>
<div class="aspect-square overflow-hidden bg-app-soft">
<img src="${escapeHTML(result.url)}" alt="${escapeHTML(result.alt_text || "Image result")}" class="h-full w-full object-cover transition duration-200 group-hover:scale-105" loading="lazy" />
</div>
<div class="truncate px-3 py-2 text-xs text-sfetch-muted">${escapeHTML(result.alt_text || extractHost(result.page_url))}</div>
<div class="truncate px-3 py-2 text-xs text-app-muted">${escapeHTML(result.alt_text || extractHost(result.page_url))}</div>
`;
card.addEventListener("click", () => openImageModal(result, index, results));
resultsContainer.appendChild(card);
@@ -395,26 +530,27 @@
}
function renderVideoCards(results) {
resultsContainer.className = "mt-6 space-y-4";
aiPanel.classList.add("hidden");
resultsContainer.className = "mt-6 max-w-3xl space-y-4";
resultsContainer.innerHTML = "";
results.forEach((result) => {
const thumbnail = videoThumbnail(result.url);
const card = document.createElement("article");
card.className = "overflow-hidden rounded-lg border border-sfetch-border bg-white";
card.className = "overflow-hidden rounded-lg border border-app-border bg-white";
card.innerHTML = `
<a href="${escapeHTML(result.url)}" target="_blank" rel="noreferrer noopener" class="block md:flex">
<div class="relative h-44 w-full shrink-0 overflow-hidden bg-sfetch-surfaceSoft md:w-72">
<div class="relative h-44 w-full shrink-0 overflow-hidden bg-app-soft md:w-72">
${
thumbnail
? `<img src="${escapeHTML(thumbnail)}" alt="${escapeHTML(result.title)}" class="h-full w-full object-cover" loading="lazy" />`
: `<div class="flex h-full items-center justify-center text-sfetch-muted">Video</div>`
: `<div class="flex h-full items-center justify-center text-app-muted">Video</div>`
}
</div>
<div class="space-y-2 p-5">
<p class="text-xs uppercase text-sfetch-green">${escapeHTML(extractHost(result.url))}</p>
<h3 class="text-xl font-medium text-sfetch-blue">${escapeHTML(result.title)}</h3>
<p class="text-sm text-sfetch-muted">Source: ${escapeHTML(extractHost(result.page_url))}</p>
<p class="text-xs uppercase text-app-success">${escapeHTML(extractHost(result.url))}</p>
<h3 class="text-xl font-medium text-app-primary">${escapeHTML(result.title)}</h3>
<p class="text-sm text-app-muted">Source: ${escapeHTML(extractHost(result.page_url))}</p>
</div>
</a>
`;
@@ -431,20 +567,15 @@
const host = extractHost(result.url);
article.className = "space-y-1";
article.innerHTML = `
<div class="flex items-center gap-2 text-sm text-sfetch-muted">
<div class="flex h-7 w-7 shrink-0 items-center justify-center rounded-full bg-sfetch-surfaceSoft text-xs font-bold text-sfetch-orange">${escapeHTML(host.slice(0, 1).toUpperCase())}</div>
<div class="flex items-center gap-2 text-sm text-app-muted">
<div class="flex h-7 w-7 shrink-0 items-center justify-center rounded bg-app-soft text-xs font-bold text-app-primary">${escapeHTML(host.slice(0, 1).toUpperCase())}</div>
<div class="min-w-0">
<p class="text-sfetch-ink">${escapeHTML(host)}</p>
<p class="text-app-ink">${escapeHTML(host)}</p>
<p class="truncate text-xs">${escapeHTML(result.url)}</p>
</div>
</div>
<a
href="${escapeHTML(result.url)}"
target="_blank"
rel="noreferrer noopener"
class="block text-xl leading-tight text-sfetch-blue hover:underline"
>${escapeHTML(result.title)}</a>
<p class="text-sm leading-6 text-sfetch-muted">${result.snippet}</p>
<a href="${escapeHTML(result.url)}" target="_blank" rel="noreferrer noopener" class="block text-xl leading-tight text-app-primary hover:underline">${escapeHTML(result.title)}</a>
<p class="text-sm leading-6 text-app-muted">${result.snippet}</p>
`;
wrapper.appendChild(article);
});
@@ -453,10 +584,12 @@
}
function renderAllMode(webData, imageData, videoData, page) {
aiPanel.classList.remove("hidden");
const start = (page - 1) * RESULTS_PER_PAGE + 1;
const end = Math.min(start + webData.results.length - 1, webData.total);
if (webData.total === 0 && imageData.total === 0 && videoData.total === 0) {
renderEmpty(webData.query);
fetchAIAnswer(webData.query);
return;
}
@@ -464,22 +597,26 @@
? `${start}-${end} of about ${webData.total} web results`
: "No direct web matches, showing media results";
resultsContainer.className = "mt-6 space-y-9";
resultsContainer.className = "mt-8 space-y-9";
resultsContainer.innerHTML = "";
if (webData.results.length) {
resultsContainer.appendChild(renderWebList(webData.results));
}
if (imageData.results.length) {
const imageSection = document.createElement("section");
imageSection.innerHTML = `
<div class="mb-3 flex max-w-3xl items-center justify-between">
<h2 class="text-sm font-semibold text-sfetch-ink">Images</h2>
<button id="seeAllImagesBtn" class="text-sm font-medium text-sfetch-blue hover:underline">See all</button>
<h2 class="text-sm font-semibold text-app-ink">Images</h2>
<button id="seeAllImagesBtn" class="text-sm font-medium text-app-primary hover:underline">See all</button>
</div>
`;
const grid = document.createElement("div");
grid.className = "grid max-w-3xl grid-cols-3 gap-2 sm:grid-cols-4 md:grid-cols-6";
imageData.results.slice(0, 6).forEach((result, index) => {
const button = document.createElement("button");
button.className = "overflow-hidden rounded-md border border-sfetch-border bg-sfetch-surfaceSoft";
button.className = "overflow-hidden rounded-md border border-app-border bg-app-soft";
button.innerHTML = `<img src="${escapeHTML(result.url)}" alt="${escapeHTML(result.alt_text || "Image result")}" class="aspect-square w-full object-cover" loading="lazy" />`;
button.addEventListener("click", () => openImageModal(result, index, imageData.results));
grid.appendChild(button);
@@ -492,16 +629,12 @@
});
}
if (webData.results.length) {
resultsContainer.appendChild(renderWebList(webData.results));
}
if (videoData.results.length) {
const videoSection = document.createElement("section");
videoSection.innerHTML = `
<div class="mb-3 flex max-w-3xl items-center justify-between">
<h2 class="text-sm font-semibold text-sfetch-ink">Videos</h2>
<button id="seeAllVideosBtn" class="text-sm font-medium text-sfetch-blue hover:underline">See all</button>
<h2 class="text-sm font-semibold text-app-ink">Videos</h2>
<button id="seeAllVideosBtn" class="text-sm font-medium text-app-primary hover:underline">See all</button>
</div>
`;
const list = document.createElement("div");
@@ -512,19 +645,19 @@
card.href = result.url;
card.target = "_blank";
card.rel = "noreferrer noopener";
card.className = "block overflow-hidden rounded-lg border border-sfetch-border bg-white transition hover:border-sfetch-orange sm:flex";
card.className = "block overflow-hidden rounded-lg border border-app-border bg-white transition hover:border-app-primary sm:flex";
card.innerHTML = `
<div class="h-36 w-full shrink-0 overflow-hidden bg-sfetch-surfaceSoft sm:w-56">
<div class="h-36 w-full shrink-0 overflow-hidden bg-app-soft sm:w-56">
${
thumb
? `<img src="${escapeHTML(thumb)}" alt="${escapeHTML(result.title)}" class="h-full w-full object-cover" loading="lazy" />`
: `<div class="flex h-full items-center justify-center text-sfetch-muted">Video</div>`
: `<div class="flex h-full items-center justify-center text-app-muted">Video</div>`
}
</div>
<div class="space-y-2 p-4">
<p class="text-xs uppercase text-sfetch-green">${escapeHTML(extractHost(result.url))}</p>
<h3 class="text-lg font-medium text-sfetch-blue">${escapeHTML(result.title)}</h3>
<p class="text-sm text-sfetch-muted">${escapeHTML(extractHost(result.page_url))}</p>
<p class="text-xs uppercase text-app-success">${escapeHTML(extractHost(result.url))}</p>
<h3 class="text-lg font-medium text-app-primary">${escapeHTML(result.title)}</h3>
<p class="text-sm text-app-muted">${escapeHTML(extractHost(result.page_url))}</p>
</div>
`;
list.appendChild(card);
@@ -538,6 +671,7 @@
}
renderPagination(webData.total, page, webData.query);
fetchAIAnswer(webData.query);
}
function renderVerticalMode(data, page) {
@@ -566,15 +700,13 @@
function renderLoadingSkeleton() {
if (currentType === "image") {
resultsContainer.className = "mt-6 grid grid-cols-2 gap-4 sm:grid-cols-3 lg:grid-cols-4";
resultsContainer.innerHTML = Array.from({ length: 8 })
.map(() => '<div class="skeleton aspect-square rounded-lg"></div>')
.join("");
resultsContainer.innerHTML = Array.from({ length: 8 }).map(() => '<div class="skeleton aspect-square rounded-lg"></div>').join("");
metaText.textContent = "Searching images...";
} else if (currentType === "video") {
resultsContainer.className = "mt-6 max-w-3xl space-y-4";
resultsContainer.innerHTML = Array.from({ length: 4 })
.map(() => `
<div class="overflow-hidden rounded-lg border border-sfetch-border bg-white">
<div class="overflow-hidden rounded-lg border border-app-border bg-white">
<div class="skeleton h-36 w-full"></div>
<div class="space-y-3 p-4">
<div class="skeleton h-3 w-24 rounded-full"></div>
@@ -586,7 +718,11 @@
.join("");
metaText.textContent = "Searching videos...";
} else {
resultsContainer.className = "mt-6 max-w-3xl space-y-6";
aiPanel.classList.remove("hidden");
aiStatus.textContent = "Waiting for search results...";
aiAnswer.textContent = "";
aiSources.innerHTML = "";
resultsContainer.className = "mt-8 max-w-3xl space-y-6";
resultsContainer.innerHTML = Array.from({ length: 4 })
.map(() => `
<article class="space-y-3">
@@ -606,10 +742,11 @@
async function runSearch(query, page = 1) {
const normalizedQuery = query.trim();
if (!normalizedQuery) {
aiPanel.classList.add("hidden");
metaText.textContent = "Enter a search query.";
resultsContainer.className = "mt-6";
resultsContainer.innerHTML = `
<section class="max-w-2xl rounded-lg border border-sfetch-border bg-sfetch-bg px-5 py-6 text-sm text-sfetch-muted">
<section class="max-w-2xl rounded-lg border border-app-border bg-app-bg px-5 py-6 text-sm text-app-muted">
Type a query above and press Search.
</section>
`;
@@ -617,11 +754,6 @@
return;
}
if (normalizedQuery.toLowerCase() === "do a barrel roll") {
document.documentElement.classList.add("barrel-roll");
setTimeout(() => document.documentElement.classList.remove("barrel-roll"), 1200);
}
updateTabsUI();
updateUrl(normalizedQuery, page);
searchInput.value = normalizedQuery;
@@ -671,9 +803,24 @@
// Submitting the search form always restarts on the "all" vertical, page 1.
searchForm.addEventListener("submit", (event) => {
  event.preventDefault();
  currentType = "all";
  runSearch(searchInput.value, 1);
});
// "Generate" re-runs the AI answer for the current query (input box first,
// URL query as fallback) without re-running the web search.
aiRegenerate.addEventListener("click", () => {
  const query = searchInput.value || getQueryFromUrl();
  if (query.trim()) {
    fetchAIAnswer(query.trim());
  }
});
// Picking a different model only rewrites the URL's ?model= param (so links
// stay shareable); it does not regenerate the answer by itself.
aiModelSelect.addEventListener("change", () => {
  const query = searchInput.value || getQueryFromUrl();
  if (currentType === "all" && query.trim()) {
    updateUrl(query.trim(), getPageFromUrl());
  }
});
closeModalBtn.addEventListener("click", closeImageModal);
imageModal.addEventListener("click", (event) => {
if (event.target === imageModal) {
@@ -686,8 +833,13 @@
}
});
// Page entry point: load the model list first so the URL's ?model= selection
// can be honored, then run the initial search from URL state.
async function init() {
  await loadModels();
  currentType = getTypeFromUrl();
  runSearch(getQueryFromUrl(), getPageFromUrl());
}
init();
</script>
</body>
</html>