phase 1
This commit is contained in:
33
Dockerfile
33
Dockerfile
@@ -1,13 +1,32 @@
|
||||
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies (curl is needed to fetch the OpenCode installer)
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Install OpenCode
# NOTE(review): `curl | bash` is unpinned and unverified; pin a release
# version and verify a checksum for reproducible, auditable builds.
RUN curl -fsSL https://opencode.ai/install | bash

# Copy custom OpenCode configuration.
# The config_opencode directory must live inside the build context:
# COPY cannot reference paths outside it (e.g. ../config_opencode fails).
RUN mkdir -p /root/.config
COPY ./config_opencode /root/.config/opencode

# Build-time argument for the MCP server endpoint, persisted for runtime
ARG MCP_SERVER
ENV MCP_SERVER=$MCP_SERVER

# Copy application code
COPY . /app

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Create the per-session working directory
RUN mkdir -p /app/somedir

# Expose port (documentation only; publishing happens at run time)
EXPOSE 8080

# Set environment variables
ENV PYTHONPATH=/app

# Start OpenCode web interface (exec form: process is PID 1, receives SIGTERM)
CMD ["opencode", "web", "--host", "0.0.0.0", "--port", "8080", "--workdir", "/app/somedir"]
46
README-setup.md
Normal file
46
README-setup.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# Lovdata Chat Development Environment

This setup creates a container-per-visitor architecture for the Norwegian legal research chat interface.

## Quick Start

1. **Set up environment variables:**
   ```bash
   cp .env.example .env
   # Edit .env with your API keys
   ```

2. **Start the services:**
   ```bash
   docker-compose up --build
   ```

3. **Create a session:**
   ```bash
   curl http://localhost/api/sessions -X POST
   ```

4. **Access the chat interface:**
   Open the returned URL in your browser

## Architecture

- **session-manager**: FastAPI service managing container lifecycles
- **lovdata-mcp**: Placeholder for the Norwegian legal research MCP server
- **caddy**: Reverse proxy for routing requests to session containers

## Development Notes

- Sessions are cleaned up automatically after 60 minutes of inactivity
- Limited to 3 concurrent sessions for workstation development
- Each session gets 4 GB RAM and 1 CPU core
- Session data persists in the ./sessions/ directory

## API Endpoints

- `POST /api/sessions` - Create new session
- `GET /api/sessions` - List all sessions
- `GET /api/sessions/{id}` - Get session info
- `DELETE /api/sessions/{id}` - Delete session
- `POST /api/cleanup` - Manual cleanup
- `GET /api/health` - Health check
54
docker-compose.yml
Normal file
54
docker-compose.yml
Normal file
@@ -0,0 +1,54 @@
|
||||
# NOTE(review): the top-level `version` key is obsolete under Compose v2 and
# only produces a warning; kept for compatibility with legacy docker-compose v1.
version: '3.8'

services:
  session-manager:
    build:
      context: ./session-manager
      dockerfile: Dockerfile
    ports:
      - "8000:8000"
    volumes:
      - ./sessions:/app/sessions
      # NOTE(review): mounting the Docker socket grants root-equivalent access
      # to the host — acceptable for local development only.
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - MCP_SERVER=http://lovdata-mcp:8001
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
      - GOOGLE_API_KEY=${GOOGLE_API_KEY:-}
    depends_on:
      - lovdata-mcp
    networks:
      - lovdata-network
    restart: unless-stopped

  lovdata-mcp:
    # Placeholder for the lovdata MCP server.
    # Replace with the actual lovdata MCP server image.
    image: python:3.11-slim
    ports:
      - "8001:8001"
    networks:
      - lovdata-network
    command: ["python", "-c", "import time; time.sleep(999999)"]  # Placeholder
    restart: unless-stopped

  caddy:
    image: caddy:2.7-alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      # Config mounted read-only: the proxy never needs to write it.
      - ./nginx/Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy_data:/data
      - caddy_config:/config
    networks:
      - lovdata-network
    restart: unless-stopped

volumes:
  caddy_data:
  caddy_config:

networks:
  lovdata-network:
    driver: bridge
31
nginx/Caddyfile
Normal file
31
nginx/Caddyfile
Normal file
@@ -0,0 +1,31 @@
|
||||
# Lovdata Chat Reverse Proxy Configuration

# Main web interface
localhost {
	# Health check — declared BEFORE the catch-all below: handle blocks are
	# evaluated in the order they appear, so placing this after `handle /*`
	# would make it unreachable.
	handle /health {
		reverse_proxy session-manager:8000
	}

	# API endpoints for session management.
	# handle_path matches AND strips the /api prefix before proxying
	# (replaces the manual `uri strip_prefix /api`).
	handle_path /api/* {
		reverse_proxy session-manager:8000
	}

	# Session-specific routing.
	# NOTE(review): Caddy path matchers have no named parameters — the
	# original `{session_id}` placeholder is undefined. Match the prefix and
	# let the session manager resolve the target container from the path.
	handle /session/* {
		reverse_proxy session-manager:8000
	}

	# Static files and main interface
	handle /* {
		# NOTE(review): no `root` is configured, so file_server serves from
		# the process working directory — confirm and set `root` explicitly.
		try_files {path} {path}/ /index.html
		file_server
	}
}
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
fastapi==0.104.1
uvicorn==0.24.0
docker==7.0.0
pydantic==2.5.0
python-multipart==0.0.6
30
session-manager/Dockerfile
Normal file
30
session-manager/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
||||
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies (curl is required by the HEALTHCHECK below)
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better layer caching.
# docker-compose builds this image with `context: ./session-manager`, so COPY
# paths are relative to that directory — `COPY session-manager/...` would
# fail because the prefix is not inside the build context.
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create sessions directory
RUN mkdir -p /app/sessions

# Expose port (documentation only)
EXPOSE 8000

# Health check against the service's own /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start the session manager (exec form: proper PID 1 signal handling)
CMD ["python", "main.py"]
340
session-manager/main.py
Normal file
340
session-manager/main.py
Normal file
@@ -0,0 +1,340 @@
|
||||
"""
|
||||
Session Management Service for Lovdata Chat
|
||||
|
||||
This service manages the lifecycle of OpenCode containers for individual user sessions.
|
||||
Each session gets its own isolated container with a dedicated working directory.
|
||||
"""
|
||||
|
||||
import os
|
||||
import uuid
|
||||
import json
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Dict, Optional, List
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import docker
|
||||
from docker.errors import DockerException, NotFound
|
||||
from fastapi import FastAPI, HTTPException, BackgroundTasks
|
||||
from fastapi.responses import JSONResponse
|
||||
from pydantic import BaseModel
|
||||
import uvicorn
|
||||
|
||||
|
||||
# Configuration
|
||||
SESSIONS_DIR = Path("/app/sessions")
|
||||
SESSIONS_FILE = Path("/app/sessions/sessions.json")
|
||||
CONTAINER_IMAGE = "lovdata-opencode:latest"
|
||||
MAX_CONCURRENT_SESSIONS = 3 # Workstation limit
|
||||
SESSION_TIMEOUT_MINUTES = 60 # Auto-cleanup after 1 hour
|
||||
CONTAINER_MEMORY_LIMIT = "4g"
|
||||
CONTAINER_CPU_QUOTA = 100000 # 1 CPU core
|
||||
|
||||
|
||||
class SessionData(BaseModel):
    """Metadata describing one visitor session and its backing container."""

    session_id: str
    container_name: str
    container_id: Optional[str] = None  # unset until the container is started
    host_dir: str
    port: Optional[int] = None
    created_at: datetime
    last_accessed: datetime
    # Lifecycle state: creating, running, stopped, error
    status: str = "creating"
class SessionManager:
    """Owns the Docker client, the in-memory session table, and its JSON
    persistence on disk (SESSIONS_FILE)."""

    def __init__(self):
        self.docker_client = docker.from_env()
        self.sessions: Dict[str, SessionData] = {}
        self._load_sessions()

    def _load_sessions(self):
        """Load session data from persistent storage, tolerating a corrupt file."""
        if not SESSIONS_FILE.exists():
            return
        try:
            with open(SESSIONS_FILE, "r") as f:
                data = json.load(f)
            for session_id, session_dict in data.items():
                # JSON stores datetimes as ISO strings; restore real objects.
                session_dict["created_at"] = datetime.fromisoformat(
                    session_dict["created_at"]
                )
                session_dict["last_accessed"] = datetime.fromisoformat(
                    session_dict["last_accessed"]
                )
                self.sessions[session_id] = SessionData(**session_dict)
        except (json.JSONDecodeError, KeyError, ValueError) as e:
            # ValueError covers malformed ISO datetime strings, which the
            # original except clause did not catch.
            print(f"Warning: Could not load sessions file: {e}")
            self.sessions = {}

    def _save_sessions(self):
        """Save session data to persistent storage as indented JSON."""
        SESSIONS_DIR.mkdir(parents=True, exist_ok=True)
        # model_dump() is the pydantic v2 replacement for the deprecated .dict()
        data = {
            session_id: session.model_dump()
            for session_id, session in self.sessions.items()
        }
        with open(SESSIONS_FILE, "w") as f:
            # default=str serializes datetime objects
            json.dump(data, f, indent=2, default=str)

    def _generate_session_id(self) -> str:
        """Generate a unique 16-character hex session ID."""
        return str(uuid.uuid4()).replace("-", "")[:16]

    def _get_available_port(self) -> int:
        """Return the lowest host port >= 8080 not assigned to a session."""
        used_ports = {s.port for s in self.sessions.values() if s.port}
        port = 8080
        while port in used_ports:
            port += 1
        return port

    def _check_container_limits(self) -> bool:
        """True while the number of creating/running sessions is below the cap."""
        active_sessions = sum(
            1 for s in self.sessions.values() if s.status in ("creating", "running")
        )
        return active_sessions < MAX_CONCURRENT_SESSIONS

    async def create_session(self) -> SessionData:
        """Register a new session and start its container in the background.

        Raises:
            HTTPException: 429 when MAX_CONCURRENT_SESSIONS is reached.
        """
        if not self._check_container_limits():
            raise HTTPException(
                status_code=429,
                detail=f"Maximum concurrent sessions ({MAX_CONCURRENT_SESSIONS}) reached",
            )

        session_id = self._generate_session_id()
        container_name = f"opencode-{session_id}"
        host_dir = str(SESSIONS_DIR / session_id)
        port = self._get_available_port()

        # Create the per-session workspace on the host
        Path(host_dir).mkdir(parents=True, exist_ok=True)

        session = SessionData(
            session_id=session_id,
            container_name=container_name,
            host_dir=host_dir,
            port=port,
            created_at=datetime.now(),
            last_accessed=datetime.now(),
            status="creating",
        )

        self.sessions[session_id] = session
        self._save_sessions()

        # Start container in background; status flips to running/error there.
        asyncio.create_task(self._start_container(session))

        return session

    async def _start_container(self, session: SessionData):
        """Start (or adopt) the OpenCode container for a session.

        NOTE(review): the docker SDK calls below are blocking and will stall
        the event loop while the container starts — consider run_in_executor.
        """
        try:
            # Reuse an already-running container with the same name, if any;
            # remove a stopped leftover so the name is free.
            try:
                existing = self.docker_client.containers.get(session.container_name)
                if existing.status == "running":
                    session.status = "running"
                    session.container_id = existing.id
                    self._save_sessions()
                    return
                existing.remove()
            except NotFound:
                pass

            container = self.docker_client.containers.run(
                CONTAINER_IMAGE,
                name=session.container_name,
                volumes={session.host_dir: {"bind": "/app/somedir", "mode": "rw"}},
                ports={"8080/tcp": session.port},
                detach=True,
                mem_limit=CONTAINER_MEMORY_LIMIT,
                cpu_quota=CONTAINER_CPU_QUOTA,
                environment={
                    "MCP_SERVER": os.getenv(
                        "MCP_SERVER", "http://host.docker.internal:8001"
                    ),
                    "OPENAI_API_KEY": os.getenv("OPENAI_API_KEY", ""),
                    "ANTHROPIC_API_KEY": os.getenv("ANTHROPIC_API_KEY", ""),
                    "GOOGLE_API_KEY": os.getenv("GOOGLE_API_KEY", ""),
                },
                network_mode="bridge",
            )

            session.container_id = container.id
            session.status = "running"
            self._save_sessions()
            print(f"Started container {session.container_name} on port {session.port}")

        except DockerException as e:
            session.status = "error"
            self._save_sessions()
            print(f"Failed to start container {session.container_name}: {e}")

    async def get_session(self, session_id: str) -> Optional[SessionData]:
        """Look up a session; a hit refreshes its last-accessed timestamp."""
        session = self.sessions.get(session_id)
        if session:
            session.last_accessed = datetime.now()
            self._save_sessions()
        return session

    async def list_sessions(self) -> List[SessionData]:
        """Return all known sessions."""
        return list(self.sessions.values())

    async def cleanup_expired_sessions(self):
        """Stop/remove containers and delete workspaces of timed-out sessions."""
        import shutil

        now = datetime.now()
        expired_sessions = []

        for session_id, session in self.sessions.items():
            # Skip sessions that are still within the inactivity window.
            if now - session.last_accessed <= timedelta(minutes=SESSION_TIMEOUT_MINUTES):
                continue
            expired_sessions.append(session_id)

            # Stop and remove the backing container
            try:
                container = self.docker_client.containers.get(session.container_name)
                container.stop(timeout=10)
                container.remove()
                print(f"Cleaned up container {session.container_name}")
            except NotFound:
                pass
            except DockerException as e:
                print(f"Error cleaning up container {session.container_name}: {e}")

            # Remove the session workspace on the host
            try:
                shutil.rmtree(session.host_dir)
                print(f"Removed session directory {session.host_dir}")
            except OSError as e:
                print(f"Error removing session directory {session.host_dir}: {e}")

        # Drop the records only after teardown attempts finished.
        for session_id in expired_sessions:
            del self.sessions[session_id]

        if expired_sessions:
            self._save_sessions()
            print(f"Cleaned up {len(expired_sessions)} expired sessions")
|
||||
# Global session manager instance — module-level singleton shared by all routes
session_manager = SessionManager()
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Starts the periodic expired-session sweep on startup and cancels it on
    shutdown.
    """
    # Startup
    print("Starting Session Management Service")

    async def cleanup_task():
        while True:
            try:
                await session_manager.cleanup_expired_sessions()
            except Exception as e:
                # A single failed sweep must not kill the loop for the
                # remaining lifetime of the process (the original task died
                # silently on the first unhandled exception).
                print(f"Cleanup sweep failed: {e}")
            await asyncio.sleep(300)  # Run every 5 minutes

    cleanup_coro = asyncio.create_task(cleanup_task())

    try:
        yield
    finally:
        # Shutdown — cancel even if the app body raised.
        print("Shutting down Session Management Service")
        cleanup_coro.cancel()
||||
# FastAPI application; lifespan wires in the background cleanup loop.
app = FastAPI(
    title="Lovdata Chat Session Manager",
    description="Manages isolated OpenCode containers for Norwegian legal research sessions",
    version="1.0.0",
    lifespan=lifespan,
)
||||
@app.post("/sessions", response_model=SessionData)
|
||||
async def create_session():
|
||||
"""Create a new session with dedicated container"""
|
||||
try:
|
||||
session = await session_manager.create_session()
|
||||
return session
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to create session: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@app.get("/sessions/{session_id}", response_model=SessionData)
|
||||
async def get_session(session_id: str):
|
||||
"""Get session information"""
|
||||
session = await session_manager.get_session(session_id)
|
||||
if not session:
|
||||
raise HTTPException(status_code=404, detail="Session not found")
|
||||
return session
|
||||
|
||||
|
||||
@app.get("/sessions", response_model=List[SessionData])
|
||||
async def list_sessions():
|
||||
"""List all active sessions"""
|
||||
return await session_manager.list_sessions()
|
||||
|
||||
|
||||
@app.delete("/sessions/{session_id}")
|
||||
async def delete_session(session_id: str, background_tasks: BackgroundTasks):
|
||||
"""Delete a session and its container"""
|
||||
session = await session_manager.get_session(session_id)
|
||||
if not session:
|
||||
raise HTTPException(status_code=404, detail="Session not found")
|
||||
|
||||
# Schedule cleanup
|
||||
background_tasks.add_task(session_manager.cleanup_expired_sessions)
|
||||
|
||||
# Remove from sessions immediately
|
||||
del session_manager.sessions[session_id]
|
||||
session_manager._save_sessions()
|
||||
|
||||
return {"message": f"Session {session_id} scheduled for deletion"}
|
||||
|
||||
|
||||
@app.post("/cleanup")
|
||||
async def trigger_cleanup():
|
||||
"""Manually trigger cleanup of expired sessions"""
|
||||
await session_manager.cleanup_expired_sessions()
|
||||
return {"message": "Cleanup completed"}
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint"""
|
||||
try:
|
||||
# Check Docker connectivity
|
||||
session_manager.docker_client.ping()
|
||||
docker_ok = True
|
||||
except:
|
||||
docker_ok = False
|
||||
|
||||
return {
|
||||
"status": "healthy" if docker_ok else "unhealthy",
|
||||
"docker": docker_ok,
|
||||
"active_sessions": len(
|
||||
[s for s in session_manager.sessions.values() if s.status == "running"]
|
||||
),
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
|
||||
Reference in New Issue
Block a user