diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..b342015 --- /dev/null +++ b/.env.example @@ -0,0 +1,17 @@ +# AI Service Environment Variables +# Copy this file to .env and modify as needed + +# LLM Configuration +AI_SERVICE_LLM_PROVIDER=openai +AI_SERVICE_LLM_API_KEY=your-api-key-here +AI_SERVICE_LLM_BASE_URL=https://api.openai.com/v1 +AI_SERVICE_LLM_MODEL=gpt-4o-mini + +# If using DeepSeek +# AI_SERVICE_LLM_PROVIDER=openai +# AI_SERVICE_LLM_API_KEY=your-deepseek-api-key +# AI_SERVICE_LLM_BASE_URL=https://api.deepseek.com/v1 +# AI_SERVICE_LLM_MODEL=deepseek-chat + +# Ollama Configuration (optional) +# AI_SERVICE_OLLAMA_BASE_URL=http://ollama:11434 diff --git a/ai-service-admin/.dockerignore b/ai-service-admin/.dockerignore new file mode 100644 index 0000000..2cde1db --- /dev/null +++ b/ai-service-admin/.dockerignore @@ -0,0 +1,19 @@ +node_modules +dist + +.env +.env.local +.env.*.local + +*.log + +.idea/ +.vscode/ +*.swp +*.swo + +.git +.gitignore + +*.md +!README.md diff --git a/ai-service-admin/Dockerfile b/ai-service-admin/Dockerfile new file mode 100644 index 0000000..5f5b1ba --- /dev/null +++ b/ai-service-admin/Dockerfile @@ -0,0 +1,22 @@ +# AI Service Admin Frontend Dockerfile +FROM node:20-alpine AS builder + +WORKDIR /app + +COPY package*.json ./ + +RUN npm ci + +COPY . . 
+ +RUN npm run build + +FROM nginx:alpine + +COPY --from=builder /app/dist /usr/share/nginx/html + +COPY nginx.conf /etc/nginx/conf.d/default.conf + +EXPOSE 80 + +CMD ["nginx", "-g", "daemon off;"] diff --git a/ai-service-admin/nginx.conf b/ai-service-admin/nginx.conf new file mode 100644 index 0000000..90a7824 --- /dev/null +++ b/ai-service-admin/nginx.conf @@ -0,0 +1,28 @@ +server { + listen 80; + server_name localhost; + root /usr/share/nginx/html; + index index.html; + + location / { + try_files $uri $uri/ /index.html; + } + + location /api/ { + proxy_pass http://ai-service:8080/; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + proxy_read_timeout 300s; + proxy_connect_timeout 75s; + } + + gzip on; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript; + gzip_min_length 1000; +} diff --git a/ai-service/.dockerignore b/ai-service/.dockerignore new file mode 100644 index 0000000..980a2b5 --- /dev/null +++ b/ai-service/.dockerignore @@ -0,0 +1,53 @@ +__pycache__ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +.pytest_cache +.coverage +htmlcov/ +.tox/ +.hypothesis/ + +.mypy_cache/ +.ruff_cache/ + +.env +.env.local +.env.*.local + +*.log +*.pot +*.pyc + +.idea/ +.vscode/ +*.swp +*.swo + +tests/ +scripts/ +*.md +!README.md + +.git +.gitignore +.gitea + +check_qdrant.py diff --git a/ai-service/Dockerfile b/ai-service/Dockerfile new file mode 100644 index 0000000..251933e --- /dev/null +++ b/ai-service/Dockerfile @@ -0,0 +1,32 @@ +# AI Service Backend Dockerfile +FROM python:3.11-slim 
AS builder + +WORKDIR /app + +RUN pip install --no-cache-dir uv + +COPY pyproject.toml . + +RUN uv pip install --system --no-cache-dir -r pyproject.toml + +FROM python:3.11-slim + +WORKDIR /app + +RUN groupadd -r appgroup && useradd -r -g appgroup appuser + +COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages +COPY --from=builder /usr/local/bin /usr/local/bin + +COPY app ./app + +RUN chown -R appuser:appgroup /app + +USER appuser + +EXPOSE 8080 + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8080"] diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..3db3ecb --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,88 @@ +version: '3.8' + +services: + ai-service: + build: + context: ./ai-service + dockerfile: Dockerfile + container_name: ai-service + restart: unless-stopped + ports: + - "8080:8080" + environment: + - AI_SERVICE_DEBUG=false + - AI_SERVICE_LOG_LEVEL=INFO + - AI_SERVICE_DATABASE_URL=postgresql+asyncpg://postgres:postgres@postgres:5432/ai_service + - AI_SERVICE_QDRANT_URL=http://qdrant:6333 + - AI_SERVICE_LLM_PROVIDER=${AI_SERVICE_LLM_PROVIDER:-openai} + - AI_SERVICE_LLM_API_KEY=${AI_SERVICE_LLM_API_KEY:-} + - AI_SERVICE_LLM_BASE_URL=${AI_SERVICE_LLM_BASE_URL:-https://api.openai.com/v1} + - AI_SERVICE_LLM_MODEL=${AI_SERVICE_LLM_MODEL:-gpt-4o-mini} + - AI_SERVICE_OLLAMA_BASE_URL=${AI_SERVICE_OLLAMA_BASE_URL:-http://ollama:11434} + depends_on: + postgres: + condition: service_healthy + qdrant: + condition: service_started + networks: + - ai-network + healthcheck: + test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8080/health')"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + ai-service-admin: + build: + context: ./ai-service-admin + dockerfile: Dockerfile + container_name: ai-service-admin + restart: unless-stopped + ports: + - "3000:80" + depends_on: + - ai-service + networks: + - ai-network + + 
postgres: + image: postgres:15-alpine + container_name: ai-postgres + restart: unless-stopped + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_DB=ai_service + volumes: + - postgres_data:/var/lib/postgresql/data + - ./ai-service/scripts/init_db.sql:/docker-entrypoint-initdb.d/init_db.sql:ro + ports: + - "5432:5432" + networks: + - ai-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d ai_service"] + interval: 10s + timeout: 5s + retries: 5 + + qdrant: + image: qdrant/qdrant:latest + container_name: ai-qdrant + restart: unless-stopped + ports: + - "6333:6333" + - "6334:6334" + volumes: + - qdrant_data:/qdrant/storage + networks: + - ai-network + +networks: + ai-network: + driver: bridge + +volumes: + postgres_data: + qdrant_data: