5 Commits

Author SHA1 Message Date
8b94507aeb Finished dockerization 2025-12-14 07:03:37 +01:00
52d568e924 Dockerizing the app (WIP) 2025-12-09 16:12:58 +01:00
da3d6f123d Fix tests and previous commit 2025-12-09 07:11:02 +01:00
ca63865b07 Move .env.example to parent folder 2025-12-09 05:40:01 +01:00
ec7d2d623f Updated folder structure (for Docker) 2025-12-09 05:35:59 +01:00
116 changed files with 661 additions and 54 deletions

View File

@@ -1,34 +1,69 @@
# LLM Provider Selection
# Options: "deepseek" or "ollama"
LLM_PROVIDER=ollama
# Agent Media - Environment Variables
# DeepSeek LLM Configuration (if using DeepSeek)
DEEPSEEK_API_KEY=your_deepseek_api_key
DEEPSEEK_BASE_URL=https://api.deepseek.com
DEEPSEEK_MODEL=deepseek-chat
# LibreChat Security Keys
# Generate secure keys with: openssl rand -base64 32
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
JWT_REFRESH_SECRET=your-super-secret-refresh-key-change-this-too
# Ollama LLM Configuration (if using Ollama)
OLLAMA_BASE_URL=http://localhost:11434
OLLAMA_MODEL=llama3.2
# Generate with: openssl rand -hex 16 (for CREDS_KEY)
CREDS_KEY=your-32-character-secret-key-here
# LLM Settings
TEMPERATURE=0.2
# Generate with: openssl rand -hex 8 (for CREDS_IV)
CREDS_IV=your-16-character-iv-here
# TMDB API Configuration
TMDB_API_KEY=your_tmdb_api_key
TMDB_BASE_URL=https://api.themoviedb.org/3
# LibreChat Configuration
DOMAIN_CLIENT=http://localhost:3080
DOMAIN_SERVER=http://localhost:3080
# Storage Configuration
MEMORY_FILE=memory.json
# Session expiry (in milliseconds)
# Default: 15 minutes
SESSION_EXPIRY=900000
# Refresh token expiry (in milliseconds)
# Default: 7 days
REFRESH_TOKEN_EXPIRY=604800000
# Meilisearch Configuration
# Master key for Meilisearch (generate with: openssl rand -base64 32)
MEILI_MASTER_KEY=your-meili-master-key-here
# PostgreSQL Configuration (for RAG API)
POSTGRES_DB=librechat_rag
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
# RAG API Configuration (Vector Database)
RAG_COLLECTION_NAME=testcollection
RAG_EMBEDDINGS_PROVIDER=openai
RAG_EMBEDDINGS_MODEL=text-embedding-3-small
# API Keys
# OpenAI API Key (required for RAG embeddings)
OPENAI_API_KEY=your-openai-api-key-here
# Deepseek API Key (for LLM in agent-brain)
DEEPSEEK_API_KEY=your-deepseek-api-key-here
# Agent Brain Configuration
# LLM Provider (deepseek or ollama)
LLM_PROVIDER=deepseek
# Memory storage directory (inside container)
MEMORY_STORAGE_DIR=/data/memory
# API Key for agent-brain (used by LibreChat custom endpoint)
AGENT_BRAIN_API_KEY=agent-brain-secret-key
# External Services (Optional)
# TMDB API Key (for movie metadata)
TMDB_API_KEY=your-tmdb-key
# qBittorrent Configuration
QBIT_HOST=http://192.168.178.47:30024
QBIT_USER=admin
QBIT_PASS=adminadmin
QBITTORRENT_URL=http://localhost:8080
QBITTORRENT_USERNAME=admin
QBITTORRENT_PASSWORD=adminpass
# Security Configuration
MAX_TOOL_ITERATIONS=10
REQUEST_TIMEOUT=30
# Memory Configuration
MAX_HISTORY_MESSAGES=10
# Debug Options
DEBUG_LOGGING=false
DEBUG_CONSOLE=false

57
brain/.dockerignore Normal file
View File

@@ -0,0 +1,57 @@
# Git
.git
.gitignore
.gitea
# Python
__pycache__
*.pyc
*.pyo
*.pyd
.Python
*.so
.pytest_cache
.coverage
htmlcov
.tox
.nox
.hypothesis
# Virtual environments
venv
.venv
env
.env
.env.*
# IDE
.vscode
.idea
*.swp
*.swo
.qodo
# Build
build
dist
*.egg-info
# Documentation
docs/
*.md
!README.md
# Tests
tests/
pytest.ini
# Data (will be mounted as volumes)
memory_data/
logs/
*.log
# Misc
*.bak
*.tmp
.DS_Store
Thumbs.db

91
brain/Dockerfile Normal file
View File

@@ -0,0 +1,91 @@
# syntax=docker/dockerfile:1
# Dockerfile for Agent Media (agent-brain service)
# Multi-stage build: a heavy builder stage resolves and installs Python
# dependencies with Poetry; the slim runtime stage copies only the
# resulting site-packages, keeping the final image small.

# ===========================================
# Stage 1: Builder
# ===========================================
FROM python:3.12.7-slim AS builder

# Build dependencies needed to compile native wheels.
# DEBIAN_FRONTEND is inlined so it does not persist in the stage env.
RUN DEBIAN_FRONTEND=noninteractive apt-get update \
    && apt-get install -y --no-install-recommends \
        build-essential \
    && rm -rf /var/lib/apt/lists/*

# Install Poetry globally (dependency resolver only; not shipped to runtime).
RUN pip install --no-cache-dir poetry

WORKDIR /tmp

# Copy only the dependency manifests first so this layer stays cached
# until pyproject.toml / poetry.lock actually change.
COPY pyproject.toml poetry.lock* ./

# Install runtime deps straight into the system site-packages (no venv)
# so the runtime stage can copy them wholesale from /usr/local.
RUN poetry config virtualenvs.create false \
    && poetry install --only main --no-root --no-cache

# ===========================================
# Stage 2: Runtime
# ===========================================
FROM python:3.12.7-slim AS runtime

# Runtime OS deps: curl is required by the HEALTHCHECK below.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Non-root user with a fixed UID so volume ownership is predictable
# across hosts/orchestrators.
RUN useradd -m -u 1000 -s /bin/bash appuser

# Persistent data directories (needs root to create under /data).
RUN mkdir -p /data/memory /data/logs \
    && chown -R appuser:appuser /data

# Copy installed packages and their console scripts from the builder.
COPY --from=builder /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin

# Drop privileges for everything from here on.
USER appuser
WORKDIR /home/appuser/app

# Application code, owned by the runtime user.
COPY --chown=appuser:appuser agent/ ./agent/
COPY --chown=appuser:appuser application/ ./application/
COPY --chown=appuser:appuser domain/ ./domain/
COPY --chown=appuser:appuser infrastructure/ ./infrastructure/
COPY --chown=appuser:appuser app.py .

# Declared after the directories are created and chowned above.
VOLUME ["/data/memory", "/data/logs"]

# Documentation only; publish with -p at run time.
EXPOSE 8000

HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Runtime defaults; all overridable via docker run / compose.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONPATH=/home/appuser/app \
    LLM_PROVIDER=deepseek \
    MEMORY_STORAGE_DIR=/data/memory

# Exec form: uvicorn runs as PID 1 and receives SIGTERM directly.
CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]

View File

@@ -9,6 +9,7 @@ from typing import Any
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel, Field, validator
from agent.agent import Agent
@@ -29,9 +30,21 @@ app = FastAPI(
version="0.2.0",
)
# TODO: Make a variable
manifests = "manifests"
# Sécurité : on vérifie que le dossier existe pour ne pas faire planter l'app au démarrage
if os.path.exists(manifests):
app.mount("/manifests", StaticFiles(directory=manifests), name="manifests")
else:
print(
f"⚠️ ATTENTION : Le dossier '{manifests}' est introuvable. Le plugin ne marchera pas."
)
# Initialize memory context at startup
init_memory(storage_dir="memory_data")
logger.info("Memory context initialized")
# Use /data/memory in Docker, fallback to memory_data for local dev
storage_dir = os.getenv("MEMORY_STORAGE_DIR", "memory_data")
init_memory(storage_dir=storage_dir)
logger.info(f"Memory context initialized (storage: {storage_dir})")
# Initialize LLM based on environment variable
llm_provider = os.getenv("LLM_PROVIDER", "deepseek").lower()

View File

@@ -496,7 +496,7 @@ class Memory:
storage_dir: Directory for persistent storage
"""
self.storage_dir = Path(storage_dir)
self.storage_dir.mkdir(exist_ok=True)
self.storage_dir.mkdir(parents=True, exist_ok=True)
self.ltm_file = self.storage_dir / "ltm.json"

View File

@@ -0,0 +1,16 @@
{
"name": "add_torrent_by_index",
"description": "Ajoute un torrent à la file d'attente de qBittorrent en utilisant l'index (1-basé) d'un résultat de recherche précédent (par exemple, 'download the 3rd one').",
"parameters": {
"type": "object",
"properties": {
"index": {
"type": "integer",
"description": "L'index (1-basé) du torrent dans les derniers résultats de recherche."
}
},
"required": [
"index"
]
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "find_media_imdb_id",
"description": "Trouve l'ID IMDb et les informations d'un film ou d'une série télévisée à partir de son titre en utilisant l'API TMDB. À utiliser comme première étape avant de chercher des torrents.",
"parameters": {
"type": "object",
"properties": {
"media_title": {
"type": "string",
"description": "Le titre exact du média à rechercher (par exemple, 'Inception', 'Breaking Bad')."
}
},
"required": [
"media_title"
]
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "find_torrent",
"description": "Recherche des fichiers torrent pour un titre de média donné. Les résultats sont stockés dans la mémoire de l'agent pour une référence ultérieure par index (e.g., 'download the 3rd one').",
"parameters": {
"type": "object",
"properties": {
"media_title": {
"type": "string",
"description": "Le titre du média pour lequel rechercher des torrents (par exemple, 'Inception 2010')."
}
},
"required": [
"media_title"
]
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "set_language",
"description": "Définit la langue de la conversation pour l'agent.",
"parameters": {
"type": "object",
"properties": {
"language": {
"type": "string",
"description": "Le code de la langue (par exemple, 'en' pour Anglais, 'fr' pour Français, 'es' pour Espagnol)."
}
},
"required": [
"language"
]
}
}

View File

View File

@@ -1,5 +1,11 @@
"""Pytest configuration and shared fixtures."""
import sys
from pathlib import Path
# Ajouter le dossier parent (brain) au PYTHONPATH
sys.path.insert(0, str(Path(__file__).parent.parent))
import shutil
import tempfile
from pathlib import Path

Some files were not shown because too many files have changed in this diff Show More