infra: proper librechat integration & improved configuration handling

Reviewed-on: https://gitea.iswearihadsomethingforthis.net/francwa/alfred/pulls/11
This commit was merged in pull request #11.
This commit is contained in:
2025-12-28 11:56:53 +00:00
122 changed files with 806 additions and 765 deletions

View File

@@ -41,11 +41,8 @@ docs/
*.md *.md
!README.md !README.md
# Tests # Data
tests/ data/
pytest.ini
# Data (will be mounted as volumes)
memory_data/ memory_data/
logs/ logs/
*.log *.log

View File

@@ -1,69 +1,53 @@
# Agent Media - Environment Variables # Configuration
LIBRECHAT_VERSION=v0.8.1
RAG_VERSION=v0.7.0
# LibreChat Security Keys # Keys
# Generate secure keys with: openssl rand -base64 32 # - Deepseek API
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production DEEPSEEK_API_KEY=
JWT_REFRESH_SECRET=your-super-secret-refresh-key-change-this-too
# Generate with: openssl rand -hex 16 (for CREDS_KEY) # - Google API
CREDS_KEY=your-32-character-secret-key-here GOOGLE_API_KEY=
#GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite #TODO:Update models
# Generate with: openssl rand -hex 8 (for CREDS_IV) # - Anthropic API
CREDS_IV=your-16-character-iv-here ANTHROPIC_API_KEY=
# LibreChat Configuration # - Kimi API
DOMAIN_CLIENT=http://localhost:3080 KIMI_API_KEY=
DOMAIN_SERVER=http://localhost:3080
# Session expiry (in milliseconds) # - ChatGPT/Open API
# Default: 15 minutes OPENAI_API_KEY=
SESSION_EXPIRY=900000
# Refresh token expiry (in milliseconds) # - Themoviedb.org API (media metadata)
# Default: 7 days TMDB_API_KEY=
REFRESH_TOKEN_EXPIRY=604800000
# Meilisearch Configuration # - Security keys
# Master key for Meilisearch (generate with: openssl rand -base64 32) JWT_SECRET=
MEILI_MASTER_KEY=DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU JWT_REFRESH_SECRET=
CREDS_KEY=
CREDS_IV=
# PostgreSQL Configuration (for RAG API) # Local LLM
POSTGRES_DB=librechat_rag OLLAMA_BASE_URL=
POSTGRES_USER=postgres OLLAMA_MODEL=
POSTGRES_PASSWORD=postgres
# RAG API Configuration (Vector Database) # Alfred Configuration
RAG_COLLECTION_NAME=testcollection
RAG_EMBEDDINGS_PROVIDER=openai
RAG_EMBEDDINGS_MODEL=text-embedding-3-small
# API Keys
# OpenAI API Key (required for RAG embeddings)
OPENAI_API_KEY=your-openai-api-key-here
# Deepseek API Key (for LLM in agent-brain)
DEEPSEEK_API_KEY=your-deepseek-api-key-here
# Agent Brain Configuration
# LLM Provider (deepseek or ollama)
LLM_PROVIDER=deepseek LLM_PROVIDER=deepseek
# Memory storage directory (inside container) # Memory storage directory (inside container)
MEMORY_STORAGE_DIR=/data/memory MEMORY_STORAGE_DIR=/data/memory
# API Key for agent-brain (used by LibreChat custom endpoint)
AGENT_BRAIN_API_KEY=agent-brain-secret-key
# External Services (Optional)
# TMDB API Key (for movie metadata)
TMDB_API_KEY=your-tmdb-key
# qBittorrent Configuration # qBittorrent Configuration
QBITTORRENT_URL=http://localhost:8080 QBITTORRENT_URL=
QBITTORRENT_USERNAME=admin QBITTORRENT_USERNAME=admin
QBITTORRENT_PASSWORD=adminpass QBITTORRENT_PASSWORD=adminadmin
# Debug Options # Debug Options
DEBUG_LOGGING=false DEBUG_LOGGING=false
DEBUG_CONSOLE=false DEBUG_CONSOLE=false
# Postgres (RAG)
POSTGRES_DB=
POSTGRES_USER=
POSTGRES_PASSWORD=

View File

@@ -63,7 +63,6 @@ jobs:
uses: docker/build-push-action@v5 uses: docker/build-push-action@v5
with: with:
context: . context: .
file: ./brain/Dockerfile
push: true push: true
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}

6
.gitignore vendored
View File

@@ -59,3 +59,9 @@ Thumbs.db
# Backup files # Backup files
*.backup *.backup
# Application data dir
data/*
# Application logs
logs/*

View File

@@ -30,7 +30,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
WORKDIR /tmp WORKDIR /tmp
# Copy dependency files # Copy dependency files
COPY brain/pyproject.toml brain/poetry.lock* brain/uv.lock* Makefile ./ COPY pyproject.toml poetry.lock* uv.lock* Makefile ./
# Install dependencies as root (to avoid permission issues with system packages) # Install dependencies as root (to avoid permission issues with system packages)
RUN --mount=type=cache,target=/root/.cache/pip \ RUN --mount=type=cache,target=/root/.cache/pip \
@@ -59,12 +59,8 @@ RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system -e .[dev]; \ uv pip install --system -e .[dev]; \
fi fi
COPY brain/agent/ ./agent/ COPY alfred/ ./alfred
COPY brain/application/ ./application/ COPY tests/ ./tests
COPY brain/domain/ ./domain/
COPY brain/infrastructure/ ./infrastructure/
COPY brain/tests/ ./tests/
COPY brain/app.py .
# =========================================== # ===========================================
# Stage 3: Runtime # Stage 3: Runtime
@@ -96,18 +92,14 @@ RUN mkdir -p /data/memory /data/logs \
USER appuser USER appuser
# Set working directory (owned by appuser) # Set working directory (owned by appuser)
WORKDIR /home/appuser/app WORKDIR /home/appuser
# Copy Python packages from builder stage # Copy Python packages from builder stage
COPY --from=builder /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages COPY --from=builder /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin COPY --from=builder /usr/local/bin /usr/local/bin
# Copy application code (already owned by appuser) # Copy application code (already owned by appuser)
COPY --chown=appuser:appuser brain/agent/ ./agent/ COPY --chown=appuser:appuser alfred/ ./alfred
COPY --chown=appuser:appuser brain/application/ ./application/
COPY --chown=appuser:appuser brain/domain/ ./domain/
COPY --chown=appuser:appuser brain/infrastructure/ ./infrastructure/
COPY --chown=appuser:appuser brain/app.py .
# Create volumes for persistent data # Create volumes for persistent data
VOLUME ["/data/memory", "/data/logs"] VOLUME ["/data/memory", "/data/logs"]

405
Makefile
View File

@@ -1,249 +1,155 @@
.POSIX:
.SUFFIXES:
.DEFAULT_GOAL := help .DEFAULT_GOAL := help
# --- SETTINGS --- # --- Config ---
CORE_DIR = brain export IMAGE_NAME := alfred_media_organizer
IMAGE_NAME = agent_media export LIBRECHAT_VERSION := v0.8.1
# renovate: datasource=docker depName=python export PYTHON_VERSION := 3.14.2
PYTHON_VERSION = $(shell grep "python" $(CORE_DIR)/pyproject.toml | head -n 1 | sed -E 's/.*[=<>^~"]+ *([0-9]+\.[0-9]+(\.[0-9]+)?).*/\1/') export PYTHON_VERSION_SHORT := 3.14
PYTHON_VERSION_SHORT = $(shell echo $(PYTHON_VERSION) | cut -d. -f1,2) export RAG_VERSION := v0.7.0
# Change to 'uv' when ready. export RUNNER := poetry
RUNNER ?= poetry export SERVICE_NAME := alfred
SERVICE_NAME = agent_media
export IMAGE_NAME # --- Commands ---
export PYTHON_VERSION CLI := python3 cli.py
export PYTHON_VERSION_SHORT DOCKER_COMPOSE := docker compose
export RUNNER DOCKER_BUILD := docker build \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
--build-arg RUNNER=$(RUNNER)
# --- ADAPTERS --- # --- Phony ---
# UV uses "sync", Poetry uses "install". Both install DEV deps by default. .PHONY: setup status check
INSTALL_CMD = $(if $(filter uv,$(RUNNER)),sync,install) .PHONY: up down restart logs ps shell
.PHONY: build build-test
.PHONY: install update install-hooks
.PHONY: test coverage lint format clean prune
.PHONY: major minor patch
.PHONY: help
# --- MACROS --- # --- Setup ---
ARGS = $(filter-out $@,$(MAKECMDGOALS)) setup:
BUMP_CMD = cd $(CORE_DIR) && $(RUNNER) run bump-my-version bump @echo "Initializing environment..."
COMPOSE_CMD = docker-compose @$(CLI) setup \
DOCKER_CMD = docker build \ && echo "✓ Environment ready" \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \ || (echo "✗ Setup failed" && exit 1)
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
--build-arg RUNNER=$(RUNNER) \
-f $(CORE_DIR)/Dockerfile \
-t $(IMAGE_NAME):latest .
RUNNER_ADD = cd $(CORE_DIR) && $(RUNNER) add status:
RUNNER_HOOKS = cd $(CORE_DIR) && $(RUNNER) run pre-commit install -c ../.pre-commit-config.yaml @$(CLI) status
RUNNER_INSTALL = cd $(CORE_DIR) && $(RUNNER) $(INSTALL_CMD)
RUNNER_RUN = cd $(CORE_DIR) && $(RUNNER) run
RUNNER_UPDATE = cd $(CORE_DIR) && $(RUNNER) update
# --- STYLES --- check:
B = \033[1m @$(CLI) check
G = \033[32m
T = \033[36m
R = \033[0m
# --- TARGETS --- # --- Docker ---
.PHONY: add build build-test check-docker check-runner clean coverage down format help init-dotenv install install-hooks lint logs major minor patch prune ps python-version restart run shell test up update _check_branch _ci-dump-config _ci-run-tests _push_tag up: check
@echo "Starting containers..."
@$(DOCKER_COMPOSE) up -d --remove-orphans \
&& echo "✓ Containers started" \
|| (echo "✗ Failed to start containers" && exit 1)
# Catch-all for args down:
%: @echo "Stopping containers..."
@: @$(DOCKER_COMPOSE) down \
&& echo "✓ Containers stopped" \
|| (echo "✗ Failed to stop containers" && exit 1)
add: check-runner restart:
@echo "$(T) Adding dependency ($(RUNNER)): $(ARGS)$(R)" @echo "Restarting containers..."
$(RUNNER_ADD) $(ARGS) @$(DOCKER_COMPOSE) restart \
&& echo "✓ Containers restarted" \
|| (echo "✗ Failed to restart containers" && exit 1)
build: check-docker logs:
@echo "$(T)🐳 Building Docker image...$(R)" @echo "Following logs (Ctrl+C to exit)..."
$(DOCKER_CMD) @$(DOCKER_COMPOSE) logs -f
@echo "✅ Image $(IMAGE_NAME):latest ready."
build-test: check-docker ps:
@echo "$(T)🐳 Building test image (with dev deps)...$(R)" @echo "Container status:"
docker build \ @$(DOCKER_COMPOSE) ps
--build-arg RUNNER=$(RUNNER) \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
-f $(CORE_DIR)/Dockerfile \
--target test \
-t $(IMAGE_NAME):test .
@echo "✅ Test image $(IMAGE_NAME):test ready."
check-docker: shell:
@command -v docker >/dev/null 2>&1 || { echo "$(R)❌ Docker not installed$(R)"; exit 1; } @echo "Opening shell in $(SERVICE_NAME)..."
@docker info >/dev/null 2>&1 || { echo "$(R)❌ Docker daemon not running$(R)"; exit 1; } @$(DOCKER_COMPOSE) exec $(SERVICE_NAME) /bin/bash
check-runner: # --- Build ---
@command -v $(RUNNER) >/dev/null 2>&1 || { echo "$(R)$(RUNNER) not installed$(R)"; exit 1; } build: check
@echo "Building image $(IMAGE_NAME):latest ..."
@$(DOCKER_BUILD) -t $(IMAGE_NAME):latest . \
&& echo "✓ Build complete" \
|| (echo "✗ Build failed" && exit 1)
build-test: check
@echo "Building test image $(IMAGE_NAME):test..."
@$(DOCKER_BUILD) --target test -t $(IMAGE_NAME):test . \
&& echo "✓ Test image built" \
|| (echo "✗ Build failed" && exit 1)
# --- Dependencies ---
install:
@echo "Installing dependencies with $(RUNNER)..."
@$(RUNNER) install \
&& echo "✓ Dependencies installed" \
|| (echo "✗ Installation failed" && exit 1)
update:
@echo "Updating dependencies with $(RUNNER)..."
@$(RUNNER) update \
&& echo "✓ Dependencies updated" \
|| (echo "✗ Update failed" && exit 1)
install-hooks:
@echo "Installing pre-commit hooks..."
@$(RUNNER) run pre-commit install \
&& echo "✓ Hooks installed" \
|| (echo "✗ Hook installation failed" && exit 1)
# --- Quality ---
test:
@echo "Running tests..."
@$(RUNNER) run pytest \
&& echo "✓ Tests passed" \
|| (echo "✗ Tests failed" && exit 1)
coverage:
@echo "Running tests with coverage..."
@$(RUNNER) run pytest --cov=. --cov-report=html --cov-report=term \
&& echo "✓ Coverage report generated" \
|| (echo "✗ Coverage failed" && exit 1)
lint:
@echo "Linting code..."
@$(RUNNER) run ruff check --fix . \
&& echo "✓ Linting complete" \
|| (echo "✗ Linting failed" && exit 1)
format:
@echo "Formatting code..."
@$(RUNNER) run ruff format . && $(RUNNER) run ruff check --fix . \
&& echo "✓ Code formatted" \
|| (echo "✗ Formatting failed" && exit 1)
clean: clean:
@echo "$(T)🧹 Cleaning caches...$(R)" @echo "Cleaning build artifacts..."
cd $(CORE_DIR) && rm -rf .ruff_cache __pycache__ .pytest_cache @rm -rf .ruff_cache __pycache__ .pytest_cache htmlcov .coverage
find $(CORE_DIR) -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true @find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
find $(CORE_DIR) -type d -name ".pytest_cache" -exec rm -rf {} + 2>/dev/null || true @echo "✓ Cleanup complete"
find $(CORE_DIR) -type f -name "*.pyc" -delete 2>/dev/null || true
@echo "✅ Caches cleaned."
coverage: check-runner prune:
@echo "$(T)📊 Running tests with coverage...$(R)" @echo "Pruning Docker system..."
$(RUNNER_RUN) pytest --cov=. --cov-report=html --cov-report=term $(ARGS) @docker system prune -af \
@echo "✅ Report generated in htmlcov/" && echo "✓ Docker pruned" \
|| (echo "✗ Prune failed" && exit 1)
down: check-docker # --- Versioning ---
@echo "$(T)🛑 Stopping containers...$(R)" major minor patch: _check-main
$(COMPOSE_CMD) down @echo "Bumping $@ version..."
@echo "✅ System stopped." @$(RUNNER) run bump-my-version bump $@ \
&& echo "✓ Version bumped" \
|| (echo "✗ Version bump failed" && exit 1)
format: check-runner @echo "Pushing tags..."
@echo "$(T)✨ Formatting with Ruff...$(R)" @git push --tags \
$(RUNNER_RUN) ruff format . && echo "✓ Tags pushed" \
$(RUNNER_RUN) ruff check --fix . || (echo "✗ Push failed" && exit 1)
@echo "✅ Code cleaned."
help:
@echo "$(B)Available commands:$(R)"
@echo ""
@echo "$(G)Setup:$(R)"
@echo " $(T)check-docker $(R) Verify Docker is installed and running."
@echo " $(T)check-runner $(R) Verify package manager ($(RUNNER))."
@echo " $(T)init-dotenv $(R) Create .env from .env.example with generated secrets."
@echo " $(T)install $(R) Install ALL dependencies (Prod + Dev)."
@echo " $(T)install-hooks $(R) Install git pre-commit hooks."
@echo ""
@echo "$(G)Docker:$(R)"
@echo " $(T)build $(R) Build the docker image (production)."
@echo " $(T)build-test $(R) Build the docker image (with dev deps for testing)."
@echo " $(T)down $(R) Stop and remove containers."
@echo " $(T)logs $(R) Follow logs."
@echo " $(T)prune $(R) Clean Docker system."
@echo " $(T)ps $(R) Show container status."
@echo " $(T)restart $(R) Restart all containers."
@echo " $(T)shell $(R) Open shell in container."
@echo " $(T)up $(R) Start the agent."
@echo ""
@echo "$(G)Development:$(R)"
@echo " $(T)add ... $(R) Add dependency (use --group dev or --dev if needed)."
@echo " $(T)clean $(R) Clean caches."
@echo " $(T)coverage $(R) Run tests with coverage."
@echo " $(T)format $(R) Format code (Ruff)."
@echo " $(T)lint $(R) Lint code without fixing."
@echo " $(T)test ... $(R) Run tests (local with $(RUNNER))."
@echo " $(T)update $(R) Update dependencies."
@echo ""
@echo "$(G)Versioning:$(R)"
@echo " $(T)major/minor/patch $(R) Bump version and push tag (triggers CI/CD)."
init-dotenv:
@echo "$(T)🔑 Initializing .env file...$(R)"
@if [ -f .env ]; then \
echo "$(R)⚠️ .env already exists. Skipping.$(R)"; \
exit 0; \
fi
@if [ ! -f .env.example ]; then \
echo "$(R)❌ .env.example not found$(R)"; \
exit 1; \
fi
@if ! command -v openssl >/dev/null 2>&1; then \
echo "$(R)❌ openssl not found. Please install it first.$(R)"; \
exit 1; \
fi
@echo "$(T) → Copying .env.example...$(R)"
@cp .env.example .env
@echo "$(T) → Generating secrets...$(R)"
@sed -i.bak "s|JWT_SECRET=.*|JWT_SECRET=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|JWT_REFRESH_SECRET=.*|JWT_REFRESH_SECRET=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|CREDS_KEY=.*|CREDS_KEY=$$(openssl rand -hex 16)|" .env
@sed -i.bak "s|CREDS_IV=.*|CREDS_IV=$$(openssl rand -hex 8)|" .env
@sed -i.bak "s|MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|AGENT_BRAIN_API_KEY=.*|AGENT_BRAIN_API_KEY=$$(openssl rand -base64 24)|" .env
@rm -f .env.bak
@echo "$(G)✅ .env created with generated secrets!$(R)"
@echo "$(T)⚠️ Don't forget to add your API keys:$(R)"
@echo " - OPENAI_API_KEY"
@echo " - DEEPSEEK_API_KEY"
@echo " - TMDB_API_KEY (optional)"
install: check-runner
@echo "$(T)📦 Installing FULL environment ($(RUNNER))...$(R)"
$(RUNNER_INSTALL)
@echo "✅ Environment ready (Prod + Dev)."
install-hooks: check-runner
@echo "$(T)🔧 Installing hooks...$(R)"
$(RUNNER_HOOKS)
@echo "✅ Hooks ready."
lint: check-runner
@echo "$(T)🔍 Linting code...$(R)"
$(RUNNER_RUN) ruff check .
logs: check-docker
@echo "$(T)📋 Following logs...$(R)"
$(COMPOSE_CMD) logs -f
major: _check_branch
@echo "$(T)💥 Bumping major...$(R)"
SKIP=all $(BUMP_CMD) major
@$(MAKE) -s _push_tag
minor: _check_branch
@echo "$(T)✨ Bumping minor...$(R)"
SKIP=all $(BUMP_CMD) minor
@$(MAKE) -s _push_tag
patch: _check_branch
@echo "$(T)🚀 Bumping patch...$(R)"
SKIP=all $(BUMP_CMD) patch
@$(MAKE) -s _push_tag
prune: check-docker
@echo "$(T)🗑️ Pruning Docker resources...$(R)"
docker system prune -af
@echo "✅ Docker cleaned."
ps: check-docker
@echo "$(T)📋 Container status:$(R)"
@$(COMPOSE_CMD) ps
python-version:
@echo "🔍 Reading pyproject.toml..."
@echo "✅ Python version : $(PYTHON_VERSION)"
@echo " Sera utilisé pour : FROM python:$(PYTHON_VERSION)-slim"
restart: check-docker
@echo "$(T)🔄 Restarting containers...$(R)"
$(COMPOSE_CMD) restart
@echo "✅ Containers restarted."
run: check-runner
$(RUNNER_RUN) $(ARGS)
shell: check-docker
@echo "$(T)🐚 Opening shell in $(SERVICE_NAME)...$(R)"
$(COMPOSE_CMD) exec $(SERVICE_NAME) /bin/sh
test: check-runner
@echo "$(T)🧪 Running tests...$(R)"
$(RUNNER_RUN) pytest $(ARGS)
up: check-docker
@echo "$(T)🚀 Starting Agent Media...$(R)"
$(COMPOSE_CMD) up -d
@echo "✅ System is up."
update: check-runner
@echo "$(T)🔄 Updating dependencies...$(R)"
$(RUNNER_UPDATE)
@echo "✅ All packages up to date."
_check_branch:
@curr=$$(git rev-parse --abbrev-ref HEAD); \
if [ "$$curr" != "main" ]; then \
echo "❌ Error: not on the main branch"; exit 1; \
fi
_ci-dump-config: _ci-dump-config:
@echo "image_name=$(IMAGE_NAME)" @echo "image_name=$(IMAGE_NAME)"
@@ -252,15 +158,46 @@ _ci-dump-config:
@echo "runner=$(RUNNER)" @echo "runner=$(RUNNER)"
@echo "service_name=$(SERVICE_NAME)" @echo "service_name=$(SERVICE_NAME)"
_ci-run-tests: build-test _ci-run-tests:
@echo "$(T)🧪 Running tests in Docker...$(R)" @echo "Running tests in Docker..."
docker run --rm \ docker run --rm \
-e DEEPSEEK_API_KEY \ -e DEEPSEEK_API_KEY \
-e TMDB_API_KEY \ -e TMDB_API_KEY \
-e QBITTORRENT_URL \
$(IMAGE_NAME):test pytest $(IMAGE_NAME):test pytest
@echo " Tests passed." @echo " Tests passed."
_push_tag: _check-main:
@echo "$(T)📦 Pushing tag...$(R)" @test "$$(git rev-parse --abbrev-ref HEAD)" = "main" \
git push --tags || (echo "✗ ERROR: Not on main branch" && exit 1)
@echo "✅ Tag pushed. Check CI for build status."
# --- Help ---
help:
@echo "Usage: make [target]"
@echo ""
@echo "Setup:"
@echo " setup Initialize .env"
@echo " status Show project status"
@echo ""
@echo "Docker:"
@echo " up Start containers"
@echo " down Stop containers"
@echo " restart Restart containers"
@echo " logs Follow logs"
@echo " ps Container status"
@echo " shell Shell into container"
@echo " build Build image"
@echo ""
@echo "Dev:"
@echo " install Install dependencies"
@echo " update Update dependencies"
@echo " test Run tests"
@echo " coverage Run tests with coverage"
@echo " lint Lint code"
@echo " format Format code"
@echo " clean Clean artifacts"
@echo ""
@echo "Release:"
@echo " patch Bump patch version"
@echo " minor Bump minor version"
@echo " major Bump major version"

View File

@@ -5,7 +5,7 @@ import logging
from collections.abc import AsyncGenerator from collections.abc import AsyncGenerator
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from .config import settings from .config import settings
from .prompts import PromptBuilder from .prompts import PromptBuilder

View File

@@ -3,7 +3,7 @@
import json import json
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from .registry import Tool from .registry import Tool
@@ -52,7 +52,7 @@ class PromptBuilder:
# Show first 5 results # Show first 5 results
for i, result in enumerate(result_list[:5]): for i, result in enumerate(result_list[:5]):
name = result.get("name", "Unknown") name = result.get("name", "Unknown")
lines.append(f" {i+1}. {name}") lines.append(f" {i + 1}. {name}")
if len(result_list) > 5: if len(result_list) > 5:
lines.append(f" ... and {len(result_list) - 5} more") lines.append(f" ... and {len(result_list) - 5} more")

View File

@@ -3,12 +3,12 @@
import logging import logging
from typing import Any from typing import Any
from application.movies import SearchMovieUseCase from alfred.application.movies import SearchMovieUseCase
from application.torrents import AddTorrentUseCase, SearchTorrentsUseCase from alfred.application.torrents import AddTorrentUseCase, SearchTorrentsUseCase
from infrastructure.api.knaben import knaben_client from alfred.infrastructure.api.knaben import knaben_client
from infrastructure.api.qbittorrent import qbittorrent_client from alfred.infrastructure.api.qbittorrent import qbittorrent_client
from infrastructure.api.tmdb import tmdb_client from alfred.infrastructure.api.tmdb import tmdb_client
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -2,8 +2,8 @@
from typing import Any from typing import Any
from application.filesystem import ListFolderUseCase, SetFolderPathUseCase from alfred.application.filesystem import ListFolderUseCase, SetFolderPathUseCase
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
def set_path_for_folder(folder_name: str, path_value: str) -> dict[str, Any]: def set_path_for_folder(folder_name: str, path_value: str) -> dict[str, Any]:

View File

@@ -3,7 +3,7 @@
import logging import logging
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -12,12 +12,12 @@ from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel, Field, validator from pydantic import BaseModel, Field, validator
from agent.agent import Agent from alfred.agent.agent import Agent
from agent.config import settings from alfred.agent.config import settings
from agent.llm.deepseek import DeepSeekClient from alfred.agent.llm.deepseek import DeepSeekClient
from agent.llm.exceptions import LLMAPIError, LLMConfigurationError from alfred.agent.llm.exceptions import LLMAPIError, LLMConfigurationError
from agent.llm.ollama import OllamaClient from alfred.agent.llm.ollama import OllamaClient
from infrastructure.persistence import get_memory, init_memory from alfred.infrastructure.persistence import get_memory, init_memory
logging.basicConfig( logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
@@ -41,7 +41,6 @@ else:
) )
# Initialize memory context at startup # Initialize memory context at startup
# Use /data/memory in Docker, fallback to memory_data for local dev
storage_dir = os.getenv("MEMORY_STORAGE_DIR", "memory_data") storage_dir = os.getenv("MEMORY_STORAGE_DIR", "memory_data")
init_memory(storage_dir=storage_dir) init_memory(storage_dir=storage_dir)
logger.info(f"Memory context initialized (storage: {storage_dir})") logger.info(f"Memory context initialized (storage: {storage_dir})")

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
from .dto import ListFolderResponse from .dto import ListFolderResponse

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
from .dto import SetFolderPathResponse from .dto import SetFolderPathResponse

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.api.tmdb import ( from alfred.infrastructure.api.tmdb import (
TMDBAPIError, TMDBAPIError,
TMDBClient, TMDBClient,
TMDBConfigurationError, TMDBConfigurationError,

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.api.qbittorrent import ( from alfred.infrastructure.api.qbittorrent import (
QBittorrentAPIError, QBittorrentAPIError,
QBittorrentAuthError, QBittorrentAuthError,
QBittorrentClient, QBittorrentClient,

View File

@@ -2,7 +2,11 @@
import logging import logging
from infrastructure.api.knaben import KnabenAPIError, KnabenClient, KnabenNotFoundError from alfred.infrastructure.api.knaben import (
KnabenAPIError,
KnabenClient,
KnabenNotFoundError,
)
from .dto import SearchTorrentsResponse from .dto import SearchTorrentsResponse

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.agent.config import Settings, settings
from .dto import TorrentResult from .dto import TorrentResult
from .exceptions import KnabenAPIError, KnabenNotFoundError from .exceptions import KnabenAPIError, KnabenNotFoundError

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.agent.config import Settings, settings
from .dto import TorrentInfo from .dto import TorrentInfo
from .exceptions import QBittorrentAPIError, QBittorrentAuthError from .exceptions import QBittorrentAPIError, QBittorrentAuthError

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.agent.config import Settings, settings
from .dto import MediaResult from .dto import MediaResult
from .exceptions import ( from .exceptions import (

View File

@@ -7,7 +7,7 @@ from enum import Enum
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from .exceptions import PathTraversalError from .exceptions import PathTraversalError

View File

@@ -3,9 +3,9 @@
import logging import logging
from pathlib import Path from pathlib import Path
from domain.movies.entities import Movie from alfred.domain.movies.entities import Movie
from domain.tv_shows.entities import Episode, Season, TVShow from alfred.domain.tv_shows.entities import Episode, Season, TVShow
from domain.tv_shows.value_objects import SeasonNumber from alfred.domain.tv_shows.value_objects import SeasonNumber
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -6,7 +6,7 @@ without passing it explicitly through all function calls.
Usage: Usage:
# At application startup # At application startup
from infrastructure.persistence import init_memory, get_memory from alfred.infrastructure.persistence import init_memory, get_memory
init_memory("memory_data") init_memory("memory_data")

View File

@@ -4,11 +4,11 @@ import logging
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any
from domain.movies.entities import Movie from alfred.domain.movies.entities import Movie
from domain.movies.repositories import MovieRepository from alfred.domain.movies.repositories import MovieRepository
from domain.movies.value_objects import MovieTitle, Quality, ReleaseYear from alfred.domain.movies.value_objects import MovieTitle, Quality, ReleaseYear
from domain.shared.value_objects import FilePath, FileSize, ImdbId from alfred.domain.shared.value_objects import FilePath, FileSize, ImdbId
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -3,11 +3,11 @@
import logging import logging
from typing import Any from typing import Any
from domain.shared.value_objects import FilePath, ImdbId from alfred.domain.shared.value_objects import FilePath, ImdbId
from domain.subtitles.entities import Subtitle from alfred.domain.subtitles.entities import Subtitle
from domain.subtitles.repositories import SubtitleRepository from alfred.domain.subtitles.repositories import SubtitleRepository
from domain.subtitles.value_objects import Language, SubtitleFormat, TimingOffset from alfred.domain.subtitles.value_objects import Language, SubtitleFormat, TimingOffset
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -4,11 +4,11 @@ import logging
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any
from domain.shared.value_objects import ImdbId from alfred.domain.shared.value_objects import ImdbId
from domain.tv_shows.entities import TVShow from alfred.domain.tv_shows.entities import TVShow
from domain.tv_shows.repositories import TVShowRepository from alfred.domain.tv_shows.repositories import TVShowRepository
from domain.tv_shows.value_objects import ShowStatus from alfred.domain.tv_shows.value_objects import ShowStatus
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

231
cli.py Normal file
View File

@@ -0,0 +1,231 @@
#!/usr/bin/env python3
import os
import secrets
import shutil
import subprocess
import sys
from datetime import datetime
from enum import StrEnum
from pathlib import Path
from typing import NoReturn
# Variables the user must supply by hand; setup prompts for any that
# are missing or empty in .env.
REQUIRED_VARS = ["DEEPSEEK_API_KEY", "TMDB_API_KEY", "QBITTORRENT_URL"]
# Secrets generated automatically via secrets.token_hex().
# Size in bytes of entropy; the resulting hex string is twice as long
# (e.g. 32 bytes -> 64 hex characters).
# NOTE(review): confirm these lengths match what LibreChat expects for
# CREDS_KEY / CREDS_IV (the previous Makefile used openssl rand -hex 16
# and -hex 8 respectively) — verify against LibreChat's docs.
KEYS_TO_GENERATE = {
    "JWT_SECRET": 32,
    "JWT_REFRESH_SECRET": 32,
    "CREDS_KEY": 32,
    "CREDS_IV": 16,
}
class Style(StrEnum):
    """ANSI escape codes for styling terminal output.

    Members are plain strings (StrEnum), so they interpolate directly:
        f"{Style.RED}Error{Style.RESET}"
    """

    RESET = "\033[0m"
    BOLD = "\033[1m"
    RED = "\033[31m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    CYAN = "\033[36m"
    DIM = "\033[2m"
# Colorize only when stdout is an interactive terminal, and honor the
# NO_COLOR convention (https://no-color.org) so users can opt out.
USE_COLORS = sys.stdout.isatty() and "NO_COLOR" not in os.environ
def styled(text: str, color_code: str) -> str:
    """Wrap *text* in ANSI codes when the terminal supports color.

    Returns *text* unchanged when colors are disabled (non-TTY or
    NO_COLOR set).
    """
    if not USE_COLORS:
        return text
    return f"{color_code}{text}{Style.RESET}"
def log(msg: str, color: str | None = None, prefix="") -> None:
    """Print *msg* to stdout, optionally colorized and prefixed."""
    body = styled(msg, color) if color else msg
    print(f"{prefix}{body}")
def error_exit(msg: str) -> NoReturn:
    """Print *msg* in red to stdout and terminate with exit status 1."""
    # The original wrapped msg in a no-op f-string (f"{msg}"); pass it
    # through directly.
    log(msg, Style.RED)
    sys.exit(1)
def is_docker_running() -> bool:
    """Return True when the Docker daemon answers ``docker info``.

    Note the asymmetry: a missing ``docker`` binary terminates the
    process via error_exit, while an installed-but-stopped daemon
    returns False so the caller can report it.
    """
    # (Fixed: the original docstring began with a stray quote: """ "Check...)
    if shutil.which("docker") is None:
        error_exit("Docker is not installed.")
    result = subprocess.run(
        ["docker", "info"],
        # Redirect stdout/stderr to keep output clean on success
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        # Prevent exception being raised; the return code is the answer
        check=False,
    )
    return result.returncode == 0
def parse_env(content: str) -> dict[str, str]:
    """Extract KEY=VALUE pairs from dotenv-style text.

    Blank lines, comment lines (starting with ``#``) and lines without
    an ``=`` are skipped; keys and values are whitespace-stripped.
    """
    env_vars: dict[str, str] = {}
    for raw_line in content.splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        env_vars[key.strip()] = value.strip()
    return env_vars
def dump_env(content: str, data: dict[str, str]) -> str:
    """Return *content* with assignment values replaced from *data*.

    Comments, blank lines and line ordering are preserved, and inline
    comments (separated by " #") survive a value update.  Keys present
    in *data* but absent from *content* are appended at the end.
    """
    lines: list[str] = []
    seen: set[str] = set()
    for raw in content.splitlines():
        stripped = raw.strip()
        # Pass through anything that is not an assignment.
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            lines.append(raw)
            continue
        key_part, _, value_part = raw.partition("=")
        key = key_part.strip()
        # Assignment of a key we are not updating: keep it verbatim.
        if key not in data:
            lines.append(raw)
            continue
        seen.add(key)
        replacement = data[key]
        if " #" in value_part:
            # Keep the inline comment after the new value.
            comment = value_part.split(" #", 1)[1]
            lines.append(f"{key_part}={replacement} #{comment}")
        else:
            lines.append(f"{key_part}={replacement}")
    # Append brand-new keys at the end of the file.
    lines.extend(f"{k}={v}" for k, v in data.items() if k not in seen)
    return "\n".join(lines) + "\n"
def ensure_env() -> None:
    """Manage .env lifecycle: creation, secret generation, prompts.

    Reads the existing .env (falling back to .env.example as the
    template when .env is absent), generates any missing secrets,
    interactively prompts for missing required variables, then writes
    the merged result to .env, backing up a pre-existing file first.

    NOTE(review): if .env is absent but the template already satisfies
    every secret and required variable, nothing is flagged as updated
    and no .env is ever written — confirm this is intended.
    """
    env_path = Path(".env")
    env_example_path = Path(".env.example")
    updated: bool = False
    # Read .env if exists, otherwise start from the example template
    if env_path.exists():
        content: str = env_path.read_text(encoding="utf-8")
    else:
        content: str = env_example_path.read_text(encoding="utf-8")
    existing_vars: dict[str, str] = parse_env(content)
    # Generate missing secrets (hex-encoded; byte sizes in KEYS_TO_GENERATE)
    for key, length in KEYS_TO_GENERATE.items():
        if key not in existing_vars or not existing_vars[key]:
            log(f"Generating {key}...", Style.GREEN, prefix=" ")
            existing_vars[key] = secrets.token_hex(length)
            updated = True
    log("Done", Style.GREEN, prefix=" ")
    # Prompt for missing mandatory keys
    color = Style.YELLOW if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""
    for key in REQUIRED_VARS:
        if key not in existing_vars or not existing_vars[key]:
            try:
                existing_vars[key] = input(
                    f" {color}Enter value for {key}: {reset}"
                ).strip()
                updated = True
            except KeyboardInterrupt:
                # Move past the interrupted prompt before reporting
                print()
                error_exit("Aborted by user.")
    # Write to disk
    if updated:
        # But backup original first
        if env_path.exists():
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = Path(f"{env_path}.{timestamp}.bak")
            shutil.copy(env_path, backup_path)
            log(f"Backup created: {backup_path}", Style.DIM)
        new_content = dump_env(content, existing_vars)
        env_path.write_text(new_content, encoding="utf-8")
        log(".env updated successfully.", Style.GREEN)
    else:
        log("Configuration is up to date.", Style.GREEN)
def setup() -> None:
    """Orchestrate initialization: verify Docker, then prepare .env.

    Exits with an error when the Docker daemon is not running.
    """
    # Bug fix: the boolean returned by is_docker_running() was ignored,
    # so setup happily continued even when the daemon was down (the old
    # code only aborted when the docker binary was missing entirely).
    if not is_docker_running():
        error_exit("Docker daemon is not running.")
    ensure_env()
def status() -> None:
    """Display simple dashboard."""
    # Hardcoded bold style for title if colors are enabled
    bold = Style.BOLD if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""
    print(f"\n{bold}ALFRED STATUS{reset}")
    print(f"{bold}==============={reset}\n")
    # Compute the two check marks first, then render them.
    docker_mark = (
        styled("✓ running", Style.GREEN)
        if is_docker_running()
        else styled("✗ stopped", Style.RED)
    )
    env_mark = (
        styled("✓ present", Style.GREEN)
        if Path(".env").exists()
        else styled("✗ missing", Style.RED)
    )
    print(f" Docker: {docker_mark}")
    print(f" .env: {env_mark}")
    print("")
def check() -> None:
    """Prerequisite check used by 'make up'.

    NOTE(review): described as "silent", but it delegates to setup(),
    which logs progress and may prompt interactively — confirm the
    intended behavior for non-interactive make invocations.
    """
    setup()
def main() -> None:
    """CLI entry point: dispatch to the requested sub-command."""
    if len(sys.argv) < 2:
        print("Usage: python cli.py [setup|check|status]")
        sys.exit(1)
    cmd = sys.argv[1]
    # Table-driven dispatch instead of an if/elif chain.
    handlers = {"setup": setup, "check": check, "status": status}
    handler = handlers.get(cmd)
    if handler is None:
        error_exit(f"Unknown command: {cmd}")
    handler()


if __name__ == "__main__":
    main()

102
docker-compose.yaml Normal file
View File

@@ -0,0 +1,102 @@
# Compose stack for Alfred: the agent core plus the LibreChat front-end
# and its supporting services (MongoDB, Meilisearch, RAG API, pgvector).
# NOTE(review): original indentation was stripped by the diff renderer;
# reconstructed here with conventional 2-space YAML nesting.
services:
  alfred:
    container_name: alfred-core
    build:
      context: .
      args:
        PYTHON_VERSION: ${PYTHON_VERSION}
        PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
        RUNNER: ${RUNNER}
    depends_on:
      - librechat
    restart: unless-stopped
    env_file:
      - .env
    environment:
      # LLM Configuration
      LLM_PROVIDER: ${LLM_PROVIDER:-deepseek}
      DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY:-}
      # Memory storage
      MEMORY_STORAGE_DIR: /data/memory
      # External services
      TMDB_API_KEY: ${TMDB_API_KEY:-}
      QBITTORRENT_URL: ${QBITTORRENT_URL:-}
      QBITTORRENT_USERNAME: ${QBITTORRENT_USERNAME:-}
      QBITTORRENT_PASSWORD: ${QBITTORRENT_PASSWORD:-}
    volumes:
      - ./data/memory:/data/memory
      - ./logs:/data/logs
      # TODO: Development: mount code for hot reload (comment out in production)
      # - ./alfred:/app/alfred
  librechat:
    container_name: alfred-librechat
    image: ghcr.io/danny-avila/librechat:${LIBRECHAT_VERSION}
    depends_on:
      - mongodb
      - meilisearch
      - rag_api
    restart: unless-stopped
    env_file:
      - .env
    environment:
      - HOST=0.0.0.0
      - MONGO_URI=mongodb://mongodb:27017/LibreChat
      - MEILI_HOST=http://meilisearch:7700
      - RAG_PORT=${RAG_PORT:-8000}
      - RAG_API_URL=http://rag_api:${RAG_PORT:-8000}
    ports:
      - "${LIBRECHAT_PORT:-3080}:3080"
    volumes:
      - ./data/librechat/images:/app/client/public/images
      - ./data/librechat/uploads:/app/client/uploads
      - ./logs:/app/api/logs
      # Mount custom endpoint
      - ./librechat/manifests:/app/manifests:ro
      - ./librechat/librechat.yaml:/app/librechat.yaml:ro
  mongodb:
    container_name: alfred-mongodb
    # NOTE(review): mongo:latest is unpinned and runs with --noauth —
    # acceptable for a local stack, confirm before any exposed deployment.
    image: mongo:latest
    restart: unless-stopped
    volumes:
      - ./data/mongo:/data/db
    command: mongod --noauth
  meilisearch:
    container_name: alfred-meilisearch
    image: getmeili/meilisearch:v1.12.3
    restart: unless-stopped
    environment:
      - MEILI_NO_ANALYTICS=true
    volumes:
      - ./data/meili:/meili_data
    #profiles: ["meili", "full"]
  rag_api:
    container_name: alfred-rag
    image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:${RAG_VERSION}
    restart: unless-stopped
    env_file:
      - .env
    environment:
      - DB_HOST=vectordb
      - DB_PORT=5432
      - RAG_PORT=${RAG_PORT:-8000}
    ports:
      - "${RAG_PORT:-8000}:${RAG_PORT:-8000}"
    #profiles: ["rag", "full"]
  vectordb:
    container_name: alfred-vectordb
    image: pgvector/pgvector:0.8.0-pg16-bookworm
    restart: unless-stopped
    env_file:
      - .env
    ports:
      - "${VECTOR_DB_PORT:-5432}:5432"
    volumes:
      - ./data/vectordb:/var/lib/postgresql/data
    #profiles: ["rag", "full"]

View File

@@ -1,207 +0,0 @@
version: "3.4"
services:
# Da brain
agent-brain:
build:
context: ./brain
dockerfile: Dockerfile
args:
RUNNER: ${RUNNER} # Get it from Makefile
container_name: agent-brain
restart: unless-stopped
env_file: .env
ports:
- "8000:8000"
volumes:
# Persistent data volumes (outside container /app)
- agent-memory:/data/memory
- agent-logs:/data/logs
# Development: mount code for hot reload (comment out in production)
# - ./brain:/app
environment:
# LLM Configuration
LLM_PROVIDER: ${LLM_PROVIDER:-deepseek}
DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY:-}
# Memory storage
MEMORY_STORAGE_DIR: /data/memory
# External services
TMDB_API_KEY: ${TMDB_API_KEY:-}
QBITTORRENT_URL: ${QBITTORRENT_URL:-}
QBITTORRENT_USERNAME: ${QBITTORRENT_USERNAME:-}
QBITTORRENT_PASSWORD: ${QBITTORRENT_PASSWORD:-}
networks:
- agent-network
# Da face (LibreChat)
librechat:
image: ghcr.io/danny-avila/librechat-dev:latest
container_name: librechat-frontend
restart: unless-stopped
ports:
- "3080:3080"
depends_on:
- mongodb
- meilisearch
- rag_api
- agent-brain
env_file: .env
environment:
# MongoDB connection (no auth, matching LibreChat default)
MONGO_URI: mongodb://mongodb:27017/LibreChat
# App configuration
HOST: 0.0.0.0
PORT: 3080
# Security
JWT_SECRET: ${JWT_SECRET:-your-super-secret-jwt-key-change-this-in-production}
JWT_REFRESH_SECRET: ${JWT_REFRESH_SECRET:-your-super-secret-refresh-key-change-this-too}
CREDS_KEY: ${CREDS_KEY:-your-32-character-secret-key-here}
CREDS_IV: ${CREDS_IV:-your-16-character-iv-here}
# Session
SESSION_EXPIRY: ${SESSION_EXPIRY:-1000 * 60 * 15}
REFRESH_TOKEN_EXPIRY: ${REFRESH_TOKEN_EXPIRY:-1000 * 60 * 60 * 24 * 7}
# Domain
DOMAIN_CLIENT: ${DOMAIN_CLIENT:-http://localhost:3080}
DOMAIN_SERVER: ${DOMAIN_SERVER:-http://localhost:3080}
# Meilisearch
MEILI_HOST: http://meilisearch:7700
MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU}
# RAG API
RAG_API_URL: http://rag_api:8000
# Endpoints
ENDPOINTS: custom
# Custom endpoint pointing to agent-brain
CUSTOM_API_KEY: ${AGENT_BRAIN_API_KEY:-agent-brain-secret-key}
# Debug (optional)
DEBUG_LOGGING: ${DEBUG_LOGGING:-false}
DEBUG_CONSOLE: ${DEBUG_CONSOLE:-false}
volumes:
- ./librechat/librechat.yaml:/app/librechat.yaml:ro
- librechat-images:/app/client/public/images
- librechat-logs:/app/api/logs
networks:
- agent-network
# MongoDB for LibreChat
mongodb:
image: mongo:latest
container_name: librechat-mongodb
restart: unless-stopped
volumes:
- mongodb-data:/data/db
command: mongod --noauth
ports:
- "27017:27017"
networks:
- agent-network
# Meilisearch - Search engine for LibreChat
#TODO: Follow currently used version on librechat's github
meilisearch:
image: getmeili/meilisearch:v1.12.3
container_name: librechat-meilisearch
restart: unless-stopped
volumes:
- meilisearch-data:/meili_data
environment:
MEILI_HOST: http://meilisearch:7700
MEILI_HTTP_ADDR: meilisearch:7700
MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU}
ports:
- "7700:7700"
networks:
- agent-network
# PostgreSQL with pgvector for RAG API
pgvector:
image: ankane/pgvector:latest
container_name: librechat-pgvector
restart: unless-stopped
environment:
POSTGRES_DB: ${POSTGRES_DB:-librechat_rag}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
volumes:
- pgvector-data:/var/lib/postgresql/data
ports:
- "5432:5432"
networks:
- agent-network
# RAG API - Vector database for LibreChat
rag_api:
image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:latest
container_name: librechat-rag-api
restart: unless-stopped
depends_on:
- pgvector
environment:
PORT: 8000
HOST: 0.0.0.0
# PostgreSQL connection (multiple variable names for compatibility)
DB_HOST: pgvector
DB_PORT: 5432
DB_NAME: ${POSTGRES_DB:-librechat_rag}
DB_USER: ${POSTGRES_USER:-postgres}
DB_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
POSTGRES_DB: ${POSTGRES_DB:-librechat_rag}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
# RAG configuration
COLLECTION_NAME: ${RAG_COLLECTION_NAME:-testcollection}
EMBEDDINGS_PROVIDER: ${RAG_EMBEDDINGS_PROVIDER:-openai}
EMBEDDINGS_MODEL: ${RAG_EMBEDDINGS_MODEL:-text-embedding-3-small}
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
RAG_UPLOAD_DIR: /app/uploads
volumes:
- rag-uploads:/app/uploads
ports:
- "8001:8000"
networks:
- agent-network
# Named volumes for persistent data
volumes:
# MongoDB data
mongodb-data:
driver: local
# Meilisearch data
meilisearch-data:
driver: local
# PostgreSQL pgvector data
pgvector-data:
driver: local
# RAG API uploads
rag-uploads:
driver: local
# LibreChat data
librechat-images:
driver: local
librechat-logs:
driver: local
# Agent Brain data
agent-memory:
driver: local
agent-logs:
driver: local
# Network for inter-service communication
networks:
agent-network:
driver: bridge

View File

@@ -1,27 +0,0 @@
#!/bin/bash
# Script to generate secure keys for LibreChat
# Run this script to generate random secure keys for your .env file
echo "==================================="
echo "LibreChat Security Keys Generator"
echo "==================================="
echo ""
echo "# MongoDB Password"
echo "MONGO_PASSWORD=$(openssl rand -base64 24)"
echo ""
echo "# JWT Secrets"
echo "JWT_SECRET=$(openssl rand -base64 32)"
echo "JWT_REFRESH_SECRET=$(openssl rand -base64 32)"
echo ""
echo "# Credentials Encryption Keys"
echo "CREDS_KEY=$(openssl rand -hex 16)"
echo "CREDS_IV=$(openssl rand -hex 8)"
echo ""
echo "==================================="
echo "Copy these values to your .env file"
echo "==================================="

View File

@@ -22,7 +22,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_media_imdb_id.json" url: "http://alfred:8000/manifests/find_media_imdb_id.json"
auth: auth:
type: none type: none
@@ -32,7 +32,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_torrent.json" url: "http://alfred:8000/manifests/find_torrent.json"
auth: auth:
type: none type: none
@@ -42,7 +42,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/add_torrent_by_index.json" url: "http://alfred:8000/manifests/add_torrent_by_index.json"
auth: auth:
type: none type: none
@@ -52,7 +52,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/set_language.json" url: "http://alfred:8000/manifests/set_language.json"
auth: auth:
type: none type: none
@@ -60,7 +60,7 @@ endpoints:
# Backend Local Agent # Backend Local Agent
- name: "Local Agent" - name: "Local Agent"
apiKey: "dummy_key" apiKey: "dummy_key"
baseURL: "http://agent-brain:8000/v1" baseURL: "http://alfred:8000/v1"
models: models:
default: ["local-deepseek-agent"] default: ["local-deepseek-agent"]
fetch: false fetch: false
@@ -75,7 +75,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_media_imdb_id.json" url: "http://alfred:8000/manifests/find_media_imdb_id.json"
auth: auth:
type: none type: none
@@ -85,7 +85,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_torrent.json" url: "http://alfred:8000/manifests/find_torrent.json"
auth: auth:
type: none type: none
@@ -95,7 +95,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/add_torrent_by_index.json" url: "http://alfred:8000/manifests/add_torrent_by_index.json"
auth: auth:
type: none type: none
@@ -105,6 +105,6 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/set_language.json" url: "http://alfred:8000/manifests/set_language.json"
auth: auth:
type: none type: none

View File

@@ -372,13 +372,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]] [[package]]
name = "fastapi" name = "fastapi"
version = "0.127.0" version = "0.127.1"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
files = [ files = [
{file = "fastapi-0.127.0-py3-none-any.whl", hash = "sha256:725aa2bb904e2eff8031557cf4b9b77459bfedd63cae8427634744fd199f6a49"}, {file = "fastapi-0.127.1-py3-none-any.whl", hash = "sha256:31d670a4f9373cc6d7994420f98e4dc46ea693145207abc39696746c83a44430"},
{file = "fastapi-0.127.0.tar.gz", hash = "sha256:5a9246e03dcd1fdb19f1396db30894867c1d630f5107dc167dcbc5ed1ea7d259"}, {file = "fastapi-0.127.1.tar.gz", hash = "sha256:946a87ee5d931883b562b6bada787d6c8178becee2683cb3f9b980d593206359"},
] ]
[package.dependencies] [package.dependencies]

View File

@@ -1,11 +1,18 @@
[tool.poetry] [tool.poetry]
name = "agent-media" name = "alfred"
version = "0.1.6" version = "0.1.6"
description = "AI agent for managing a local media library" description = "AI agent for managing a local media library"
authors = ["Francwa <francois.hodiaumont@gmail.com>"] authors = ["Francwa <francois.hodiaumont@gmail.com>"]
readme = "README.md" readme = "README.md"
package-mode = false package-mode = false
[tool.alfred]
image_name = "alfred_media_organizer"
librechat_version = "v0.8.1"
rag_version = "v0.7.0"
runner = "poetry"
service_name = "alfred"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "==3.14.2" python = "==3.14.2"
python-dotenv = "^1.0.0" python-dotenv = "^1.0.0"
@@ -31,6 +38,8 @@ build-backend = "poetry.core.masonry.api"
[tool.pytest.ini_options] [tool.pytest.ini_options]
# Chemins où pytest cherche les tests # Chemins où pytest cherche les tests
testpaths = ["tests"] testpaths = ["tests"]
# Ajouter le répertoire racine au PYTHONPATH pour les imports
pythonpath = ["."]
# Patterns de fichiers/classes/fonctions à considérer comme tests # Patterns de fichiers/classes/fonctions à considérer comme tests
python_files = ["test_*.py"] # Fichiers commençant par "test_" python_files = ["test_*.py"] # Fichiers commençant par "test_"

View File

@@ -1,19 +1,17 @@
"""Pytest configuration and shared fixtures.""" """Pytest configuration and shared fixtures."""
import sys # TODO: Moved directory, should not be necessary anymore but need to check !!
from pathlib import Path
# Ajouter le dossier parent (brain) au PYTHONPATH # Ajouter le dossier parent (brain) au PYTHONPATH
sys.path.insert(0, str(Path(__file__).parent.parent)) # sys.path.insert(0, str(Path(__file__).parent.parent))
import shutil import shutil
import sys
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from unittest.mock import MagicMock, Mock from unittest.mock import MagicMock, Mock
import pytest import pytest
from infrastructure.persistence import Memory, set_memory from alfred.infrastructure.persistence import Memory, set_memory
@pytest.fixture @pytest.fixture
@@ -24,6 +22,16 @@ def temp_dir():
shutil.rmtree(dirpath) shutil.rmtree(dirpath)
@pytest.fixture(autouse=True)
def mock_memory_storage_dir(monkeypatch):
"""Override MEMORY_STORAGE_DIR for all tests to use a temp directory."""
test_dir = tempfile.mkdtemp()
monkeypatch.setenv("MEMORY_STORAGE_DIR", test_dir)
yield
# Cleanup
shutil.rmtree(test_dir, ignore_errors=True)
@pytest.fixture @pytest.fixture
def memory(temp_dir): def memory(temp_dir):
"""Create a fresh Memory instance for testing.""" """Create a fresh Memory instance for testing."""
@@ -254,7 +262,6 @@ def mock_deepseek():
def test_something(mock_deepseek): def test_something(mock_deepseek):
# Your test code here # Your test code here
""" """
import sys
from unittest.mock import Mock from unittest.mock import Mock
# Save the original module if it exists # Save the original module if it exists

View File

@@ -2,8 +2,8 @@
from unittest.mock import Mock from unittest.mock import Mock
from agent.agent import Agent from alfred.agent.agent import Agent
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
class TestAgentInit: class TestAgentInit:

Some files were not shown because too many files have changed in this diff Show More