Compare commits
5 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| aa89a3fb00 | |||
| 64aeb5fc80 | |||
| 9540520dc4 | |||
| 300ed387f5 | |||
| dea81de5b5 |
@@ -34,6 +34,9 @@ jobs:
|
|||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Generate build variables
|
||||||
|
run: python scripts/generate_build_vars.py
|
||||||
|
|
||||||
- name: Load config from Makefile
|
- name: Load config from Makefile
|
||||||
id: config
|
id: config
|
||||||
run: make -s _ci-dump-config >> $GITHUB_OUTPUT
|
run: make -s _ci-dump-config >> $GITHUB_OUTPUT
|
||||||
|
|||||||
10
Makefile
10
Makefile
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
# --- Profiles management ---
|
# --- Profiles management ---
|
||||||
# Usage: make up p=rag,meili
|
# Usage: make up p=rag,meili
|
||||||
p ?= core
|
p ?= full
|
||||||
PROFILES_PARAM := COMPOSE_PROFILES=$(p)
|
PROFILES_PARAM := COMPOSE_PROFILES=$(p)
|
||||||
|
|
||||||
# --- Commands ---
|
# --- Commands ---
|
||||||
@@ -16,8 +16,8 @@ DOCKER_BUILD := docker build --no-cache \
|
|||||||
--build-arg RUNNER=$(RUNNER)
|
--build-arg RUNNER=$(RUNNER)
|
||||||
|
|
||||||
# --- Phony ---
|
# --- Phony ---
|
||||||
.PHONY: .env up down restart logs ps shell build build-test install update \
|
.PHONY: .env bootstrap up down restart logs ps shell build build-test install \
|
||||||
install-hooks test coverage lint format clean major minor patch help
|
update install-hooks test coverage lint format clean major minor patch help
|
||||||
|
|
||||||
# --- Setup ---
|
# --- Setup ---
|
||||||
.env .env.make:
|
.env .env.make:
|
||||||
@@ -30,14 +30,14 @@ bootstrap: .env .env.make
|
|||||||
|
|
||||||
# --- Docker ---
|
# --- Docker ---
|
||||||
up: .env
|
up: .env
|
||||||
@echo "Starting containers with profiles: [$(p)]..."
|
@echo "Starting containers with profiles: [full]..."
|
||||||
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) up -d --remove-orphans \
|
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) up -d --remove-orphans \
|
||||||
&& echo "✓ Containers started" \
|
&& echo "✓ Containers started" \
|
||||||
|| (echo "✗ Failed to start containers" && exit 1)
|
|| (echo "✗ Failed to start containers" && exit 1)
|
||||||
|
|
||||||
down:
|
down:
|
||||||
@echo "Stopping containers..."
|
@echo "Stopping containers..."
|
||||||
@$(DOCKER_COMPOSE) down \
|
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) down \
|
||||||
&& echo "✓ Containers stopped" \
|
&& echo "✓ Containers stopped" \
|
||||||
|| (echo "✗ Failed to stop containers" && exit 1)
|
|| (echo "✗ Failed to stop containers" && exit 1)
|
||||||
|
|
||||||
|
|||||||
651
README.md
651
README.md
@@ -1,89 +1,277 @@
|
|||||||
# Agent Media 🎬
|
# Alfred Media Organizer 🎬
|
||||||
|
|
||||||
An AI-powered agent for managing your local media library with natural language. Search, download, and organize movies and TV shows effortlessly.
|
An AI-powered agent for managing your local media library with natural language. Search, download, and organize movies and TV shows effortlessly through a conversational interface.
|
||||||
|
|
||||||
## Features
|
[](https://www.python.org/downloads/)
|
||||||
|
[](https://python-poetry.org/)
|
||||||
|
[](https://opensource.org/licenses/MIT)
|
||||||
|
[](https://github.com/astral-sh/ruff)
|
||||||
|
|
||||||
- 🤖 **Natural Language Interface**: Talk to your media library in plain language
|
## ✨ Features
|
||||||
- 🔍 **Smart Search**: Find movies and TV shows via TMDB
|
|
||||||
- 📥 **Torrent Integration**: Search and download via qBittorrent
|
|
||||||
- 🧠 **Contextual Memory**: Remembers your preferences and conversation history
|
|
||||||
- 📁 **Auto-Organization**: Keeps your media library tidy
|
|
||||||
- 🌐 **API Compatible**: OpenAI-compatible API for easy integration
|
|
||||||
|
|
||||||
## Architecture
|
- 🤖 **Natural Language Interface** — Talk to your media library in plain language
|
||||||
|
- 🔍 **Smart Search** — Find movies and TV shows via TMDB with rich metadata
|
||||||
|
- 📥 **Torrent Integration** — Search and download via qBittorrent
|
||||||
|
- 🧠 **Contextual Memory** — Remembers your preferences and conversation history
|
||||||
|
- 📁 **Auto-Organization** — Keeps your media library tidy and well-structured
|
||||||
|
- 🌐 **OpenAI-Compatible API** — Works with any OpenAI-compatible client
|
||||||
|
- 🖥️ **LibreChat Frontend** — Beautiful web UI included out of the box
|
||||||
|
- 🔒 **Secure by Default** — Auto-generated secrets and encrypted credentials
|
||||||
|
|
||||||
Built with **Domain-Driven Design (DDD)** principles:
|
## 🏗️ Architecture
|
||||||
|
|
||||||
|
Built with **Domain-Driven Design (DDD)** principles for clean separation of concerns:
|
||||||
|
|
||||||
```
|
```
|
||||||
agent_media/
|
alfred/
|
||||||
├── agent/ # AI agent orchestration
|
├── agent/ # AI agent orchestration
|
||||||
├── application/ # Use cases & DTOs
|
│ ├── llm/ # LLM clients (Ollama, DeepSeek)
|
||||||
├── domain/ # Business logic & entities
|
│ └── tools/ # Tool implementations
|
||||||
└── infrastructure/ # External services & persistence
|
├── application/ # Use cases & DTOs
|
||||||
|
│ ├── movies/ # Movie search use cases
|
||||||
|
│ ├── torrents/ # Torrent management
|
||||||
|
│ └── filesystem/ # File operations
|
||||||
|
├── domain/ # Business logic & entities
|
||||||
|
│ ├── movies/ # Movie entities
|
||||||
|
│ ├── tv_shows/ # TV show entities
|
||||||
|
│ └── subtitles/ # Subtitle entities
|
||||||
|
└── infrastructure/ # External services & persistence
|
||||||
|
├── api/ # External API clients (TMDB, qBittorrent)
|
||||||
|
├── filesystem/ # File system operations
|
||||||
|
└── persistence/ # Memory & repositories
|
||||||
```
|
```
|
||||||
|
|
||||||
See [architecture_diagram.md](docs/architecture_diagram.md) for architectural details.
|
See [docs/architecture_diagram.md](docs/architecture_diagram.md) for detailed architectural diagrams.
|
||||||
|
|
||||||
## Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
- Python 3.12+
|
- **Python 3.14+** (required)
|
||||||
- Poetry
|
- **Poetry** (dependency manager)
|
||||||
- qBittorrent (optional, for downloads)
|
- **Docker & Docker Compose** (recommended for full stack)
|
||||||
- API Keys:
|
- **API Keys:**
|
||||||
- DeepSeek API key (or Ollama for local LLM)
|
- TMDB API key ([get one here](https://www.themoviedb.org/settings/api))
|
||||||
- TMDB API key
|
- Optional: DeepSeek, OpenAI, Anthropic, or other LLM provider keys
|
||||||
|
|
||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Clone the repository
|
# Clone the repository
|
||||||
git clone https://github.com/your-username/agent-media.git
|
git clone https://github.com/francwa/alfred_media_organizer.git
|
||||||
cd agent-media
|
cd alfred_media_organizer
|
||||||
|
|
||||||
# Install dependencies
|
# Install dependencies
|
||||||
poetry install
|
make install
|
||||||
|
|
||||||
# Copy environment template
|
# Bootstrap environment (generates .env with secure secrets)
|
||||||
cp .env.example .env
|
make bootstrap
|
||||||
|
|
||||||
# Edit .env with your API keys
|
# Edit .env with your API keys
|
||||||
nano .env
|
nano .env
|
||||||
```
|
```
|
||||||
|
|
||||||
### Configuration
|
### Running with Docker (Recommended)
|
||||||
|
|
||||||
Edit `.env`:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# LLM Provider (deepseek or ollama)
|
# Start all services (LibreChat + Alfred + MongoDB + Ollama)
|
||||||
LLM_PROVIDER=deepseek
|
make up
|
||||||
DEEPSEEK_API_KEY=your-api-key-here
|
|
||||||
|
|
||||||
# TMDB (for movie/TV show metadata)
|
# Or start with specific profiles
|
||||||
TMDB_API_KEY=your-tmdb-key-here
|
make up p=rag,meili # Include RAG and Meilisearch
|
||||||
|
make up p=qbittorrent # Include qBittorrent
|
||||||
|
make up p=full # Everything
|
||||||
|
|
||||||
# qBittorrent (optional)
|
# View logs
|
||||||
QBITTORRENT_HOST=http://localhost:8080
|
make logs
|
||||||
QBITTORRENT_USERNAME=admin
|
|
||||||
QBITTORRENT_PASSWORD=adminadmin
|
# Stop all services
|
||||||
|
make down
|
||||||
```
|
```
|
||||||
|
|
||||||
### Run
|
The web interface will be available at **http://localhost:3080**
|
||||||
|
|
||||||
|
### Running Locally (Development)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# Install dependencies
|
||||||
|
poetry install
|
||||||
|
|
||||||
# Start the API server
|
# Start the API server
|
||||||
poetry run uvicorn app:app --reload
|
poetry run uvicorn alfred.app:app --reload --port 8000
|
||||||
|
|
||||||
# Or with Docker
|
|
||||||
docker-compose up
|
|
||||||
```
|
```
|
||||||
|
|
||||||
The API will be available at `http://localhost:8000`
|
## ⚙️ Configuration
|
||||||
|
|
||||||
## Usage
|
### Environment Bootstrap
|
||||||
|
|
||||||
|
Alfred uses a smart bootstrap system that:
|
||||||
|
|
||||||
|
1. **Generates secure secrets** automatically (JWT tokens, database passwords, encryption keys)
|
||||||
|
2. **Syncs build variables** from `pyproject.toml` (versions, image names)
|
||||||
|
3. **Preserves existing secrets** when re-running (never overwrites your API keys)
|
||||||
|
4. **Computes database URIs** automatically from individual components
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# First time setup
|
||||||
|
make bootstrap
|
||||||
|
|
||||||
|
# Re-run after updating pyproject.toml (secrets are preserved)
|
||||||
|
make bootstrap
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration File (.env)
|
||||||
|
|
||||||
|
The `.env` file is generated from `.env.example` with secure defaults:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# --- CORE SETTINGS ---
|
||||||
|
HOST=0.0.0.0
|
||||||
|
PORT=3080
|
||||||
|
MAX_HISTORY_MESSAGES=10
|
||||||
|
MAX_TOOL_ITERATIONS=10
|
||||||
|
|
||||||
|
# --- LLM CONFIGURATION ---
|
||||||
|
# Providers: 'local' (Ollama), 'deepseek', 'openai', 'anthropic', 'google'
|
||||||
|
DEFAULT_LLM_PROVIDER=local
|
||||||
|
|
||||||
|
# Local LLM (Ollama - included in Docker stack)
|
||||||
|
OLLAMA_BASE_URL=http://ollama:11434
|
||||||
|
OLLAMA_MODEL=llama3.3:latest
|
||||||
|
LLM_TEMPERATURE=0.2
|
||||||
|
|
||||||
|
# --- API KEYS (fill only what you need) ---
|
||||||
|
TMDB_API_KEY=your-tmdb-key-here # Required for movie search
|
||||||
|
DEEPSEEK_API_KEY= # Optional
|
||||||
|
OPENAI_API_KEY= # Optional
|
||||||
|
ANTHROPIC_API_KEY= # Optional
|
||||||
|
|
||||||
|
# --- SECURITY (auto-generated, don't modify) ---
|
||||||
|
JWT_SECRET=<auto-generated>
|
||||||
|
JWT_REFRESH_SECRET=<auto-generated>
|
||||||
|
CREDS_KEY=<auto-generated>
|
||||||
|
CREDS_IV=<auto-generated>
|
||||||
|
|
||||||
|
# --- DATABASES (auto-generated passwords) ---
|
||||||
|
MONGO_PASSWORD=<auto-generated>
|
||||||
|
POSTGRES_PASSWORD=<auto-generated>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Security Keys
|
||||||
|
|
||||||
|
Security keys are defined in `pyproject.toml` and generated automatically:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[tool.alfred.security]
|
||||||
|
jwt_secret = "32:b64" # 32 bytes, base64 URL-safe
|
||||||
|
jwt_refresh_secret = "32:b64"
|
||||||
|
creds_key = "32:hex" # 32 bytes, hexadecimal (AES-256)
|
||||||
|
creds_iv = "16:hex" # 16 bytes, hexadecimal (AES IV)
|
||||||
|
mongo_password = "16:hex"
|
||||||
|
postgres_password = "16:hex"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Formats:**
|
||||||
|
- `b64` — Base64 URL-safe (for JWT tokens)
|
||||||
|
- `hex` — Hexadecimal (for encryption keys, passwords)
|
||||||
|
|
||||||
|
## 🐳 Docker Services
|
||||||
|
|
||||||
|
### Service Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ alfred-net (bridge) │
|
||||||
|
├─────────────────────────────────────────────────────────────┤
|
||||||
|
│ │
|
||||||
|
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
|
||||||
|
│ │ LibreChat │───▶│ Alfred │───▶│ MongoDB │ │
|
||||||
|
│ │ :3080 │ │ (core) │ │ :27017 │ │
|
||||||
|
│ └──────────────┘ └──────────────┘ └──────────────┘ │
|
||||||
|
│ │ │ │
|
||||||
|
│ │ ▼ │
|
||||||
|
│ │ ┌──────────────┐ │
|
||||||
|
│ │ │ Ollama │ │
|
||||||
|
│ │ │ (local) │ │
|
||||||
|
│ │ └──────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌──────┴───────────────────────────────────────────────┐ │
|
||||||
|
│ │ Optional Services (profiles) │ │
|
||||||
|
│ ├──────────────┬──────────────┬──────────────┬─────────┤ │
|
||||||
|
│ │ Meilisearch │ RAG API │ VectorDB │qBittor- │ │
|
||||||
|
│ │ :7700 │ :8000 │ :5432 │ rent │ │
|
||||||
|
│ │ [meili] │ [rag] │ [rag] │[qbit..] │ │
|
||||||
|
│ └──────────────┴──────────────┴──────────────┴─────────┘ │
|
||||||
|
│ │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Profiles
|
||||||
|
|
||||||
|
| Profile | Services | Use Case |
|
||||||
|
|---------|----------|----------|
|
||||||
|
| (default) | LibreChat, Alfred, MongoDB, Ollama | Basic setup |
|
||||||
|
| `meili` | + Meilisearch | Fast search |
|
||||||
|
| `rag` | + RAG API, VectorDB | Document retrieval |
|
||||||
|
| `qbittorrent` | + qBittorrent | Torrent downloads |
|
||||||
|
| `full` | All services | Complete setup |
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start with specific profiles
|
||||||
|
make up p=rag,meili
|
||||||
|
make up p=full
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make up # Start containers (default profile)
|
||||||
|
make up p=full # Start with all services
|
||||||
|
make down # Stop all containers
|
||||||
|
make restart # Restart containers
|
||||||
|
make logs # Follow logs
|
||||||
|
make ps # Show container status
|
||||||
|
make shell # Open bash in Alfred container
|
||||||
|
make build # Build production image
|
||||||
|
make build-test # Build test image
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🛠️ Available Tools
|
||||||
|
|
||||||
|
The agent has access to these tools for interacting with your media library:
|
||||||
|
|
||||||
|
| Tool | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `find_media_imdb_id` | Search for movies/TV shows on TMDB by title |
|
||||||
|
| `find_torrent` | Search for torrents across multiple indexers |
|
||||||
|
| `get_torrent_by_index` | Get detailed info about a specific torrent result |
|
||||||
|
| `add_torrent_by_index` | Download a torrent by its index in search results |
|
||||||
|
| `add_torrent_to_qbittorrent` | Add a torrent via magnet link directly |
|
||||||
|
| `set_path_for_folder` | Configure folder paths for media organization |
|
||||||
|
| `list_folder` | List contents of a folder |
|
||||||
|
| `set_language` | Set preferred language for searches |
|
||||||
|
|
||||||
|
## 💬 Usage Examples
|
||||||
|
|
||||||
|
### Via Web Interface (LibreChat)
|
||||||
|
|
||||||
|
Navigate to **http://localhost:3080** and start chatting:
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Find Inception in 1080p
|
||||||
|
Alfred: I found 3 torrents for Inception (2010):
|
||||||
|
1. Inception.2010.1080p.BluRay.x264 (150 seeders) - 2.1 GB
|
||||||
|
2. Inception.2010.1080p.WEB-DL.x265 (80 seeders) - 1.8 GB
|
||||||
|
3. Inception.2010.1080p.REMUX (45 seeders) - 25 GB
|
||||||
|
|
||||||
|
You: Download the first one
|
||||||
|
Alfred: ✓ Added to qBittorrent! Download started.
|
||||||
|
Saving to: /downloads/Movies/Inception (2010)/
|
||||||
|
|
||||||
|
You: What's downloading right now?
|
||||||
|
Alfred: You have 1 active download:
|
||||||
|
- Inception.2010.1080p.BluRay.x264 (45% complete, ETA: 12 min)
|
||||||
|
```
|
||||||
|
|
||||||
### Via API
|
### Via API
|
||||||
|
|
||||||
@@ -91,219 +279,177 @@ The API will be available at `http://localhost:8000`
|
|||||||
# Health check
|
# Health check
|
||||||
curl http://localhost:8000/health
|
curl http://localhost:8000/health
|
||||||
|
|
||||||
# Chat with the agent
|
# Chat with the agent (OpenAI-compatible)
|
||||||
curl -X POST http://localhost:8000/v1/chat/completions \
|
curl -X POST http://localhost:8000/v1/chat/completions \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
-d '{
|
-d '{
|
||||||
"model": "agent-media",
|
"model": "alfred",
|
||||||
"messages": [
|
"messages": [
|
||||||
{"role": "user", "content": "Find Inception 1080p"}
|
{"role": "user", "content": "Find The Matrix 4K"}
|
||||||
]
|
]
|
||||||
}'
|
}'
|
||||||
|
|
||||||
|
# List available models
|
||||||
|
curl http://localhost:8000/v1/models
|
||||||
|
|
||||||
|
# View memory state (debug)
|
||||||
|
curl http://localhost:8000/memory/state
|
||||||
|
|
||||||
|
# Clear session memory
|
||||||
|
curl -X POST http://localhost:8000/memory/clear-session
|
||||||
```
|
```
|
||||||
|
|
||||||
### Via OpenWebUI
|
### Via OpenWebUI or Other Clients
|
||||||
|
|
||||||
Agent Media is compatible with [OpenWebUI](https://github.com/open-webui/open-webui):
|
Alfred is compatible with any OpenAI-compatible client:
|
||||||
|
|
||||||
1. Add as OpenAI-compatible endpoint: `http://localhost:8000/v1`
|
1. Add as OpenAI-compatible endpoint: `http://localhost:8000/v1`
|
||||||
2. Model name: `agent-media`
|
2. Model name: `alfred`
|
||||||
3. Start chatting!
|
3. No API key required (or use any placeholder)
|
||||||
|
|
||||||
### Example Conversations
|
## 🧠 Memory System
|
||||||
|
|
||||||
```
|
Alfred uses a three-tier memory system for context management:
|
||||||
You: Find Inception in 1080p
|
|
||||||
Agent: I found 3 torrents for Inception:
|
|
||||||
1. Inception.2010.1080p.BluRay.x264 (150 seeders)
|
|
||||||
2. Inception.2010.1080p.WEB-DL.x265 (80 seeders)
|
|
||||||
3. Inception.2010.720p.BluRay (45 seeders)
|
|
||||||
|
|
||||||
You: Download the first one
|
|
||||||
Agent: Added to qBittorrent! Download started.
|
|
||||||
|
|
||||||
You: List my downloads
|
|
||||||
Agent: You have 1 active download:
|
|
||||||
- Inception.2010.1080p.BluRay.x264 (45% complete)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Available Tools
|
|
||||||
|
|
||||||
The agent has access to these tools:
|
|
||||||
|
|
||||||
| Tool | Description |
|
|
||||||
|------|-------------|
|
|
||||||
| `find_media_imdb_id` | Search for movies/TV shows on TMDB |
|
|
||||||
| `find_torrents` | Search for torrents |
|
|
||||||
| `get_torrent_by_index` | Get torrent details by index |
|
|
||||||
| `add_torrent_by_index` | Download torrent by index |
|
|
||||||
| `add_torrent_to_qbittorrent` | Add torrent via magnet link |
|
|
||||||
| `set_path_for_folder` | Configure folder paths |
|
|
||||||
| `list_folder` | List folder contents |
|
|
||||||
|
|
||||||
## Memory System
|
|
||||||
|
|
||||||
Agent Media uses a three-tier memory system:
|
|
||||||
|
|
||||||
### Long-Term Memory (LTM)
|
### Long-Term Memory (LTM)
|
||||||
- **Persistent** (saved to JSON)
|
- **Persistent** — Saved to JSON files
|
||||||
- Configuration, preferences, media library
|
- **Contents:** Configuration, user preferences, media library state
|
||||||
- Survives restarts
|
- **Survives:** Application restarts
|
||||||
|
|
||||||
### Short-Term Memory (STM)
|
### Short-Term Memory (STM)
|
||||||
- **Session-based** (RAM only)
|
- **Session-based** — Stored in RAM
|
||||||
- Conversation history, current workflow
|
- **Contents:** Conversation history, current workflow state
|
||||||
- Cleared on restart
|
- **Cleared:** On session end or restart
|
||||||
|
|
||||||
### Episodic Memory
|
### Episodic Memory
|
||||||
- **Transient** (RAM only)
|
- **Transient** — Stored in RAM
|
||||||
- Search results, active downloads, recent errors
|
- **Contents:** Search results, active downloads, recent errors
|
||||||
- Cleared frequently
|
- **Cleared:** Frequently, after task completion
|
||||||
|
|
||||||
## Development
|
## 🧪 Development
|
||||||
|
|
||||||
### Project Structure
|
### Project Setup
|
||||||
|
|
||||||
```
|
```bash
|
||||||
agent_media/
|
# Install all dependencies (including dev)
|
||||||
├── agent/
|
poetry install
|
||||||
│ ├── agent.py # Main agent orchestrator
|
|
||||||
│ ├── prompts.py # System prompt builder
|
# Install pre-commit hooks
|
||||||
│ ├── registry.py # Tool registration
|
make install-hooks
|
||||||
│ ├── tools/ # Tool implementations
|
|
||||||
│ └── llm/ # LLM clients (DeepSeek, Ollama)
|
# Run the development server
|
||||||
├── application/
|
poetry run uvicorn alfred.app:app --reload
|
||||||
│ ├── movies/ # Movie use cases
|
|
||||||
│ ├── torrents/ # Torrent use cases
|
|
||||||
│ └── filesystem/ # Filesystem use cases
|
|
||||||
├── domain/
|
|
||||||
│ ├── movies/ # Movie entities & value objects
|
|
||||||
│ ├── tv_shows/ # TV show entities
|
|
||||||
│ ├── subtitles/ # Subtitle entities
|
|
||||||
│ └── shared/ # Shared value objects
|
|
||||||
├── infrastructure/
|
|
||||||
│ ├── api/ # External API clients
|
|
||||||
│ │ ├── tmdb/ # TMDB client
|
|
||||||
│ │ ├── knaben/ # Torrent search
|
|
||||||
│ │ └── qbittorrent/ # qBittorrent client
|
|
||||||
│ ├── filesystem/ # File operations
|
|
||||||
│ └── persistence/ # Memory & repositories
|
|
||||||
├── tests/ # Test suite (~500 tests)
|
|
||||||
└── docs/ # Documentation
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Running Tests
|
### Running Tests
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Run all tests
|
# Run all tests (parallel execution)
|
||||||
poetry run pytest
|
make test
|
||||||
|
|
||||||
# Run with coverage
|
# Run with coverage report
|
||||||
poetry run pytest --cov
|
make coverage
|
||||||
|
|
||||||
# Run specific test file
|
# Run specific test file
|
||||||
poetry run pytest tests/test_agent.py
|
poetry run pytest tests/test_agent.py -v
|
||||||
|
|
||||||
# Run specific test
|
# Run specific test
|
||||||
poetry run pytest tests/test_agent.py::TestAgent::test_step
|
poetry run pytest tests/test_config_loader.py::TestBootstrapEnv -v
|
||||||
```
|
```
|
||||||
|
|
||||||
### Code Quality
|
### Code Quality
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Linting
|
# Lint and auto-fix
|
||||||
poetry run ruff check .
|
make lint
|
||||||
|
|
||||||
# Formatting
|
# Format code
|
||||||
poetry run black .
|
make format
|
||||||
|
|
||||||
# Type checking (if mypy is installed)
|
# Clean build artifacts
|
||||||
poetry run mypy .
|
make clean
|
||||||
```
|
```
|
||||||
|
|
||||||
### Adding a New Tool
|
### Adding a New Tool
|
||||||
|
|
||||||
Quick example:
|
1. **Create the tool function** in `alfred/agent/tools/`:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# 1. Create the tool function in agent/tools/api.py
|
# alfred/agent/tools/api.py
|
||||||
def my_new_tool(param: str) -> Dict[str, Any]:
|
def my_new_tool(param: str) -> dict[str, Any]:
|
||||||
"""Tool description."""
|
"""
|
||||||
|
Short description of what this tool does.
|
||||||
|
|
||||||
|
This will be shown to the LLM to help it decide when to use this tool.
|
||||||
|
"""
|
||||||
memory = get_memory()
|
memory = get_memory()
|
||||||
# Implementation
|
|
||||||
return {"status": "ok", "data": "result"}
|
# Your implementation here
|
||||||
|
result = do_something(param)
|
||||||
# 2. Register in agent/registry.py
|
|
||||||
Tool(
|
return {
|
||||||
name="my_new_tool",
|
"status": "success",
|
||||||
description="What this tool does",
|
"data": result
|
||||||
func=api_tools.my_new_tool,
|
}
|
||||||
parameters={
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"param": {"type": "string", "description": "Parameter description"},
|
|
||||||
},
|
|
||||||
"required": ["param"],
|
|
||||||
},
|
|
||||||
),
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Docker
|
2. **Register in the registry** (`alfred/agent/registry.py`):
|
||||||
|
|
||||||
### Build
|
```python
|
||||||
|
tool_functions = [
|
||||||
|
# ... existing tools ...
|
||||||
|
api_tools.my_new_tool, # Add your tool here
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
The tool will be automatically registered with its parameters extracted from the function signature.
|
||||||
|
|
||||||
|
### Version Management
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker build -t agent-media .
|
# Bump version (must be on main branch)
|
||||||
|
make patch # 0.1.7 -> 0.1.8
|
||||||
|
make minor # 0.1.7 -> 0.2.0
|
||||||
|
make major # 0.1.7 -> 1.0.0
|
||||||
```
|
```
|
||||||
|
|
||||||
### Run
|
## 📚 API Reference
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -p 8000:8000 \
|
|
||||||
-e DEEPSEEK_API_KEY=your-key \
|
|
||||||
-e TMDB_API_KEY=your-key \
|
|
||||||
-v $(pwd)/memory_data:/app/memory_data \
|
|
||||||
agent-media
|
|
||||||
```
|
|
||||||
|
|
||||||
### Docker Compose
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Start all services (agent + qBittorrent)
|
|
||||||
docker-compose up -d
|
|
||||||
|
|
||||||
# View logs
|
|
||||||
docker-compose logs -f
|
|
||||||
|
|
||||||
# Stop
|
|
||||||
docker-compose down
|
|
||||||
```
|
|
||||||
|
|
||||||
## API Documentation
|
|
||||||
|
|
||||||
### Endpoints
|
### Endpoints
|
||||||
|
|
||||||
#### `GET /health`
|
#### `GET /health`
|
||||||
Health check endpoint.
|
Health check endpoint.
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"status": "healthy",
|
"status": "healthy",
|
||||||
"version": "0.2.0"
|
"version": "0.1.7"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
#### `GET /v1/models`
|
#### `GET /v1/models`
|
||||||
List available models (OpenAI-compatible).
|
List available models (OpenAI-compatible).
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"object": "list",
|
||||||
|
"data": [
|
||||||
|
{
|
||||||
|
"id": "alfred",
|
||||||
|
"object": "model",
|
||||||
|
"owned_by": "alfred"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
#### `POST /v1/chat/completions`
|
#### `POST /v1/chat/completions`
|
||||||
Chat with the agent (OpenAI-compatible).
|
Chat with the agent (OpenAI-compatible).
|
||||||
|
|
||||||
**Request:**
|
**Request:**
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"model": "agent-media",
|
"model": "alfred",
|
||||||
"messages": [
|
"messages": [
|
||||||
{"role": "user", "content": "Find Inception"}
|
{"role": "user", "content": "Find Inception"}
|
||||||
],
|
],
|
||||||
@@ -317,7 +463,7 @@ Chat with the agent (OpenAI-compatible).
|
|||||||
"id": "chatcmpl-xxx",
|
"id": "chatcmpl-xxx",
|
||||||
"object": "chat.completion",
|
"object": "chat.completion",
|
||||||
"created": 1234567890,
|
"created": 1234567890,
|
||||||
"model": "agent-media",
|
"model": "alfred",
|
||||||
"choices": [{
|
"choices": [{
|
||||||
"index": 0,
|
"index": 0,
|
||||||
"message": {
|
"message": {
|
||||||
@@ -330,71 +476,120 @@ Chat with the agent (OpenAI-compatible).
|
|||||||
```
|
```
|
||||||
|
|
||||||
#### `GET /memory/state`
|
#### `GET /memory/state`
|
||||||
View full memory state (debug).
|
View full memory state (debug endpoint).
|
||||||
|
|
||||||
#### `POST /memory/clear-session`
|
#### `POST /memory/clear-session`
|
||||||
Clear session memories (STM + Episodic).
|
Clear session memories (STM + Episodic).
|
||||||
|
|
||||||
## Troubleshooting
|
## 🔧 Troubleshooting
|
||||||
|
|
||||||
### Agent doesn't respond
|
### Agent doesn't respond
|
||||||
- Check API keys in `.env`
|
|
||||||
- Verify LLM provider is running (Ollama) or accessible (DeepSeek)
|
1. Check API keys in `.env`
|
||||||
- Check logs: `docker-compose logs agent-media`
|
2. Verify LLM provider is running:
|
||||||
|
```bash
|
||||||
|
# For Ollama
|
||||||
|
docker logs alfred-ollama
|
||||||
|
|
||||||
|
# Check if model is pulled
|
||||||
|
docker exec alfred-ollama ollama list
|
||||||
|
```
|
||||||
|
3. Check Alfred logs: `docker logs alfred-core`
|
||||||
|
|
||||||
### qBittorrent connection failed
|
### qBittorrent connection failed
|
||||||
- Verify qBittorrent is running
|
|
||||||
- Check `QBITTORRENT_HOST` in `.env`
|
1. Verify qBittorrent is running: `docker ps | grep qbittorrent`
|
||||||
- Ensure Web UI is enabled in qBittorrent settings
|
2. Check Web UI is enabled in qBittorrent settings
|
||||||
|
3. Verify credentials in `.env`:
|
||||||
|
```bash
|
||||||
|
QBITTORRENT_URL=http://qbittorrent:16140
|
||||||
|
QBITTORRENT_USERNAME=admin
|
||||||
|
QBITTORRENT_PASSWORD=<check-your-env>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database connection issues
|
||||||
|
|
||||||
|
1. Check MongoDB is healthy: `docker logs alfred-mongodb`
|
||||||
|
2. Verify credentials match in `.env`
|
||||||
|
3. Try restarting: `make restart`
|
||||||
|
|
||||||
### Memory not persisting
|
### Memory not persisting
|
||||||
- Check `memory_data/` directory exists and is writable
|
|
||||||
- Verify volume mounts in Docker
|
1. Check `data/` directory exists and is writable
|
||||||
|
2. Verify volume mounts in `docker-compose.yaml`
|
||||||
|
3. Check file permissions: `ls -la data/`
|
||||||
|
|
||||||
|
### Bootstrap fails
|
||||||
|
|
||||||
|
1. Ensure `.env.example` exists
|
||||||
|
2. Check `pyproject.toml` has required sections:
|
||||||
|
```toml
|
||||||
|
[tool.alfred.settings]
|
||||||
|
[tool.alfred.security]
|
||||||
|
```
|
||||||
|
3. Run manually: `python scripts/bootstrap.py`
|
||||||
|
|
||||||
### Tests failing
|
### Tests failing
|
||||||
- Run `poetry install` to ensure dependencies are up to date
|
|
||||||
- Check logs for specific error messages
|
|
||||||
|
|
||||||
## Contributing
|
1. Update dependencies: `poetry install`
|
||||||
|
2. Check Python version: `python --version` (needs 3.14+)
|
||||||
|
3. Run specific failing test with verbose output:
|
||||||
|
```bash
|
||||||
|
poetry run pytest tests/test_failing.py -v --tb=long
|
||||||
|
```
|
||||||
|
|
||||||
Contributions are welcome!
|
## 🤝 Contributing
|
||||||
|
|
||||||
### Development Workflow
|
Contributions are welcome! Please follow these steps:
|
||||||
|
|
||||||
1. Fork the repository
|
1. **Fork** the repository
|
||||||
2. Create a feature branch: `git checkout -b feature/my-feature`
|
2. **Create** a feature branch: `git checkout -b feature/my-feature`
|
||||||
3. Make your changes
|
3. **Make** your changes
|
||||||
4. Run tests: `poetry run pytest`
|
4. **Run** tests: `make test`
|
||||||
5. Run linting: `poetry run ruff check . && poetry run black .`
|
5. **Run** linting: `make lint && make format`
|
||||||
6. Commit: `git commit -m "Add my feature"`
|
6. **Commit**: `git commit -m "feat: add my feature"`
|
||||||
7. Push: `git push origin feature/my-feature`
|
7. **Push**: `git push origin feature/my-feature`
|
||||||
8. Create a Pull Request
|
8. **Create** a Pull Request
|
||||||
|
|
||||||
## Documentation
|
### Commit Convention
|
||||||
|
|
||||||
- [Architecture Diagram](docs/architecture_diagram.md) - System architecture overview
|
We use [Conventional Commits](https://www.conventionalcommits.org/):
|
||||||
- [Class Diagram](docs/class_diagram.md) - Class structure and relationships
|
|
||||||
- [Component Diagram](docs/component_diagram.md) - Component interactions
|
|
||||||
- [Sequence Diagram](docs/sequence_diagram.md) - Sequence flows
|
|
||||||
- [Flowchart](docs/flowchart.md) - System flowcharts
|
|
||||||
|
|
||||||
## License
|
- `feat:` New feature
|
||||||
|
- `fix:` Bug fix
|
||||||
|
- `docs:` Documentation
|
||||||
|
- `refactor:` Code refactoring
|
||||||
|
- `test:` Adding tests
|
||||||
|
- `chore:` Maintenance
|
||||||
|
|
||||||
MIT License - see [LICENSE](LICENSE) file for details.
|
## 📖 Documentation
|
||||||
|
|
||||||
## Acknowledgments
|
- [Architecture Diagram](docs/architecture_diagram.md) — System architecture overview
|
||||||
|
- [Class Diagram](docs/class_diagram.md) — Class structure and relationships
|
||||||
|
- [Component Diagram](docs/component_diagram.md) — Component interactions
|
||||||
|
- [Sequence Diagram](docs/sequence_diagram.md) — Sequence flows
|
||||||
|
- [Flowchart](docs/flowchart.md) — System flowcharts
|
||||||
|
|
||||||
- [DeepSeek](https://www.deepseek.com/) - LLM provider
|
## 📄 License
|
||||||
- [TMDB](https://www.themoviedb.org/) - Movie database
|
|
||||||
- [qBittorrent](https://www.qbittorrent.org/) - Torrent client
|
|
||||||
- [FastAPI](https://fastapi.tiangolo.com/) - Web framework
|
|
||||||
|
|
||||||
## Support
|
MIT License — see [LICENSE](LICENSE) file for details.
|
||||||
|
|
||||||
|
## 🙏 Acknowledgments
|
||||||
|
|
||||||
|
- [LibreChat](https://github.com/danny-avila/LibreChat) — Beautiful chat interface
|
||||||
|
- [Ollama](https://ollama.ai/) — Local LLM runtime
|
||||||
|
- [DeepSeek](https://www.deepseek.com/) — LLM provider
|
||||||
|
- [TMDB](https://www.themoviedb.org/) — Movie database
|
||||||
|
- [qBittorrent](https://www.qbittorrent.org/) — Torrent client
|
||||||
|
- [FastAPI](https://fastapi.tiangolo.com/) — Web framework
|
||||||
|
- [Pydantic](https://docs.pydantic.dev/) — Data validation
|
||||||
|
|
||||||
|
## 📬 Support
|
||||||
|
|
||||||
- 📧 Email: francois.hodiaumont@gmail.com
|
- 📧 Email: francois.hodiaumont@gmail.com
|
||||||
- 🐛 Issues: [GitHub Issues](https://github.com/your-username/agent-media/issues)
|
- 🐛 Issues: [GitHub Issues](https://github.com/francwa/alfred_media_organizer/issues)
|
||||||
- 💬 Discussions: [GitHub Discussions](https://github.com/your-username/agent-media/discussions)
|
- 💬 Discussions: [GitHub Discussions](https://github.com/francwa/alfred_media_organizer/discussions)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
Made with ❤️ by Francwa
|
<p align="center">Made with ❤️ by <a href="https://github.com/francwa">Francwa</a></p>
|
||||||
|
|||||||
231
cli.py
231
cli.py
@@ -1,231 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import os
|
|
||||||
import secrets
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from datetime import datetime
|
|
||||||
from enum import StrEnum
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import NoReturn
|
|
||||||
|
|
||||||
# Variables the user must supply; ensure_env() prompts interactively
# for any of these that are missing or empty.
REQUIRED_VARS = ["DEEPSEEK_API_KEY", "TMDB_API_KEY", "QBITTORRENT_URL"]

# Secrets generated automatically via secrets.token_hex().
# Values are sizes in bytes (token_hex emits 2x that many hex chars).
KEYS_TO_GENERATE = {
    "JWT_SECRET": 32,
    "JWT_REFRESH_SECRET": 32,
    "CREDS_KEY": 32,
    "CREDS_IV": 16,
}
|
|
||||||
|
|
||||||
|
|
||||||
class Style(StrEnum):
    """ANSI escape codes for styling terminal output.

    Usage: f"{Style.RED}Error{Style.RESET}"
    """

    RESET = "\033[0m"  # clears all attributes back to terminal default
    BOLD = "\033[1m"
    RED = "\033[31m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    CYAN = "\033[36m"
    DIM = "\033[2m"  # faint text, used for low-importance notices
|
|
||||||
|
|
||||||
|
|
||||||
# Colorize only when stdout is an interactive terminal and the user has
# not opted out via the NO_COLOR environment variable convention.
USE_COLORS = sys.stdout.isatty() and "NO_COLOR" not in os.environ
|
|
||||||
|
|
||||||
|
|
||||||
def styled(text: str, color_code: str) -> str:
    """Wrap *text* in ANSI codes when the terminal supports color."""
    if not USE_COLORS:
        return text
    return f"{color_code}{text}{Style.RESET}"
|
|
||||||
|
|
||||||
|
|
||||||
def log(msg: str, color: str | None = None, prefix: str = "") -> None:
    """Print a formatted message.

    Args:
        msg: Text to print.
        color: Optional ANSI code (see Style); applied only when the
            terminal supports colors (see styled()).
        prefix: String prepended verbatim, e.g. indentation.
    """
    formatted_msg = styled(msg, color) if color else msg
    print(f"{prefix}{formatted_msg}")
|
|
||||||
|
|
||||||
|
|
||||||
def error_exit(msg: str) -> NoReturn:
    """Print an error message in red and exit the process with status 1.

    Args:
        msg: Error description shown to the user.
    """
    log(f"❌ {msg}", Style.RED)
    sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def is_docker_running() -> bool:
    """Check if Docker is available and responsive.

    Returns:
        True if the Docker daemon answers `docker info`, False otherwise.

    Note:
        When the docker binary is not on PATH at all, this exits the
        process via error_exit() instead of returning False.
    """
    if shutil.which("docker") is None:
        error_exit("Docker is not installed.")

    # `docker info` contacts the daemon, so it fails when the daemon is down.
    result = subprocess.run(
        ["docker", "info"],
        # Redirect stdout/stderr to keep output clean on success
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        # check=False: we inspect the return code ourselves
        check=False,
    )
    return result.returncode == 0
|
|
||||||
|
|
||||||
|
|
||||||
def parse_env(content: str) -> dict[str, str]:
    """Parse KEY=VALUE pairs from dotenv-style text.

    Blank lines, comment lines (starting with '#') and lines without an
    '=' are skipped. Keys and values are whitespace-stripped.
    """
    result: dict[str, str] = {}
    for raw in content.splitlines():
        stripped = raw.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        result[key.strip()] = value.strip()
    return result
|
|
||||||
|
|
||||||
|
|
||||||
def dump_env(content: str, data: dict[str, str]) -> str:
    """Render *content* with values from *data* substituted in place.

    Existing lines keep their order; comments, blank lines and non-
    assignment lines pass through untouched, and inline comments
    (" #...") after a value are preserved. Keys from *data* that never
    appear in *content* are appended at the end.

    Args:
        content: Original dotenv-style text.
        data: Mapping of keys to their (new) values.

    Returns:
        The updated text, always ending with a trailing newline.
    """
    new_content: list[str] = []
    processed_keys: set[str] = set()

    for raw_line in content.splitlines():
        line = raw_line.strip()
        # Pass-through line (empty, comment, or not an assignment).
        if not line or line.startswith("#") or "=" not in line:
            new_content.append(raw_line)
            continue

        key_chunk, value_chunk = raw_line.split("=", 1)
        key = key_chunk.strip()

        # Key is not being updated: keep the line verbatim.
        if key not in data:
            new_content.append(raw_line)
            continue

        processed_keys.add(key)
        new_value = data[key]

        if " #" not in value_chunk:
            new_line = f"{key_chunk}={new_value}"
        else:
            # Preserve the inline comment after the value.
            # NOTE(review): a value that itself contains " #" would be
            # truncated here — assumes values never embed that sequence.
            _, comment = value_chunk.split(" #", 1)
            new_line = f"{key_chunk}={new_value} #{comment}"

        new_content.append(new_line)

    # Append keys that were not present in the original content.
    for key, value in data.items():
        if key not in processed_keys:
            new_content.append(f"{key}={value}")

    return "\n".join(new_content) + "\n"
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_env() -> None:
    """Manage the .env lifecycle: creation, secret generation, prompts.

    Reads .env when present (otherwise seeds from .env.example),
    generates any missing secrets listed in KEYS_TO_GENERATE, prompts
    interactively for missing REQUIRED_VARS, then writes the result back
    to .env — backing up any pre-existing .env first.
    """
    env_path = Path(".env")
    env_example_path = Path(".env.example")
    updated: bool = False

    # Read .env if it exists, else fall back to the example template.
    if env_path.exists():
        content: str = env_path.read_text(encoding="utf-8")
    else:
        content: str = env_example_path.read_text(encoding="utf-8")

    existing_vars: dict[str, str] = parse_env(content)

    # Generate missing (or empty) secrets.
    for key, length in KEYS_TO_GENERATE.items():
        if key not in existing_vars or not existing_vars[key]:
            log(f"Generating {key}...", Style.GREEN, prefix=" ")
            existing_vars[key] = secrets.token_hex(length)
            updated = True
            log("Done", Style.GREEN, prefix=" ")

    # Prompt for missing mandatory keys.
    color = Style.YELLOW if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""
    for key in REQUIRED_VARS:
        if key not in existing_vars or not existing_vars[key]:
            try:
                existing_vars[key] = input(
                    f" {color}Enter value for {key}: {reset}"
                ).strip()
                updated = True
            except KeyboardInterrupt:
                print()  # keep the shell on a fresh line after ^C
                error_exit("Aborted by user.")

    # Write to disk only when something actually changed.
    if updated:
        # Back up the original first so a bad edit is recoverable.
        if env_path.exists():
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = Path(f"{env_path}.{timestamp}.bak")
            shutil.copy(env_path, backup_path)
            log(f"Backup created: {backup_path}", Style.DIM)

        new_content = dump_env(content, existing_vars)
        env_path.write_text(new_content, encoding="utf-8")
        log(".env updated successfully.", Style.GREEN)
    else:
        log("Configuration is up to date.", Style.GREEN)
|
|
||||||
|
|
||||||
|
|
||||||
def setup() -> None:
    """Orchestrate initialization: verify Docker, then prepare .env.

    Raises:
        SystemExit: Via error_exit() when Docker is unavailable.
    """
    # Fix: the return value was previously discarded, so setup continued
    # even when the Docker daemon was installed but not running.
    if not is_docker_running():
        error_exit("Docker daemon is not running.")
    ensure_env()
|
|
||||||
|
|
||||||
|
|
||||||
def status() -> None:
    """Display a simple status dashboard (Docker daemon and .env file)."""
    # Bold title only when colors are enabled.
    bold = Style.BOLD if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""

    print(f"\n{bold}ALFRED STATUS{reset}")
    print(f"{bold}==============={reset}\n")

    # Docker daemon check.
    docker_state = (
        styled("✓ running", Style.GREEN)
        if is_docker_running()
        else styled("✗ stopped", Style.RED)
    )
    print(f" Docker: {docker_state}")

    # .env file check.
    env_state = (
        styled("✓ present", Style.GREEN)
        if Path(".env").exists()
        else styled("✗ missing", Style.RED)
    )
    print(f" .env: {env_state}")

    print("")
|
|
||||||
|
|
||||||
|
|
||||||
def check() -> None:
    """Prerequisite check invoked by 'make up'.

    NOTE(review): the original described this as a "silent" check, but it
    delegates to setup(), which logs and may prompt interactively —
    confirm whether truly silent behavior is intended.
    """
    setup()
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
    """CLI entry point: dispatch on the first positional argument."""
    if len(sys.argv) < 2:
        print("Usage: python cli.py [setup|check|status]")
        sys.exit(1)

    cmd = sys.argv[1]

    # Table-driven dispatch instead of an if/elif chain.
    handlers = {"setup": setup, "check": check, "status": status}
    handler = handlers.get(cmd)
    if handler is None:
        error_exit(f"Unknown command: {cmd}")
    handler()
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point.
if __name__ == "__main__":
    main()
|
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
import re
|
|
||||||
import secrets
|
import secrets
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import tomllib
|
import tomllib
|
||||||
|
from config_loader import load_build_config, write_env_make
|
||||||
|
|
||||||
|
|
||||||
def generate_secret(rule: str) -> str:
|
def generate_secret(rule: str) -> str:
|
||||||
@@ -31,6 +31,8 @@ def extract_python_version(version_string: str) -> tuple[str, str]:
|
|||||||
"~3.14.2" -> ("3.14.2", "3.14")
|
"~3.14.2" -> ("3.14.2", "3.14")
|
||||||
"3.14.2" -> ("3.14.2", "3.14")
|
"3.14.2" -> ("3.14.2", "3.14")
|
||||||
"""
|
"""
|
||||||
|
import re # noqa: PLC0415
|
||||||
|
|
||||||
# Remove poetry version operators (==, ^, ~, >=, etc.)
|
# Remove poetry version operators (==, ^, ~, >=, etc.)
|
||||||
clean_version = re.sub(r"^[=^~><]+", "", version_string.strip())
|
clean_version = re.sub(r"^[=^~><]+", "", version_string.strip())
|
||||||
|
|
||||||
@@ -148,7 +150,9 @@ def bootstrap(): # noqa: PLR0912, PLR0915
|
|||||||
elif key == "ALFRED_VERSION":
|
elif key == "ALFRED_VERSION":
|
||||||
if existing_env.get(key) != alfred_version:
|
if existing_env.get(key) != alfred_version:
|
||||||
new_lines.append(f"{key}={alfred_version}\n")
|
new_lines.append(f"{key}={alfred_version}\n")
|
||||||
print(f" ↻ Updated Alfred version: {existing_env.get(key, 'N/A')} → {alfred_version}")
|
print(
|
||||||
|
f" ↻ Updated Alfred version: {existing_env.get(key, 'N/A')} → {alfred_version}"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
new_lines.append(f"{key}={alfred_version}\n")
|
new_lines.append(f"{key}={alfred_version}\n")
|
||||||
print(f" ↻ Kept Alfred version: {alfred_version}")
|
print(f" ↻ Kept Alfred version: {alfred_version}")
|
||||||
@@ -224,20 +228,10 @@ def bootstrap(): # noqa: PLR0912, PLR0915
|
|||||||
f.writelines(new_lines)
|
f.writelines(new_lines)
|
||||||
print(f"\n✅ {env_path.name} generated successfully.")
|
print(f"\n✅ {env_path.name} generated successfully.")
|
||||||
|
|
||||||
# Generate .env.make for Makefile
|
# Generate .env.make for Makefile using shared config loader
|
||||||
env_make_path = base_dir / ".env.make"
|
config = load_build_config(base_dir)
|
||||||
with open(env_make_path, "w", encoding="utf-8") as f:
|
write_env_make(config, base_dir)
|
||||||
f.write("# Auto-generated from pyproject.toml by bootstrap.py\n")
|
print("✅ .env.make generated for Makefile.")
|
||||||
f.write(f"export ALFRED_VERSION={alfred_version}\n")
|
|
||||||
f.write(f"export PYTHON_VERSION={python_version_full}\n")
|
|
||||||
f.write(f"export PYTHON_VERSION_SHORT={python_version_short}\n")
|
|
||||||
f.write(f"export RUNNER={settings_keys['runner']}\n")
|
|
||||||
f.write(f"export IMAGE_NAME={settings_keys['image_name']}\n")
|
|
||||||
f.write(f"export SERVICE_NAME={settings_keys['service_name']}\n")
|
|
||||||
f.write(f"export LIBRECHAT_VERSION={settings_keys['librechat_version']}\n")
|
|
||||||
f.write(f"export RAG_VERSION={settings_keys['rag_version']}\n")
|
|
||||||
|
|
||||||
print(f"✅ {env_make_path.name} generated for Makefile.")
|
|
||||||
print("\n⚠️ Reminder: Please manually add your API keys to the .env file.")
|
print("\n⚠️ Reminder: Please manually add your API keys to the .env file.")
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
89
scripts/config_loader.py
Normal file
89
scripts/config_loader.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""Shared configuration loader for bootstrap and CI."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import NamedTuple
|
||||||
|
|
||||||
|
import tomllib
|
||||||
|
|
||||||
|
|
||||||
|
class BuildConfig(NamedTuple):
    """Build configuration extracted from pyproject.toml."""

    alfred_version: str  # project version from [tool.poetry].version
    python_version: str  # full interpreter version, e.g. "3.14.2"
    python_version_short: str  # major.minor only, e.g. "3.14"
    runner: str  # from [tool.alfred.settings]; passed to docker build
    image_name: str  # from [tool.alfred.settings]
    service_name: str  # from [tool.alfred.settings]
    librechat_version: str  # from [tool.alfred.settings]
    rag_version: str  # from [tool.alfred.settings]
|
||||||
|
|
||||||
|
|
||||||
|
def extract_python_version(version_string: str) -> tuple[str, str]:
    """
    Extract Python version from poetry dependency string.

    Examples:
        "==3.14.2" -> ("3.14.2", "3.14")
        "^3.14.2" -> ("3.14.2", "3.14")
        "~3.14.2" -> ("3.14.2", "3.14")
        "3.14.2" -> ("3.14.2", "3.14")

    Raises:
        ValueError: If fewer than two version components are present.
    """
    # Strip poetry version operators (==, ^, ~, >=, etc.).
    clean_version = re.sub(r"^[=^~><]+", "", version_string.strip())
    parts = clean_version.split(".")

    # Guard clause: require at least major.minor.
    if len(parts) < 2:
        raise ValueError(f"Invalid Python version format: {version_string}")

    return clean_version, f"{parts[0]}.{parts[1]}"
|
||||||
|
|
||||||
|
|
||||||
|
def load_build_config(base_dir: Path | None = None) -> BuildConfig:
    """Load build configuration from pyproject.toml.

    Args:
        base_dir: Directory containing pyproject.toml; defaults to the
            parent of this script's directory (the repository root).

    Returns:
        A BuildConfig populated from [tool.alfred.settings] and
        [tool.poetry].

    Raises:
        FileNotFoundError: If pyproject.toml is missing.
        KeyError: If a required table or setting is absent.
        ValueError: If the python dependency version string is malformed.
    """
    if base_dir is None:
        # scripts/ lives one level below the repository root.
        base_dir = Path(__file__).resolve().parent.parent

    toml_path = base_dir / "pyproject.toml"
    if not toml_path.exists():
        raise FileNotFoundError(f"pyproject.toml not found: {toml_path}")

    # tomllib requires the file opened in binary mode.
    with open(toml_path, "rb") as f:
        data = tomllib.load(f)
    settings_keys = data["tool"]["alfred"]["settings"]
    dependencies = data["tool"]["poetry"]["dependencies"]
    alfred_version = data["tool"]["poetry"]["version"]

    python_version_full, python_version_short = extract_python_version(
        dependencies["python"]
    )

    return BuildConfig(
        alfred_version=alfred_version,
        python_version=python_version_full,
        python_version_short=python_version_short,
        runner=settings_keys["runner"],
        image_name=settings_keys["image_name"],
        service_name=settings_keys["service_name"],
        librechat_version=settings_keys["librechat_version"],
        rag_version=settings_keys["rag_version"],
    )
|
||||||
|
|
||||||
|
|
||||||
|
def write_env_make(config: BuildConfig, base_dir: Path | None = None) -> None:
    """Write the .env.make file consumed by the Makefile.

    Args:
        config: Build configuration to serialize.
        base_dir: Target directory; defaults to the repository root
            (parent of this script's directory).
    """
    if base_dir is None:
        base_dir = Path(__file__).resolve().parent.parent

    # Assemble all lines up front, then write them in a single call.
    lines = [
        "# Auto-generated from pyproject.toml\n",
        f"export ALFRED_VERSION={config.alfred_version}\n",
        f"export PYTHON_VERSION={config.python_version}\n",
        f"export PYTHON_VERSION_SHORT={config.python_version_short}\n",
        f"export RUNNER={config.runner}\n",
        f"export IMAGE_NAME={config.image_name}\n",
        f"export SERVICE_NAME={config.service_name}\n",
        f"export LIBRECHAT_VERSION={config.librechat_version}\n",
        f"export RAG_VERSION={config.rag_version}\n",
    ]
    with open(base_dir / ".env.make", "w", encoding="utf-8") as fh:
        fh.writelines(lines)
|
||||||
22
scripts/generate_build_vars.py
Normal file
22
scripts/generate_build_vars.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Generate .env.make for CI/CD without generating secrets."""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from config_loader import load_build_config, write_env_make
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
    """Generate .env.make from pyproject.toml.

    Returns:
        0 on success, 1 on any failure.
    """
    try:
        config = load_build_config()
        write_env_make(config)
    except Exception as e:  # broad by design: any failure must yield exit 1
        # Fix: report errors on stderr so CI logs separate them from output.
        print(f"❌ Failed to generate .env.make: {e}", file=sys.stderr)
        return 1
    print("✅ .env.make generated successfully.")
    return 0
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: exit status mirrors main()'s return code.
if __name__ == "__main__":
    sys.exit(main())
|
||||||
Reference in New Issue
Block a user