infra: added orchestration and app bootstrap (.env)

This commit is contained in:
2026-01-01 04:06:55 +01:00
parent 7236730aea
commit 08313eaa9b
7 changed files with 522 additions and 161 deletions

View File

@@ -22,8 +22,7 @@ venv
.venv .venv
env env
.env .env
.env.* .env-
# IDE # IDE
.vscode .vscode
.idea .idea

View File

@@ -1,53 +1,92 @@
# Configuration MAX_HISTORY_MESSAGES=10
LIBRECHAT_VERSION=v0.8.1 MAX_TOOL_ITERATIONS=10
RAG_VERSION=v0.7.0 REQUEST_TIMEOUT=30
# Keys # LLM Settings
# - Deepseek API LLM_TEMPERATURE=0.2
DEEPSEEK_API_KEY=
# - Google API # Persistence
GOOGLE_API_KEY= DATA_STORAGE_DIR=data
#GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite #TODO:Update models
# - Anthropic API # Network configuration
ANTHROPIC_API_KEY= HOST=0.0.0.0
PORT=3080
# - Kimi API # Build information (Synced with pyproject.toml via bootstrap)
KIMI_API_KEY= IMAGE_NAME=
LIBRECHAT_VERSION=
PYTHON_VERSION=
PYTHON_VERSION_SHORT=
RAG_VERSION=
RUNNER=
SERVICE_NAME=
# - ChatGPT/Open API # --- SECURITY KEYS (CRITICAL) ---
OPENAI_API_KEY= # These are used for session tokens and encrypting sensitive data in MongoDB.
# If you lose these, you lose access to encrypted stored credentials.
# - Themoviedb.org API (media metadata)
TMDB_API_KEY=
# - Security keys
JWT_SECRET= JWT_SECRET=
JWT_REFRESH_SECRET= JWT_REFRESH_SECRET=
CREDS_KEY= CREDS_KEY=
CREDS_IV= CREDS_IV=
# Local LLM # --- DATABASES (AUTO-SECURED) ---
OLLAMA_BASE_URL= # Alfred uses MongoDB for application state and PostgreSQL for Vector RAG.
OLLAMA_MODEL= # Passwords will be generated as 24-character secure tokens if left blank.
# Alfred Configuration # MongoDB (Application Data)
LLM_PROVIDER=deepseek MONGO_URI=
MONGO_HOST=mongodb
MONGO_PORT=27017
MONGO_USER=alfred
MONGO_PASSWORD=
MONGO_DB_NAME=alfred
# Memory storage directory (inside container) # PostgreSQL (Vector Database / RAG)
MEMORY_STORAGE_DIR=/data/memory POSTGRES_URI=
POSTGRES_HOST=vectordb
# qBittorrent Configuration POSTGRES_PORT=5432
QBITTORRENT_URL= POSTGRES_USER=alfred
QBITTORRENT_USERNAME=admin
QBITTORRENT_PASSWORD=adminadmin
# Debug Options
DEBUG_LOGGING=false
DEBUG_CONSOLE=false
# Postgres (RAG)
POSTGRES_DB=
POSTGRES_USER=
POSTGRES_PASSWORD= POSTGRES_PASSWORD=
POSTGRES_DB_NAME=alfred
# --- EXTERNAL SERVICES ---
# Media Metadata (Required)
# Get your key at https://www.themoviedb.org/
TMDB_API_KEY=
TMDB_BASE_URL=https://api.themoviedb.org/3
# qBittorrent integration
QBITTORRENT_URL=http://qbittorrent:16140
QBITTORRENT_USERNAME=admin
QBITTORRENT_PASSWORD=
QBITTORRENT_PORT=16140
# Meilisearch
MEILI_ENABLED=FALSE
MEILI_NO_ANALYTICS=TRUE
MEILI_HOST=http://meilisearch:7700
MEILI_MASTER_KEY=
# --- LLM CONFIGURATION ---
# Providers: 'local', 'openai', 'anthropic', 'deepseek', 'google', 'kimi'
DEFAULT_LLM_PROVIDER=local
# Local LLM (Ollama)
OLLAMA_BASE_URL=http://ollama:11434
OLLAMA_MODEL=llama3.3:latest
# --- API KEYS (OPTIONAL) ---
# Fill only the ones you intend to use.
ANTHROPIC_API_KEY=
DEEPSEEK_API_KEY=
GOOGLE_API_KEY=
KIMI_API_KEY=
OPENAI_API_KEY=
# --- RAG ENGINE ---
# Enable/Disable the Retrieval Augmented Generation system
RAG_ENABLED=TRUE
RAG_API_URL=http://rag_api:8000
RAG_API_PORT=8000
EMBEDDINGS_PROVIDER=ollama
EMBEDDINGS_MODEL=nomic-embed-text

View File

@@ -43,6 +43,9 @@ RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system -r pyproject.toml; \ uv pip install --system -r pyproject.toml; \
fi fi
COPY scripts/ ./scripts/
COPY .env.example ./
# =========================================== # ===========================================
# Stage 2: Testing # Stage 2: Testing
# =========================================== # ===========================================
@@ -60,7 +63,8 @@ RUN --mount=type=cache,target=/root/.cache/pip \
fi fi
COPY alfred/ ./alfred COPY alfred/ ./alfred
COPY tests/ ./tests COPY scripts ./scripts
COPY tests/ ./tests
# =========================================== # ===========================================
# Stage 3: Runtime # Stage 3: Runtime
@@ -69,10 +73,11 @@ FROM python:${PYTHON_VERSION}-slim-bookworm AS runtime
ARG PYTHON_VERSION_SHORT ARG PYTHON_VERSION_SHORT
# TODO: Do we still need all of these keys?	# TODO: Do we still need all of these keys?
ENV LLM_PROVIDER=deepseek \ ENV LLM_PROVIDER=deepseek \
MEMORY_STORAGE_DIR=/data/memory \ MEMORY_STORAGE_DIR=/data/memory \
PYTHONDONTWRITEBYTECODE=1 \ PYTHONDONTWRITEBYTECODE=1 \
PYTHONPATH=/home/appuser/app \ PYTHONPATH=/home/appuser \
PYTHONUNBUFFERED=1 PYTHONUNBUFFERED=1
# Install runtime dependencies (needs root) # Install runtime dependencies (needs root)
@@ -85,8 +90,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN useradd -m -u 1000 -s /bin/bash appuser RUN useradd -m -u 1000 -s /bin/bash appuser
# Create data directories (needs root for /data) # Create data directories (needs root for /data)
RUN mkdir -p /data/memory /data/logs \ RUN mkdir -p /data /logs \
&& chown -R appuser:appuser /data && chown -R appuser:appuser /data /logs
# Switch to non-root user # Switch to non-root user
USER appuser USER appuser
@@ -100,9 +105,12 @@ COPY --from=builder /usr/local/bin /usr/local/bin
# Copy application code (already owned by appuser) # Copy application code (already owned by appuser)
COPY --chown=appuser:appuser alfred/ ./alfred COPY --chown=appuser:appuser alfred/ ./alfred
COPY --chown=appuser:appuser scripts/ ./scripts
COPY --chown=appuser:appuser .env.example ./
COPY --chown=appuser:appuser pyproject.toml ./
# Create volumes for persistent data # Create volumes for persistent data
VOLUME ["/data/memory", "/data/logs"] VOLUME ["/data", "/logs"]
# Expose port # Expose port
EXPOSE 8000 EXPOSE 8000
@@ -111,5 +119,4 @@ EXPOSE 8000
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5).raise_for_status()" || exit 1 CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5).raise_for_status()" || exit 1
# Run the application CMD ["python", "-m", "uvicorn", "alfred.app:app", "--host", "0.0.0.0", "--port", "8000"]
CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]

115
Makefile
View File

@@ -1,48 +1,37 @@
.DEFAULT_GOAL := help .DEFAULT_GOAL := help
# --- Config --- # --- Load Config from pyproject.toml ---
export IMAGE_NAME := alfred_media_organizer -include .env.make
export LIBRECHAT_VERSION := v0.8.1
export PYTHON_VERSION := 3.14.2 # --- Profiles management ---
export PYTHON_VERSION_SHORT := 3.14 # Usage: make up p=rag,meili
export RAG_VERSION := v0.7.0 p ?= core
export RUNNER := poetry PROFILES_PARAM := COMPOSE_PROFILES=$(p)
export SERVICE_NAME := alfred
# --- Commands --- # --- Commands ---
CLI := python3 cli.py
DOCKER_COMPOSE := docker compose DOCKER_COMPOSE := docker compose
DOCKER_BUILD := docker build \ DOCKER_BUILD := docker build --no-cache \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \ --build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \ --build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
--build-arg RUNNER=$(RUNNER) --build-arg RUNNER=$(RUNNER)
# --- Phony --- # --- Phony ---
.PHONY: setup status check .PHONY: .env up down restart logs ps shell build build-test install update \
.PHONY: up down restart logs ps shell install-hooks test coverage lint format clean major minor patch help
.PHONY: build build-test
.PHONY: install update install-hooks
.PHONY: test coverage lint format clean prune
.PHONY: major minor patch
.PHONY: help
# --- Setup --- # --- Setup ---
setup: .env .env.make:
@echo "Initializing environment..." @echo "Initializing environment..."
@$(CLI) setup \ @python scripts/bootstrap.py \
&& echo "✓ Environment ready" \ && echo "✓ Environment ready" \
|| (echo "✗ Setup failed" && exit 1) || (echo "✗ Environment setup failed" && exit 1)
status: bootstrap: .env .env.make
@$(CLI) status
check:
@$(CLI) check
# --- Docker --- # --- Docker ---
up: check up: .env
@echo "Starting containers..." @echo "Starting containers with profiles: [$(p)]..."
@$(DOCKER_COMPOSE) up -d --remove-orphans \ @$(PROFILES_PARAM) $(DOCKER_COMPOSE) up -d --remove-orphans \
&& echo "✓ Containers started" \ && echo "✓ Containers started" \
|| (echo "✗ Failed to start containers" && exit 1) || (echo "✗ Failed to start containers" && exit 1)
@@ -54,30 +43,30 @@ down:
restart: restart:
@echo "Restarting containers..." @echo "Restarting containers..."
@$(DOCKER_COMPOSE) restart \ @$(PROFILES_PARAM) $(DOCKER_COMPOSE) restart \
&& echo "✓ Containers restarted" \ && echo "✓ Containers restarted" \
|| (echo "✗ Failed to restart containers" && exit 1) || (echo "✗ Failed to restart containers" && exit 1)
logs: logs:
@echo "Following logs (Ctrl+C to exit)..." @echo "Following logs (Ctrl+C to exit)..."
@$(DOCKER_COMPOSE) logs -f @$(PROFILES_PARAM) $(DOCKER_COMPOSE) logs -f
ps: ps:
@echo "Container status:" @echo "Container status:"
@$(DOCKER_COMPOSE) ps @$(PROFILES_PARAM) $(DOCKER_COMPOSE) ps
shell: shell:
@echo "Opening shell in $(SERVICE_NAME)..." @echo "Opening shell in $(SERVICE_NAME)..."
@$(DOCKER_COMPOSE) exec $(SERVICE_NAME) /bin/bash @$(DOCKER_COMPOSE) exec $(SERVICE_NAME) /bin/bash
# --- Build --- # --- Build ---
build: check build: .env.make
@echo "Building image $(IMAGE_NAME):latest ..." @echo "Building image $(IMAGE_NAME):latest ..."
@$(DOCKER_BUILD) -t $(IMAGE_NAME):latest . \ @$(DOCKER_BUILD) -t $(IMAGE_NAME):latest . \
&& echo "✓ Build complete" \ && echo "✓ Build complete" \
|| (echo "✗ Build failed" && exit 1) || (echo "✗ Build failed" && exit 1)
build-test: check build-test: .env.make
@echo "Building test image $(IMAGE_NAME):test..." @echo "Building test image $(IMAGE_NAME):test..."
@$(DOCKER_BUILD) --target test -t $(IMAGE_NAME):test . \ @$(DOCKER_BUILD) --target test -t $(IMAGE_NAME):test . \
&& echo "✓ Test image built" \ && echo "✓ Test image built" \
@@ -90,18 +79,18 @@ install:
&& echo "✓ Dependencies installed" \ && echo "✓ Dependencies installed" \
|| (echo "✗ Installation failed" && exit 1) || (echo "✗ Installation failed" && exit 1)
update:
@echo "Updating dependencies with $(RUNNER)..."
@$(RUNNER) update \
&& echo "✓ Dependencies updated" \
|| (echo "✗ Update failed" && exit 1)
install-hooks: install-hooks:
@echo "Installing pre-commit hooks..." @echo "Installing pre-commit hooks..."
@$(RUNNER) run pre-commit install \ @$(RUNNER) run pre-commit install \
&& echo "✓ Hooks installed" \ && echo "✓ Hooks installed" \
|| (echo "✗ Hook installation failed" && exit 1) || (echo "✗ Hook installation failed" && exit 1)
update:
@echo "Updating dependencies with $(RUNNER)..."
@$(RUNNER) update \
&& echo "✓ Dependencies updated" \
|| (echo "✗ Update failed" && exit 1)
# --- Quality --- # --- Quality ---
test: test:
@echo "Running tests..." @echo "Running tests..."
@@ -133,12 +122,6 @@ clean:
@find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true @find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
@echo "✓ Cleanup complete" @echo "✓ Cleanup complete"
prune:
@echo "Pruning Docker system..."
@docker system prune -af \
&& echo "✓ Docker pruned" \
|| (echo "✗ Prune failed" && exit 1)
# --- Versioning --- # --- Versioning ---
major minor patch: _check-main major minor patch: _check-main
@echo "Bumping $@ version..." @echo "Bumping $@ version..."
@@ -151,6 +134,7 @@ major minor patch: _check-main
&& echo "✓ Tags pushed" \ && echo "✓ Tags pushed" \
|| (echo "✗ Push failed" && exit 1) || (echo "✗ Push failed" && exit 1)
# CI/CD helpers
_ci-dump-config: _ci-dump-config:
@echo "image_name=$(IMAGE_NAME)" @echo "image_name=$(IMAGE_NAME)"
@echo "python_version=$(PYTHON_VERSION)" @echo "python_version=$(PYTHON_VERSION)"
@@ -173,31 +157,26 @@ _check-main:
# --- Help --- # --- Help ---
help: help:
@echo "Usage: make [target]" @echo "Cleverly Crafted Unawareness - Management Commands"
@echo "" @echo ""
@echo "Setup:" @echo "Usage: make [target] [p=profile1,profile2]"
@echo " setup Initialize .env"
@echo " status Show project status"
@echo "" @echo ""
@echo "Docker:" @echo "Docker:"
@echo " up Start containers" @echo " up Start containers (default profile: core)"
@echo " down Stop containers" @echo " Example: make up p=rag,meili"
@echo " restart Restart containers" @echo " down Stop all containers"
@echo " logs Follow logs" @echo " restart Restart containers (supports p=...)"
@echo " ps Container status" @echo " logs Follow logs (supports p=...)"
@echo " shell Shell into container" @echo " ps Status of containers"
@echo " build Build image" @echo " shell Open bash in the core container"
@echo " build Build the production Docker image"
@echo "" @echo ""
@echo "Dev:" @echo "Dev & Quality:"
@echo " install Install dependencies" @echo " setup Bootstrap .env and security keys"
@echo " update Update dependencies" @echo " install Install dependencies via $(RUNNER)"
@echo " test Run tests" @echo " test Run pytest suite"
@echo " coverage Run tests with coverage" @echo " coverage Run tests and generate HTML report"
@echo " lint Lint code" @echo " lint/format Quality and style checks"
@echo " format Format code"
@echo " clean Clean artifacts"
@echo "" @echo ""
@echo "Release:" @echo "Release:"
@echo " patch Bump patch version" @echo " major|minor|patch Bump version and push tags (main branch only)"
@echo " minor Bump minor version"
@echo " major Bump major version"

0
alfred/__init__.py Normal file
View File

View File

@@ -1,4 +1,20 @@
services: services:
# - CORE SERVICES -
# --- .ENV INIT ---
alfred-init:
container_name: alfred-init
build:
context: .
target: builder
args:
PYTHON_VERSION: ${PYTHON_VERSION}
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
RUNNER: ${RUNNER}
command: python scripts/bootstrap.py
networks:
- alfred-net
# --- MAIN APPLICATION ---
alfred: alfred:
container_name: alfred-core container_name: alfred-core
build: build:
@@ -8,47 +24,38 @@ services:
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT} PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
RUNNER: ${RUNNER} RUNNER: ${RUNNER}
depends_on: depends_on:
- librechat alfred-init:
condition: service_completed_successfully
restart: unless-stopped restart: unless-stopped
env_file: env_file:
- .env - path: .env
environment: required: true
# LLM Configuration
LLM_PROVIDER: ${LLM_PROVIDER:-deepseek}
DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY:-}
# Memory storage
MEMORY_STORAGE_DIR: /data/memory
# External services
TMDB_API_KEY: ${TMDB_API_KEY:-}
QBITTORRENT_URL: ${QBITTORRENT_URL:-}
QBITTORRENT_USERNAME: ${QBITTORRENT_USERNAME:-}
QBITTORRENT_PASSWORD: ${QBITTORRENT_PASSWORD:-}
volumes: volumes:
- ./data/memory:/data/memory - ./data:/data
- ./logs:/data/logs - ./logs:/logs
# TODO: Development: mount code for hot reload (comment out in production) # TODO: Hot reload (comment out in production)
# - ./alfred:/app/alfred #- ./alfred:/home/appuser/alfred
networks:
- alfred-net
# --- FRONTEND LIBRECHAT ---
librechat: librechat:
container_name: alfred-librechat container_name: alfred-librechat
image: ghcr.io/danny-avila/librechat:${LIBRECHAT_VERSION} image: ghcr.io/danny-avila/librechat:${LIBRECHAT_VERSION}
depends_on: depends_on:
- mongodb alfred-init:
- meilisearch condition: service_completed_successfully
- rag_api mongodb:
condition: service_healthy
restart: unless-stopped restart: unless-stopped
env_file: env_file:
- .env - path: .env
required: true
environment: environment:
- HOST=0.0.0.0 # Remap value name
- MONGO_URI=mongodb://mongodb:27017/LibreChat - SEARCH=${MEILI_ENABLED}
- MEILI_HOST=http://meilisearch:7700
- RAG_PORT=${RAG_PORT:-8000}
- RAG_API_URL=http://rag_api:${RAG_PORT:-8000}
ports: ports:
- "${LIBRECHAT_PORT:-3080}:3080" - "${PORT}:${PORT}"
volumes: volumes:
- ./data/librechat/images:/app/client/public/images - ./data/librechat/images:/app/client/public/images
- ./data/librechat/uploads:/app/client/uploads - ./data/librechat/uploads:/app/client/uploads
@@ -56,47 +63,144 @@ services:
# Mount custom endpoint # Mount custom endpoint
- ./librechat/manifests:/app/manifests:ro - ./librechat/manifests:/app/manifests:ro
- ./librechat/librechat.yaml:/app/librechat.yaml:ro - ./librechat/librechat.yaml:/app/librechat.yaml:ro
networks:
- alfred-net
# --- DATABASE #1 - APP STATE ---
mongodb: mongodb:
container_name: alfred-mongodb container_name: alfred-mongodb
image: mongo:latest image: mongo:latest
restart: unless-stopped restart: unless-stopped
depends_on:
alfred-init:
condition: service_completed_successfully
env_file:
- path: .env
required: true
environment:
# Remap value name
- MONGO_INITDB_ROOT_USERNAME=${MONGO_USER}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_PASSWORD}
ports:
- "${MONGO_PORT}:${MONGO_PORT}"
volumes: volumes:
- ./data/mongo:/data/db - ./data/mongo:/data/db
command: mongod --noauth command: mongod --quiet --setParameter logComponentVerbosity='{"network":{"verbosity":0}}'
healthcheck:
test: |
mongosh --quiet --eval "db.adminCommand('ping')" || \
mongosh --quiet -u "${MONGO_USER}" -p "${MONGO_PASSWORD}" --authenticationDatabase admin --eval "db.adminCommand('ping')"
interval: 10s
timeout: 5s
retries: 5
networks:
- alfred-net
# --- OLLAMA - LOCAL LLM ENGINE ---
ollama:
image: ollama/ollama:latest
container_name: alfred-ollama
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
volumes:
- ./data/ollama:/root/.ollama
networks:
- alfred-net
# - OPTIONAL SERVICES -
# --- SEARCH ENGINE SUPER FAST (Optional) ---
meilisearch: meilisearch:
container_name: alfred-meilisearch container_name: alfred-meilisearch
image: getmeili/meilisearch:v1.12.3 image: getmeili/meilisearch:v1.12.3
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped restart: unless-stopped
environment: env_file:
- MEILI_NO_ANALYTICS=true - path: .env
required: true
volumes: volumes:
- ./data/meili:/meili_data - ./data/meilisearch:/meili_data
#profiles: ["meili", "full"] profiles: ["meili", "full"]
networks:
- alfred-net
# --- RETRIEVAL AUGMENTED GENERATION SYSTEM (Optional) ---
rag_api: rag_api:
container_name: alfred-rag container_name: alfred-rag
image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:${RAG_VERSION} image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:${RAG_VERSION}
depends_on:
alfred-init:
condition: service_completed_successfully
vectordb:
condition: service_healthy
restart: unless-stopped restart: unless-stopped
env_file: env_file:
- .env - path: .env
environment: required: true
- DB_HOST=vectordb
- DB_PORT=5432
- RAG_PORT=${RAG_PORT:-8000}
ports: ports:
- "${RAG_PORT:-8000}:${RAG_PORT:-8000}" - "${RAG_API_PORT}:${RAG_API_PORT}"
#profiles: ["rag", "full"] volumes:
- ./data/rag/uploads:/app/uploads
profiles: ["rag", "full"]
networks:
- alfred-net
# --- DATABASE #2 - Vector RAG (Optional) ---
vectordb: vectordb:
container_name: alfred-vectordb container_name: alfred-vectordb
image: pgvector/pgvector:0.8.0-pg16-bookworm image: pgvector/pgvector:0.8.0-pg16-bookworm
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped restart: unless-stopped
env_file: env_file:
- .env - path: .env
required: true
ports: ports:
- "${VECTOR_DB_PORT:-5432}:5432" - "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes: volumes:
- ./data/vectordb:/var/lib/postgresql/data - ./data/vectordb:/var/lib/postgresql/data
#profiles: ["rag", "full"] profiles: ["rag", "full"]
healthcheck:
test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-alfred} -d ${POSTGRES_DB_NAME:-alfred}" ]
interval: 5s
timeout: 5s
retries: 5
networks:
- alfred-net
# --- QBITTORENT (Optional) ---
qbittorrent:
image: lscr.io/linuxserver/qbittorrent:latest
container_name: alfred-qbittorrent
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
environment:
- PUID=1000
- PGID=1000
- TZ=Europe/Paris
- WEBUI_PORT=${QBITTORRENT_PORT}
volumes:
- ./data/qbittorrent/config:/config
- ./data/qbittorrent/downloads:/downloads
profiles: ["qbittorrent", "full"]
ports:
- "${QBITTORRENT_PORT}:${QBITTORRENT_PORT}"
networks:
- alfred-net
networks:
alfred-net:
name: alfred-internal
driver: bridge

233
scripts/bootstrap.py Normal file
View File

@@ -0,0 +1,233 @@
import re
import secrets
from pathlib import Path
import tomllib
def generate_secret(rule: str) -> str:
    """
    Generate a cryptographically secure secret from a spec string.

    The spec has the form ``"<size>:<format>"`` (e.g. ``"32:b64"``,
    ``"16:hex"``), where *size* is the number of random bytes and
    *format* selects the encoding:

    - ``b64``: URL-safe base64 via ``secrets.token_urlsafe``
    - ``hex``: hexadecimal via ``secrets.token_hex``

    Raises:
        ValueError: if the spec lacks a ``:`` separator, the size is not
            an integer, or the format is unknown.
    """
    size_part, sep, tech = rule.partition(":")
    # Previously a missing colon surfaced as an opaque IndexError; fail
    # with an explicit message instead so bad pyproject.toml specs are
    # easy to diagnose.
    if not sep:
        raise ValueError(f"Invalid secret spec (expected 'size:format'): {rule}")
    try:
        size = int(size_part)
    except ValueError as err:
        raise ValueError(f"Invalid secret size in spec: {rule}") from err
    if tech == "b64":
        return secrets.token_urlsafe(size)
    elif tech == "hex":
        return secrets.token_hex(size)
    else:
        raise ValueError(f"Invalid security format: {tech}")
def extract_python_version(version_string: str) -> tuple[str, str]:
    """
    Derive the full and short Python versions from a poetry constraint.

    Examples:
        "==3.14.2" -> ("3.14.2", "3.14")
        "^3.14.2"  -> ("3.14.2", "3.14")
        "~3.14.2"  -> ("3.14.2", "3.14")
        "3.14.2"   -> ("3.14.2", "3.14")

    Raises:
        ValueError: if no major.minor pair can be extracted.
    """
    # Drop any leading poetry operator characters (==, ^, ~, >=, ...).
    bare = re.sub(r"^[=^~><]+", "", version_string.strip())
    major, dot, remainder = bare.partition(".")
    # A version needs at least a major.minor pair to be usable.
    if not dot:
        raise ValueError(f"Invalid Python version format: {version_string}")
    minor = remainder.split(".")[0]
    return bare, f"{major}.{minor}"
# TODO: Refactor
def bootstrap() -> None:  # noqa: PLR0912, PLR0915
    """
    Initialize the .env file by merging .env.example with generated secrets
    and build variables from pyproject.toml.

    Also generates .env.make so the Makefile shares the same build
    variables. Existing secrets in .env are ALWAYS preserved; only values
    sourced from pyproject.toml are refreshed. The layout (line order,
    comments, blank lines) of .env.example defines the layout of .env.
    """
    # Project root is the parent of the scripts/ directory holding this file.
    base_dir = Path(__file__).resolve().parent.parent
    env_path = base_dir / ".env"
    example_path = base_dir / ".env.example"
    # .env.example is the template: without it there is nothing to merge.
    if not example_path.exists():
        print(f"{example_path.name} not found.")
        return
    toml_path = base_dir / "pyproject.toml"
    if not toml_path.exists():
        print(f"{toml_path.name} not found.")
        return
    # ALWAYS load existing .env if it exists (update mode: secrets survive).
    existing_env: dict[str, str] = {}
    if env_path.exists():
        print("🔄 Reading existing .env...")
        with open(env_path) as f:
            for line in f:
                # Only KEY=VALUE lines count; comments and blanks are skipped.
                if "=" in line and not line.strip().startswith("#"):
                    key, value = line.split("=", 1)
                    existing_env[key.strip()] = value.strip()
        print(f" Found {len(existing_env)} existing keys")
        print("🔧 Updating .env file (keeping secrets)...")
    else:
        print("🔧 Initializing: Creating secure .env file...")
    # Load data from pyproject.toml: secret specs ([tool.alfred.security]),
    # build settings ([tool.alfred.settings]) and poetry dependencies.
    with open(toml_path, "rb") as f:
        data = tomllib.load(f)
    security_keys = data["tool"]["alfred"]["security"]
    settings_keys = data["tool"]["alfred"]["settings"]
    dependencies = data["tool"]["poetry"]["dependencies"]
    # Normalize TOML keys to UPPER_CASE for .env format (done once)
    security_keys_upper = {k.upper(): v for k, v in security_keys.items()}
    settings_keys_upper = {k.upper(): v for k, v in settings_keys.items()}
    # Extract Python version from the poetry constraint string.
    python_version_full, python_version_short = extract_python_version(
        dependencies["python"]
    )
    # Read .env.example; its line order defines the output layout.
    with open(example_path) as f:
        example_lines = f.readlines()
    new_lines = []
    # Process each line from .env.example
    for raw_line in example_lines:
        line = raw_line.strip()
        if line and not line.startswith("#") and "=" in line:
            key, value = line.split("=", 1)
            key = key.strip()
            # Check if key exists in current .env (update mode)
            if key in existing_env:
                # Keep existing value for secrets (never regenerate them).
                if key in security_keys_upper:
                    new_lines.append(f"{key}={existing_env[key]}\n")
                    print(f" ↻ Kept existing {key}")
                # Update build vars from pyproject.toml
                elif key in settings_keys_upper:
                    new_value = settings_keys_upper[key]
                    if existing_env[key] != new_value:
                        new_lines.append(f"{key}={new_value}\n")
                        print(f" ↻ Updated {key}: {existing_env[key]} → {new_value}")
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept {key}={existing_env[key]}")
                # Update Python versions (derived from poetry, not settings)
                elif key == "PYTHON_VERSION":
                    if existing_env[key] != python_version_full:
                        new_lines.append(f"{key}={python_version_full}\n")
                        print(
                            f" ↻ Updated Python: {existing_env[key]} → {python_version_full}"
                        )
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept Python: {existing_env[key]}")
                elif key == "PYTHON_VERSION_SHORT":
                    if existing_env[key] != python_version_short:
                        new_lines.append(f"{key}={python_version_short}\n")
                        print(
                            f" ↻ Updated Python (short): {existing_env[key]} → {python_version_short}"
                        )
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept Python (short): {existing_env[key]}")
                # Keep other existing values (e.g. user-entered API keys)
                else:
                    new_lines.append(f"{key}={existing_env[key]}\n")
            # Key doesn't exist, generate/add it
            elif key in security_keys_upper:
                rule = security_keys_upper[key]
                secret = generate_secret(rule)
                new_lines.append(f"{key}={secret}\n")
                print(f" + Secret generated for {key} ({rule})")
            elif key in settings_keys_upper:
                value = settings_keys_upper[key]
                new_lines.append(f"{key}={value}\n")
                print(f" + Setting added: {key}={value}")
            elif key == "PYTHON_VERSION":
                new_lines.append(f"{key}={python_version_full}\n")
                print(f" + Python version: {python_version_full}")
            elif key == "PYTHON_VERSION_SHORT":
                new_lines.append(f"{key}={python_version_short}\n")
                print(f" + Python version (short): {python_version_short}")
            else:
                # Unknown key with no existing value: copy the template line.
                new_lines.append(raw_line)
        else:
            # Keep comments and empty lines
            new_lines.append(raw_line)
    # Compute database URIs from the generated values. Re-parse new_lines
    # so the URIs reflect whatever was kept/generated above.
    final_env = {}
    for line in new_lines:
        if "=" in line and not line.strip().startswith("#"):
            key, value = line.split("=", 1)
            final_env[key.strip()] = value.strip()
    # Compute MONGO_URI (recomputed on every run from its components)
    if "MONGO_USER" in final_env and "MONGO_PASSWORD" in final_env:
        mongo_uri = (
            f"mongodb://{final_env.get('MONGO_USER', 'alfred')}:"
            f"{final_env.get('MONGO_PASSWORD', '')}@"
            f"{final_env.get('MONGO_HOST', 'mongodb')}:"
            f"{final_env.get('MONGO_PORT', '27017')}/"
            f"{final_env.get('MONGO_DB_NAME', 'alfred')}?authSource=admin"
        )
        # Update MONGO_URI in new_lines
        for i, line in enumerate(new_lines):
            if line.startswith("MONGO_URI="):
                new_lines[i] = f"MONGO_URI={mongo_uri}\n"
                print(" ✓ Computed MONGO_URI")
                break
    # Compute POSTGRES_URI (same strategy as MONGO_URI)
    if "POSTGRES_USER" in final_env and "POSTGRES_PASSWORD" in final_env:
        postgres_uri = (
            f"postgresql://{final_env.get('POSTGRES_USER', 'alfred')}:"
            f"{final_env.get('POSTGRES_PASSWORD', '')}@"
            f"{final_env.get('POSTGRES_HOST', 'vectordb')}:"
            f"{final_env.get('POSTGRES_PORT', '5432')}/"
            f"{final_env.get('POSTGRES_DB_NAME', 'alfred')}"
        )
        # Update POSTGRES_URI in new_lines
        for i, line in enumerate(new_lines):
            if line.startswith("POSTGRES_URI="):
                new_lines[i] = f"POSTGRES_URI={postgres_uri}\n"
                print(" ✓ Computed POSTGRES_URI")
                break
    # Write .env file
    with open(env_path, "w", encoding="utf-8") as f:
        f.writelines(new_lines)
    print(f"\n{env_path.name} generated successfully.")
    # Generate .env.make for Makefile (shell `export` syntax, one per line).
    env_make_path = base_dir / ".env.make"
    with open(env_make_path, "w", encoding="utf-8") as f:
        f.write("# Auto-generated from pyproject.toml by bootstrap.py\n")
        f.write(f"export PYTHON_VERSION={python_version_full}\n")
        f.write(f"export PYTHON_VERSION_SHORT={python_version_short}\n")
        f.write(f"export RUNNER={settings_keys['runner']}\n")
        f.write(f"export IMAGE_NAME={settings_keys['image_name']}\n")
        f.write(f"export SERVICE_NAME={settings_keys['service_name']}\n")
        f.write(f"export LIBRECHAT_VERSION={settings_keys['librechat_version']}\n")
        f.write(f"export RAG_VERSION={settings_keys['rag_version']}\n")
    print(f"{env_make_path.name} generated for Makefile.")
    print("\n⚠️ Reminder: Please manually add your API keys to the .env file.")


if __name__ == "__main__":
    bootstrap()