infra: added orchestration and app bootstrap (.env)
This commit is contained in:
@@ -22,8 +22,7 @@ venv
|
||||
.venv
|
||||
env
|
||||
.env
|
||||
.env.*
|
||||
|
||||
.env-
|
||||
# IDE
|
||||
.vscode
|
||||
.idea
|
||||
|
||||
119
.env.example
119
.env.example
@@ -1,53 +1,92 @@
|
||||
# Configuration
|
||||
LIBRECHAT_VERSION=v0.8.1
|
||||
RAG_VERSION=v0.7.0
|
||||
MAX_HISTORY_MESSAGES=10
|
||||
MAX_TOOL_ITERATIONS=10
|
||||
REQUEST_TIMEOUT=30
|
||||
|
||||
# Keys
|
||||
# - Deepseek API
|
||||
DEEPSEEK_API_KEY=
|
||||
# LLM Settings
|
||||
LLM_TEMPERATURE=0.2
|
||||
|
||||
# - Google API
|
||||
GOOGLE_API_KEY=
|
||||
#GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite #TODO:Update models
|
||||
# Persistence
|
||||
DATA_STORAGE_DIR=data
|
||||
|
||||
# - Anthropic API
|
||||
ANTHROPIC_API_KEY=
|
||||
# Network configuration
|
||||
HOST=0.0.0.0
|
||||
PORT=3080
|
||||
|
||||
# - Kimi API
|
||||
KIMI_API_KEY=
|
||||
# Build information (synced with pyproject.toml via bootstrap)
|
||||
IMAGE_NAME=
|
||||
LIBRECHAT_VERSION=
|
||||
PYTHON_VERSION=
|
||||
PYTHON_VERSION_SHORT=
|
||||
RAG_VERSION=
|
||||
RUNNER=
|
||||
SERVICE_NAME=
|
||||
|
||||
# - ChatGPT/Open API
|
||||
OPENAI_API_KEY=
|
||||
|
||||
# - Themoviedb.org API (media metadata)
|
||||
TMDB_API_KEY=
|
||||
|
||||
# - Security keys
|
||||
# --- SECURITY KEYS (CRITICAL) ---
|
||||
# These are used for session tokens and encrypting sensitive data in MongoDB.
|
||||
# If you lose these, you lose access to encrypted stored credentials.
|
||||
JWT_SECRET=
|
||||
JWT_REFRESH_SECRET=
|
||||
CREDS_KEY=
|
||||
CREDS_IV=
|
||||
|
||||
# Local LLM
|
||||
OLLAMA_BASE_URL=
|
||||
OLLAMA_MODEL=
|
||||
# --- DATABASES (AUTO-SECURED) ---
|
||||
# Alfred uses MongoDB for application state and PostgreSQL for Vector RAG.
|
||||
# Passwords will be generated as 24-character secure tokens if left blank.
|
||||
|
||||
# Alfred Configuration
|
||||
LLM_PROVIDER=deepseek
|
||||
# MongoDB (Application Data)
|
||||
MONGO_URI=
|
||||
MONGO_HOST=mongodb
|
||||
MONGO_PORT=27017
|
||||
MONGO_USER=alfred
|
||||
MONGO_PASSWORD=
|
||||
MONGO_DB_NAME=alfred
|
||||
|
||||
# Memory storage directory (inside container)
|
||||
MEMORY_STORAGE_DIR=/data/memory
|
||||
|
||||
# qBittorrent Configuration
|
||||
QBITTORRENT_URL=
|
||||
QBITTORRENT_USERNAME=admin
|
||||
QBITTORRENT_PASSWORD=adminadmin
|
||||
|
||||
# Debug Options
|
||||
DEBUG_LOGGING=false
|
||||
DEBUG_CONSOLE=false
|
||||
|
||||
# Postgres (RAG)
|
||||
POSTGRES_DB=
|
||||
POSTGRES_USER=
|
||||
# PostgreSQL (Vector Database / RAG)
|
||||
POSTGRES_URI=
|
||||
POSTGRES_HOST=vectordb
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_USER=alfred
|
||||
POSTGRES_PASSWORD=
|
||||
POSTGRES_DB_NAME=alfred
|
||||
|
||||
# --- EXTERNAL SERVICES ---
|
||||
# Media Metadata (Required)
|
||||
# Get your key at https://www.themoviedb.org/
|
||||
TMDB_API_KEY=
|
||||
TMDB_BASE_URL=https://api.themoviedb.org/3
|
||||
|
||||
# qBittorrent integration
|
||||
QBITTORRENT_URL=http://qbittorrent:16140
|
||||
QBITTORRENT_USERNAME=admin
|
||||
QBITTORRENT_PASSWORD=
|
||||
QBITTORRENT_PORT=16140
|
||||
|
||||
# Meilisearch
|
||||
MEILI_ENABLED=FALSE
|
||||
MEILI_NO_ANALYTICS=TRUE
|
||||
MEILI_HOST=http://meilisearch:7700
|
||||
MEILI_MASTER_KEY=
|
||||
|
||||
# --- LLM CONFIGURATION ---
|
||||
# Providers: 'local', 'openai', 'anthropic', 'deepseek', 'google', 'kimi'
|
||||
DEFAULT_LLM_PROVIDER=local
|
||||
|
||||
# Local LLM (Ollama)
|
||||
OLLAMA_BASE_URL=http://ollama:11434
|
||||
OLLAMA_MODEL=llama3.3:latest
|
||||
|
||||
# --- API KEYS (OPTIONAL) ---
|
||||
# Fill only the ones you intend to use.
|
||||
ANTHROPIC_API_KEY=
|
||||
DEEPSEEK_API_KEY=
|
||||
GOOGLE_API_KEY=
|
||||
KIMI_API_KEY=
|
||||
OPENAI_API_KEY=
|
||||
|
||||
# --- RAG ENGINE ---
|
||||
# Enable/Disable the Retrieval Augmented Generation system
|
||||
RAG_ENABLED=TRUE
|
||||
RAG_API_URL=http://rag_api:8000
|
||||
RAG_API_PORT=8000
|
||||
EMBEDDINGS_PROVIDER=ollama
|
||||
EMBEDDINGS_MODEL=nomic-embed-text
|
||||
|
||||
19
Dockerfile
19
Dockerfile
@@ -43,6 +43,9 @@ RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
uv pip install --system -r pyproject.toml; \
|
||||
fi
|
||||
|
||||
COPY scripts/ ./scripts/
|
||||
COPY .env.example ./
|
||||
|
||||
# ===========================================
|
||||
# Stage 2: Testing
|
||||
# ===========================================
|
||||
@@ -60,6 +63,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
fi
|
||||
|
||||
COPY alfred/ ./alfred
|
||||
COPY scripts ./scripts
|
||||
COPY tests/ ./tests
|
||||
|
||||
# ===========================================
|
||||
@@ -69,10 +73,11 @@ FROM python:${PYTHON_VERSION}-slim-bookworm AS runtime
|
||||
|
||||
ARG PYTHON_VERSION_SHORT
|
||||
|
||||
# TODO: Do we still need all of these keys?
|
||||
ENV LLM_PROVIDER=deepseek \
|
||||
MEMORY_STORAGE_DIR=/data/memory \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONPATH=/home/appuser/app \
|
||||
PYTHONPATH=/home/appuser \
|
||||
PYTHONUNBUFFERED=1
|
||||
|
||||
# Install runtime dependencies (needs root)
|
||||
@@ -85,8 +90,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
RUN useradd -m -u 1000 -s /bin/bash appuser
|
||||
|
||||
# Create data directories (needs root for /data)
|
||||
RUN mkdir -p /data/memory /data/logs \
|
||||
&& chown -R appuser:appuser /data
|
||||
RUN mkdir -p /data /logs \
|
||||
&& chown -R appuser:appuser /data /logs
|
||||
|
||||
# Switch to non-root user
|
||||
USER appuser
|
||||
@@ -100,9 +105,12 @@ COPY --from=builder /usr/local/bin /usr/local/bin
|
||||
|
||||
# Copy application code (already owned by appuser)
|
||||
COPY --chown=appuser:appuser alfred/ ./alfred
|
||||
COPY --chown=appuser:appuser scripts/ ./scripts
|
||||
COPY --chown=appuser:appuser .env.example ./
|
||||
COPY --chown=appuser:appuser pyproject.toml ./
|
||||
|
||||
# Create volumes for persistent data
|
||||
VOLUME ["/data/memory", "/data/logs"]
|
||||
VOLUME ["/data", "/logs"]
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
@@ -111,5 +119,4 @@ EXPOSE 8000
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5).raise_for_status()" || exit 1
|
||||
|
||||
# Run the application
|
||||
CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
CMD ["python", "-m", "uvicorn", "alfred.app:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
113
Makefile
113
Makefile
@@ -1,48 +1,37 @@
|
||||
.DEFAULT_GOAL := help
|
||||
|
||||
# --- Config ---
|
||||
export IMAGE_NAME := alfred_media_organizer
|
||||
export LIBRECHAT_VERSION := v0.8.1
|
||||
export PYTHON_VERSION := 3.14.2
|
||||
export PYTHON_VERSION_SHORT := 3.14
|
||||
export RAG_VERSION := v0.7.0
|
||||
export RUNNER := poetry
|
||||
export SERVICE_NAME := alfred
|
||||
# --- Load Config from pyproject.toml ---
|
||||
-include .env.make
|
||||
|
||||
# --- Profiles management ---
|
||||
# Usage: make up p=rag,meili
|
||||
p ?= core
|
||||
PROFILES_PARAM := COMPOSE_PROFILES=$(p)
|
||||
|
||||
# --- Commands ---
|
||||
CLI := python3 cli.py
|
||||
DOCKER_COMPOSE := docker compose
|
||||
DOCKER_BUILD := docker build \
|
||||
DOCKER_BUILD := docker build --no-cache \
|
||||
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
|
||||
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
|
||||
--build-arg RUNNER=$(RUNNER)
|
||||
|
||||
# --- Phony ---
|
||||
.PHONY: setup status check
|
||||
.PHONY: up down restart logs ps shell
|
||||
.PHONY: build build-test
|
||||
.PHONY: install update install-hooks
|
||||
.PHONY: test coverage lint format clean prune
|
||||
.PHONY: major minor patch
|
||||
.PHONY: help
|
||||
.PHONY: .env up down restart logs ps shell build build-test install update \
|
||||
install-hooks test coverage lint format clean major minor patch help
|
||||
|
||||
# --- Setup ---
|
||||
setup:
|
||||
.env .env.make:
|
||||
@echo "Initializing environment..."
|
||||
@$(CLI) setup \
|
||||
@python scripts/bootstrap.py \
|
||||
&& echo "✓ Environment ready" \
|
||||
|| (echo "✗ Setup failed" && exit 1)
|
||||
|| (echo "✗ Environment setup failed" && exit 1)
|
||||
|
||||
status:
|
||||
@$(CLI) status
|
||||
|
||||
check:
|
||||
@$(CLI) check
|
||||
bootstrap: .env .env.make
|
||||
|
||||
# --- Docker ---
|
||||
up: check
|
||||
@echo "Starting containers..."
|
||||
@$(DOCKER_COMPOSE) up -d --remove-orphans \
|
||||
up: .env
|
||||
@echo "Starting containers with profiles: [$(p)]..."
|
||||
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) up -d --remove-orphans \
|
||||
&& echo "✓ Containers started" \
|
||||
|| (echo "✗ Failed to start containers" && exit 1)
|
||||
|
||||
@@ -54,30 +43,30 @@ down:
|
||||
|
||||
restart:
|
||||
@echo "Restarting containers..."
|
||||
@$(DOCKER_COMPOSE) restart \
|
||||
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) restart \
|
||||
&& echo "✓ Containers restarted" \
|
||||
|| (echo "✗ Failed to restart containers" && exit 1)
|
||||
|
||||
logs:
|
||||
@echo "Following logs (Ctrl+C to exit)..."
|
||||
@$(DOCKER_COMPOSE) logs -f
|
||||
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) logs -f
|
||||
|
||||
ps:
|
||||
@echo "Container status:"
|
||||
@$(DOCKER_COMPOSE) ps
|
||||
@$(PROFILES_PARAM) $(DOCKER_COMPOSE) ps
|
||||
|
||||
shell:
|
||||
@echo "Opening shell in $(SERVICE_NAME)..."
|
||||
@$(DOCKER_COMPOSE) exec $(SERVICE_NAME) /bin/bash
|
||||
|
||||
# --- Build ---
|
||||
build: check
|
||||
build: .env.make
|
||||
@echo "Building image $(IMAGE_NAME):latest ..."
|
||||
@$(DOCKER_BUILD) -t $(IMAGE_NAME):latest . \
|
||||
&& echo "✓ Build complete" \
|
||||
|| (echo "✗ Build failed" && exit 1)
|
||||
|
||||
build-test: check
|
||||
build-test: .env.make
|
||||
@echo "Building test image $(IMAGE_NAME):test..."
|
||||
@$(DOCKER_BUILD) --target test -t $(IMAGE_NAME):test . \
|
||||
&& echo "✓ Test image built" \
|
||||
@@ -90,18 +79,18 @@ install:
|
||||
&& echo "✓ Dependencies installed" \
|
||||
|| (echo "✗ Installation failed" && exit 1)
|
||||
|
||||
update:
|
||||
@echo "Updating dependencies with $(RUNNER)..."
|
||||
@$(RUNNER) update \
|
||||
&& echo "✓ Dependencies updated" \
|
||||
|| (echo "✗ Update failed" && exit 1)
|
||||
|
||||
install-hooks:
|
||||
@echo "Installing pre-commit hooks..."
|
||||
@$(RUNNER) run pre-commit install \
|
||||
&& echo "✓ Hooks installed" \
|
||||
|| (echo "✗ Hook installation failed" && exit 1)
|
||||
|
||||
update:
|
||||
@echo "Updating dependencies with $(RUNNER)..."
|
||||
@$(RUNNER) update \
|
||||
&& echo "✓ Dependencies updated" \
|
||||
|| (echo "✗ Update failed" && exit 1)
|
||||
|
||||
# --- Quality ---
|
||||
test:
|
||||
@echo "Running tests..."
|
||||
@@ -133,12 +122,6 @@ clean:
|
||||
@find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
|
||||
@echo "✓ Cleanup complete"
|
||||
|
||||
prune:
|
||||
@echo "Pruning Docker system..."
|
||||
@docker system prune -af \
|
||||
&& echo "✓ Docker pruned" \
|
||||
|| (echo "✗ Prune failed" && exit 1)
|
||||
|
||||
# --- Versioning ---
|
||||
major minor patch: _check-main
|
||||
@echo "Bumping $@ version..."
|
||||
@@ -151,6 +134,7 @@ major minor patch: _check-main
|
||||
&& echo "✓ Tags pushed" \
|
||||
|| (echo "✗ Push failed" && exit 1)
|
||||
|
||||
# CI/CD helpers
|
||||
_ci-dump-config:
|
||||
@echo "image_name=$(IMAGE_NAME)"
|
||||
@echo "python_version=$(PYTHON_VERSION)"
|
||||
@@ -173,31 +157,26 @@ _check-main:
|
||||
|
||||
# --- Help ---
|
||||
help:
|
||||
@echo "Usage: make [target]"
|
||||
@echo "Cleverly Crafted Unawareness - Management Commands"
|
||||
@echo ""
|
||||
@echo "Setup:"
|
||||
@echo " setup Initialize .env"
|
||||
@echo " status Show project status"
|
||||
@echo "Usage: make [target] [p=profile1,profile2]"
|
||||
@echo ""
|
||||
@echo "Docker:"
|
||||
@echo " up Start containers"
|
||||
@echo " down Stop containers"
|
||||
@echo " restart Restart containers"
|
||||
@echo " logs Follow logs"
|
||||
@echo " ps Container status"
|
||||
@echo " shell Shell into container"
|
||||
@echo " build Build image"
|
||||
@echo " up Start containers (default profile: core)"
|
||||
@echo " Example: make up p=rag,meili"
|
||||
@echo " down Stop all containers"
|
||||
@echo " restart Restart containers (supports p=...)"
|
||||
@echo " logs Follow logs (supports p=...)"
|
||||
@echo " ps Status of containers"
|
||||
@echo " shell Open bash in the core container"
|
||||
@echo " build Build the production Docker image"
|
||||
@echo ""
|
||||
@echo "Dev:"
|
||||
@echo " install Install dependencies"
|
||||
@echo " update Update dependencies"
|
||||
@echo " test Run tests"
|
||||
@echo " coverage Run tests with coverage"
|
||||
@echo " lint Lint code"
|
||||
@echo " format Format code"
|
||||
@echo " clean Clean artifacts"
|
||||
@echo "Dev & Quality:"
|
||||
@echo " setup Bootstrap .env and security keys"
|
||||
@echo " install Install dependencies via $(RUNNER)"
|
||||
@echo " test Run pytest suite"
|
||||
@echo " coverage Run tests and generate HTML report"
|
||||
@echo " lint/format Quality and style checks"
|
||||
@echo ""
|
||||
@echo "Release:"
|
||||
@echo " patch Bump patch version"
|
||||
@echo " minor Bump minor version"
|
||||
@echo " major Bump major version"
|
||||
@echo " major|minor|patch Bump version and push tags (main branch only)"
|
||||
|
||||
0
alfred/__init__.py
Normal file
0
alfred/__init__.py
Normal file
@@ -1,4 +1,20 @@
|
||||
services:
|
||||
# - CORE SERVICES -
|
||||
# --- .ENV INIT ---
|
||||
alfred-init:
|
||||
container_name: alfred-init
|
||||
build:
|
||||
context: .
|
||||
target: builder
|
||||
args:
|
||||
PYTHON_VERSION: ${PYTHON_VERSION}
|
||||
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
|
||||
RUNNER: ${RUNNER}
|
||||
command: python scripts/bootstrap.py
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- MAIN APPLICATION ---
|
||||
alfred:
|
||||
container_name: alfred-core
|
||||
build:
|
||||
@@ -8,47 +24,38 @@ services:
|
||||
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
|
||||
RUNNER: ${RUNNER}
|
||||
depends_on:
|
||||
- librechat
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
# LLM Configuration
|
||||
LLM_PROVIDER: ${LLM_PROVIDER:-deepseek}
|
||||
DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY:-}
|
||||
|
||||
# Memory storage
|
||||
MEMORY_STORAGE_DIR: /data/memory
|
||||
|
||||
# External services
|
||||
TMDB_API_KEY: ${TMDB_API_KEY:-}
|
||||
QBITTORRENT_URL: ${QBITTORRENT_URL:-}
|
||||
QBITTORRENT_USERNAME: ${QBITTORRENT_USERNAME:-}
|
||||
QBITTORRENT_PASSWORD: ${QBITTORRENT_PASSWORD:-}
|
||||
- path: .env
|
||||
required: true
|
||||
volumes:
|
||||
- ./data/memory:/data/memory
|
||||
- ./logs:/data/logs
|
||||
# TODO: Development: mount code for hot reload (comment out in production)
|
||||
# - ./alfred:/app/alfred
|
||||
- ./data:/data
|
||||
- ./logs:/logs
|
||||
# TODO: Hot reload (comment out in production)
|
||||
#- ./alfred:/home/appuser/alfred
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- FRONTEND LIBRECHAT ---
|
||||
librechat:
|
||||
container_name: alfred-librechat
|
||||
image: ghcr.io/danny-avila/librechat:${LIBRECHAT_VERSION}
|
||||
depends_on:
|
||||
- mongodb
|
||||
- meilisearch
|
||||
- rag_api
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
mongodb:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env
|
||||
- path: .env
|
||||
required: true
|
||||
environment:
|
||||
- HOST=0.0.0.0
|
||||
- MONGO_URI=mongodb://mongodb:27017/LibreChat
|
||||
- MEILI_HOST=http://meilisearch:7700
|
||||
- RAG_PORT=${RAG_PORT:-8000}
|
||||
- RAG_API_URL=http://rag_api:${RAG_PORT:-8000}
|
||||
# Remap value name
|
||||
- SEARCH=${MEILI_ENABLED}
|
||||
ports:
|
||||
- "${LIBRECHAT_PORT:-3080}:3080"
|
||||
- "${PORT}:${PORT}"
|
||||
volumes:
|
||||
- ./data/librechat/images:/app/client/public/images
|
||||
- ./data/librechat/uploads:/app/client/uploads
|
||||
@@ -56,47 +63,144 @@ services:
|
||||
# Mount custom endpoint
|
||||
- ./librechat/manifests:/app/manifests:ro
|
||||
- ./librechat/librechat.yaml:/app/librechat.yaml:ro
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- DATABASE #1 - APP STATE ---
|
||||
mongodb:
|
||||
container_name: alfred-mongodb
|
||||
image: mongo:latest
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
env_file:
|
||||
- path: .env
|
||||
required: true
|
||||
environment:
|
||||
# Remap value name
|
||||
- MONGO_INITDB_ROOT_USERNAME=${MONGO_USER}
|
||||
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_PASSWORD}
|
||||
ports:
|
||||
- "${MONGO_PORT}:${MONGO_PORT}"
|
||||
volumes:
|
||||
- ./data/mongo:/data/db
|
||||
command: mongod --noauth
|
||||
command: mongod --quiet --setParameter logComponentVerbosity='{"network":{"verbosity":0}}'
|
||||
healthcheck:
|
||||
test: |
|
||||
mongosh --quiet --eval "db.adminCommand('ping')" || \
|
||||
mongosh --quiet -u "${MONGO_USER}" -p "${MONGO_PASSWORD}" --authenticationDatabase admin --eval "db.adminCommand('ping')"
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- OLLAMA - LOCAL LLM ENGINE ---
|
||||
ollama:
|
||||
image: ollama/ollama:latest
|
||||
container_name: alfred-ollama
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- path: .env
|
||||
required: true
|
||||
volumes:
|
||||
- ./data/ollama:/root/.ollama
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# - OPTIONAL SERVICES -
|
||||
# --- SEARCH ENGINE SUPER FAST (Optional) ---
|
||||
meilisearch:
|
||||
container_name: alfred-meilisearch
|
||||
image: getmeili/meilisearch:v1.12.3
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
- MEILI_NO_ANALYTICS=true
|
||||
env_file:
|
||||
- path: .env
|
||||
required: true
|
||||
volumes:
|
||||
- ./data/meili:/meili_data
|
||||
#profiles: ["meili", "full"]
|
||||
- ./data/meilisearch:/meili_data
|
||||
profiles: ["meili", "full"]
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- RETRIEVAL AUGMENTED GENERATION SYSTEM (Optional) ---
|
||||
rag_api:
|
||||
container_name: alfred-rag
|
||||
image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:${RAG_VERSION}
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
vectordb:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- DB_HOST=vectordb
|
||||
- DB_PORT=5432
|
||||
- RAG_PORT=${RAG_PORT:-8000}
|
||||
- path: .env
|
||||
required: true
|
||||
ports:
|
||||
- "${RAG_PORT:-8000}:${RAG_PORT:-8000}"
|
||||
#profiles: ["rag", "full"]
|
||||
- "${RAG_API_PORT}:${RAG_API_PORT}"
|
||||
volumes:
|
||||
- ./data/rag/uploads:/app/uploads
|
||||
profiles: ["rag", "full"]
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- DATABASE #2 - Vector RAG (Optional) ---
|
||||
vectordb:
|
||||
container_name: alfred-vectordb
|
||||
image: pgvector/pgvector:0.8.0-pg16-bookworm
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env
|
||||
- path: .env
|
||||
required: true
|
||||
ports:
|
||||
- "${VECTOR_DB_PORT:-5432}:5432"
|
||||
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
|
||||
volumes:
|
||||
- ./data/vectordb:/var/lib/postgresql/data
|
||||
#profiles: ["rag", "full"]
|
||||
profiles: ["rag", "full"]
|
||||
healthcheck:
|
||||
test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-alfred} -d ${POSTGRES_DB_NAME:-alfred}" ]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
# --- QBITTORENT (Optional) ---
|
||||
qbittorrent:
|
||||
image: lscr.io/linuxserver/qbittorrent:latest
|
||||
container_name: alfred-qbittorrent
|
||||
depends_on:
|
||||
alfred-init:
|
||||
condition: service_completed_successfully
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- path: .env
|
||||
required: true
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Paris
|
||||
- WEBUI_PORT=${QBITTORRENT_PORT}
|
||||
volumes:
|
||||
- ./data/qbittorrent/config:/config
|
||||
- ./data/qbittorrent/downloads:/downloads
|
||||
profiles: ["qbittorrent", "full"]
|
||||
ports:
|
||||
- "${QBITTORRENT_PORT}:${QBITTORRENT_PORT}"
|
||||
networks:
|
||||
- alfred-net
|
||||
|
||||
networks:
|
||||
alfred-net:
|
||||
name: alfred-internal
|
||||
driver: bridge
|
||||
|
||||
233
scripts/bootstrap.py
Normal file
233
scripts/bootstrap.py
Normal file
@@ -0,0 +1,233 @@
|
||||
import re
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
|
||||
import tomllib
|
||||
|
||||
|
||||
def generate_secret(rule: str) -> str:
    """
    Generate a cryptographically secure secret from a spec string.

    The spec has the form ``"<size>:<format>"`` — e.g. ``"32:b64"`` or
    ``"16:hex"`` — where ``<size>`` is the number of random bytes and
    ``<format>`` selects URL-safe base64 or hexadecimal output.

    Raises:
        ValueError: if the spec is malformed (missing ``:``, non-integer
            size) or uses an unknown format.
    """
    size_part, sep, tech = rule.partition(":")
    if not sep:
        # A spec without ":" previously crashed with IndexError on chunks[1];
        # raise the documented ValueError with an actionable message instead.
        raise ValueError(f"Invalid secret spec (expected '<size>:<format>'): {rule}")

    try:
        size = int(size_part)
    except ValueError as err:
        raise ValueError(f"Invalid secret size in spec: {rule}") from err

    if tech == "b64":
        return secrets.token_urlsafe(size)
    if tech == "hex":
        return secrets.token_hex(size)
    raise ValueError(f"Invalid security format: {tech}")
|
||||
|
||||
|
||||
def extract_python_version(version_string: str) -> tuple[str, str]:
    """
    Derive ``(full, short)`` Python versions from a poetry constraint string.

    Any leading constraint operator (``==``, ``^``, ``~``, ``>=`` …) is
    stripped before parsing.

    Examples:
        "==3.14.2" -> ("3.14.2", "3.14")
        "^3.14.2"  -> ("3.14.2", "3.14")
        "3.14.2"   -> ("3.14.2", "3.14")

    Raises:
        ValueError: when fewer than two dotted components remain.
    """
    # Drop the poetry operator prefix, if any.
    cleaned = re.sub(r"^[=^~><]+", "", version_string.strip())

    components = cleaned.split(".")
    if len(components) < 2:
        raise ValueError(f"Invalid Python version format: {version_string}")

    # Full version is the cleaned string; short form is "major.minor".
    return cleaned, f"{components[0]}.{components[1]}"
|
||||
|
||||
|
||||
def _parse_env_lines(lines) -> dict[str, str]:
    """Parse KEY=VALUE lines into a dict; comments and blank lines are skipped."""
    env: dict[str, str] = {}
    for line in lines:
        if "=" in line and not line.strip().startswith("#"):
            key, value = line.split("=", 1)
            env[key.strip()] = value.strip()
    return env


def _set_env_line(lines: list[str], key: str, value: str, message: str) -> None:
    """Overwrite the first ``KEY=`` assignment in *lines* and print *message*."""
    for i, line in enumerate(lines):
        if line.startswith(f"{key}="):
            lines[i] = f"{key}={value}\n"
            print(message)
            break


def bootstrap():  # noqa: PLR0912, PLR0915
    """
    Initializes the .env file by merging .env.example with generated secrets
    and build variables from pyproject.toml.
    Also generates .env.make for Makefile.

    Existing NON-BLANK secrets are always preserved. Blank secrets are
    (re)generated, so the "generated if left blank" contract documented in
    .env.example actually holds on re-runs.
    """
    base_dir = Path(__file__).resolve().parent.parent
    env_path = base_dir / ".env"

    example_path = base_dir / ".env.example"
    if not example_path.exists():
        print(f"❌ {example_path.name} not found.")
        return

    toml_path = base_dir / "pyproject.toml"
    if not toml_path.exists():
        print(f"❌ {toml_path.name} not found.")
        return

    # ALWAYS load the existing .env (if any) so secrets survive re-runs.
    existing_env: dict[str, str] = {}
    if env_path.exists():
        print("🔄 Reading existing .env...")
        with open(env_path) as f:
            existing_env = _parse_env_lines(f)
        print(f" Found {len(existing_env)} existing keys")
        print("🔧 Updating .env file (keeping secrets)...")
    else:
        print("🔧 Initializing: Creating secure .env file...")

    # Load secret-generation rules and build settings from pyproject.toml.
    with open(toml_path, "rb") as f:
        data = tomllib.load(f)
    security_keys = data["tool"]["alfred"]["security"]
    settings_keys = data["tool"]["alfred"]["settings"]
    dependencies = data["tool"]["poetry"]["dependencies"]

    # Normalize TOML keys to UPPER_CASE for .env format (done once).
    security_keys_upper = {k.upper(): v for k, v in security_keys.items()}
    settings_keys_upper = {k.upper(): v for k, v in settings_keys.items()}

    # Resolve the pinned Python version (full and major.minor forms).
    python_version_full, python_version_short = extract_python_version(
        dependencies["python"]
    )

    # .env.example is the template: every line is passed through, with
    # KEY=VALUE lines rewritten according to the rules below.
    with open(example_path) as f:
        example_lines = f.readlines()

    new_lines = []
    for raw_line in example_lines:
        line = raw_line.strip()

        if line and not line.startswith("#") and "=" in line:
            key, value = line.split("=", 1)
            key = key.strip()

            if key in existing_env:
                if key in security_keys_upper:
                    # Keep only NON-BLANK secrets. Previously a blank secret
                    # was kept forever, defeating auto-generation.
                    if existing_env[key]:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept existing {key}")
                    else:
                        rule = security_keys_upper[key]
                        secret = generate_secret(rule)
                        new_lines.append(f"{key}={secret}\n")
                        print(f" + Secret generated for {key} ({rule})")
                # Build vars: keep in sync with pyproject.toml.
                elif key in settings_keys_upper:
                    new_value = settings_keys_upper[key]
                    if existing_env[key] != new_value:
                        new_lines.append(f"{key}={new_value}\n")
                        print(f" ↻ Updated {key}: {existing_env[key]} → {new_value}")
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept {key}={existing_env[key]}")
                elif key == "PYTHON_VERSION":
                    if existing_env[key] != python_version_full:
                        new_lines.append(f"{key}={python_version_full}\n")
                        print(
                            f" ↻ Updated Python: {existing_env[key]} → {python_version_full}"
                        )
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept Python: {existing_env[key]}")
                elif key == "PYTHON_VERSION_SHORT":
                    if existing_env[key] != python_version_short:
                        new_lines.append(f"{key}={python_version_short}\n")
                        print(
                            f" ↻ Updated Python (short): {existing_env[key]} → {python_version_short}"
                        )
                    else:
                        new_lines.append(f"{key}={existing_env[key]}\n")
                        print(f" ↻ Kept Python (short): {existing_env[key]}")
                else:
                    # Any other existing value is the user's — keep it.
                    new_lines.append(f"{key}={existing_env[key]}\n")
            # Key absent from .env: generate/derive a fresh value.
            elif key in security_keys_upper:
                rule = security_keys_upper[key]
                secret = generate_secret(rule)
                new_lines.append(f"{key}={secret}\n")
                print(f" + Secret generated for {key} ({rule})")
            elif key in settings_keys_upper:
                value = settings_keys_upper[key]
                new_lines.append(f"{key}={value}\n")
                print(f" + Setting added: {key}={value}")
            elif key == "PYTHON_VERSION":
                new_lines.append(f"{key}={python_version_full}\n")
                print(f" + Python version: {python_version_full}")
            elif key == "PYTHON_VERSION_SHORT":
                new_lines.append(f"{key}={python_version_short}\n")
                print(f" + Python version (short): {python_version_short}")
            else:
                new_lines.append(raw_line)
        else:
            # Keep comments and empty lines verbatim.
            new_lines.append(raw_line)

    # Derive database connection URIs from the values assembled above.
    final_env = _parse_env_lines(new_lines)

    if "MONGO_USER" in final_env and "MONGO_PASSWORD" in final_env:
        mongo_uri = (
            f"mongodb://{final_env.get('MONGO_USER', 'alfred')}:"
            f"{final_env.get('MONGO_PASSWORD', '')}@"
            f"{final_env.get('MONGO_HOST', 'mongodb')}:"
            f"{final_env.get('MONGO_PORT', '27017')}/"
            f"{final_env.get('MONGO_DB_NAME', 'alfred')}?authSource=admin"
        )
        _set_env_line(new_lines, "MONGO_URI", mongo_uri, " ✓ Computed MONGO_URI")

    if "POSTGRES_USER" in final_env and "POSTGRES_PASSWORD" in final_env:
        postgres_uri = (
            f"postgresql://{final_env.get('POSTGRES_USER', 'alfred')}:"
            f"{final_env.get('POSTGRES_PASSWORD', '')}@"
            f"{final_env.get('POSTGRES_HOST', 'vectordb')}:"
            f"{final_env.get('POSTGRES_PORT', '5432')}/"
            f"{final_env.get('POSTGRES_DB_NAME', 'alfred')}"
        )
        _set_env_line(new_lines, "POSTGRES_URI", postgres_uri, " ✓ Computed POSTGRES_URI")

    # Persist the merged environment.
    with open(env_path, "w", encoding="utf-8") as f:
        f.writelines(new_lines)
    print(f"\n✅ {env_path.name} generated successfully.")

    # Generate .env.make so the Makefile shares the same build variables.
    env_make_path = base_dir / ".env.make"
    with open(env_make_path, "w", encoding="utf-8") as f:
        f.write("# Auto-generated from pyproject.toml by bootstrap.py\n")
        f.write(f"export PYTHON_VERSION={python_version_full}\n")
        f.write(f"export PYTHON_VERSION_SHORT={python_version_short}\n")
        f.write(f"export RUNNER={settings_keys['runner']}\n")
        f.write(f"export IMAGE_NAME={settings_keys['image_name']}\n")
        f.write(f"export SERVICE_NAME={settings_keys['service_name']}\n")
        f.write(f"export LIBRECHAT_VERSION={settings_keys['librechat_version']}\n")
        f.write(f"export RAG_VERSION={settings_keys['rag_version']}\n")

    print(f"✅ {env_make_path.name} generated for Makefile.")
    print("\n⚠️ Reminder: Please manually add your API keys to the .env file.")
|
||||
|
||||
# Allow direct invocation (`python scripts/bootstrap.py`), as done by the
# Makefile `.env` target and the alfred-init container command.
if __name__ == "__main__":
    bootstrap()
|
||||
Reference in New Issue
Block a user