52 Commits

Author SHA1 Message Date
01a00a12af chore: bump version 0.1.6 → 0.1.7
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 5m54s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 1m9s
2026-01-01 05:04:37 +01:00
504d0162bb infra: added proper settings handling & orchestration and app bootstrap (.env)
Reviewed-on: https://gitea.iswearihadsomethingforthis.net/francwa/alfred/pulls/18
2026-01-01 03:55:35 +00:00
cda23d074f feat: added current alfred version from pyproject.py to healthcheck 2026-01-01 04:51:45 +01:00
0357108077 infra: added orchestration and app bootstrap (.env) 2026-01-01 04:48:32 +01:00
ab1df3dd0f fix: forgot to lint/format 2026-01-01 04:48:32 +01:00
c50091f6bf feat: added proper settings handling 2026-01-01 04:48:32 +01:00
8b406370f1 chore: updated some dependencies 2026-01-01 03:08:22 +01:00
c56bf2b92c feat: added Claude.AI to available providers 2025-12-29 02:13:11 +01:00
b1507db4d0 fix: fixed test image not being built before running tests
All checks were successful
Renovate Bot / renovate (push) Successful in 1m26s
2025-12-28 13:24:03 +01:00
3074962314 infra: proper librechat integration & improved configuration handling
Reviewed-on: https://gitea.iswearihadsomethingforthis.net/francwa/alfred/pulls/11
2025-12-28 11:56:53 +00:00
84799879bb fix: added data dir to .dockerignore 2025-12-28 12:02:44 +01:00
1052c1b619 infra: added modules version to .env.example 2025-12-28 06:07:34 +01:00
9958b8e848 feat: created placeholders for various API models 2025-12-28 06:06:28 +01:00
b15161dad7 fix: provided OpenAI API key and fixed docker-compose configuration to enable RAG service 2025-12-28 06:04:08 +01:00
52f025ae32 chore: ran linter & formatter again 2025-12-27 20:07:48 +01:00
2cfe7a035b infra: moved makefile logic to python and added .env setup 2025-12-27 19:51:40 +01:00
2441c2dc29 infra: rewrote docker-compose for proper integration of librechat 2025-12-27 19:49:43 +01:00
261a1f3918 fix: fixed real data directory being used in tests 2025-12-27 19:48:13 +01:00
253903a1e5 infra: made pyproject the SSOT 2025-12-27 19:46:27 +01:00
20a113e335 infra: updated .env.example 2025-12-27 19:43:31 +01:00
fed83e7d79 chore: bumped fastapi version 2025-12-27 19:42:16 +01:00
3880a4ec49 chore: ran linter and formatter 2025-12-27 19:41:22 +01:00
6195abbaa5 chore: fixed imports and tests configuration 2025-12-27 19:39:36 +01:00
b132554631 infra: updated .gitignore 2025-12-27 02:30:14 +01:00
561796cec1 infra: removed CORE_DIR variable from Makefile (not relevant anymore) 2025-12-24 11:28:28 +01:00
26d90acc16 infra: moved manifest files in librechat dir 2025-12-24 09:46:25 +01:00
d8234b2958 chore: cleaned up .env.example 2025-12-24 08:17:47 +01:00
156d1fe567 fix: fixed Dockerfile 2025-12-24 08:17:12 +01:00
f8eee120cf infra: backed up old deploy-compose 2025-12-24 08:16:30 +01:00
c5e4a5e1a7 infra: removed occurences of alfred_api_key (not implemented after all) 2025-12-24 08:11:30 +01:00
d10c9160f3 infra: renamed broken references to alfred 2025-12-24 08:09:26 +01:00
1f88e99e8b infra: reorganized repo 2025-12-24 07:50:09 +01:00
e097a13221 chore: upgraded to python 3.14 and some dependencies
Reviewed-on: https://gitea.iswearihadsomethingforthis.net/francwa/agent_media/pulls/8
2025-12-22 14:04:46 +00:00
086fff803d feat: configured CI/CD pipeline to allow build for feature branches 2025-12-22 14:48:50 +01:00
45fbf975b3 feat: improved rules for renovate 2025-12-22 14:25:16 +01:00
b8f2798e29 chore: updated fastapi and uvicorn 2025-12-22 14:20:22 +01:00
c762d91eb1 chore: updated to python 3.14 2025-12-22 14:08:27 +01:00
35a68387ab infra: use python version from pyproject.toml 2025-12-22 13:46:40 +01:00
9b13c69631 chore: updated meilisearch 2025-12-22 13:25:54 +01:00
2ca1ea29b2 infra: added meilisearch to renovate exclusion list 2025-12-22 13:22:55 +01:00
5e86615bde fix: added renovate token with proper permissions 2025-12-22 12:59:24 +01:00
6701a4b392 infra: added Renovate bot 2025-12-22 12:50:59 +01:00
68372405d6 fix: downgraded upload-artifact action to v3 from v4 2025-12-22 12:13:50 +01:00
f1ea0de247 fix: fixed indentation error 2025-12-22 12:04:47 +01:00
974d008825 feat: finalized CI/CD pipeline setup 2025-12-22 11:59:36 +01:00
8a87d94e6d fix: use docker image for trivy vulnerability scanner
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 1m23s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 5m9s
2025-12-22 11:38:35 +01:00
ec99a501fc fix! added directive to Dockerfile 2025-12-22 11:37:48 +01:00
c256b26601 fix: fixed vulnerability scanner issue in CI/CD pipeline
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 48s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 6m18s
2025-12-22 10:59:34 +01:00
56a3c1257d infra: added trivy vulnerability scanner to CI/CD
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 1m36s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 7m10s
2025-12-22 10:01:52 +01:00
79d23f936a fix: fixed typo 2025-12-22 09:40:43 +01:00
f02e916d33 fix: fixed config gathering in ci.yml
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 1m34s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 14m19s
2025-12-22 09:16:55 +01:00
4e64c83c4b fix: updated build and push CI/CD configuration
Some checks failed
CI/CD Awesome Pipeline / Test (push) Successful in 1m4s
CI/CD Awesome Pipeline / Build & Push to Registry (push) Failing after 2m32s
2025-12-22 08:45:31 +01:00
127 changed files with 1922 additions and 1269 deletions

View File

@@ -1,5 +1,5 @@
[tool.bumpversion] [tool.bumpversion]
current_version = "0.1.6" current_version = "0.1.7"
parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)" parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
serialize = ["{major}.{minor}.{patch}"] serialize = ["{major}.{minor}.{patch}"]
search = "{current_version}" search = "{current_version}"

View File

@@ -22,8 +22,7 @@ venv
.venv .venv
env env
.env .env
.env.* .env-
# IDE # IDE
.vscode .vscode
.idea .idea
@@ -41,11 +40,8 @@ docs/
*.md *.md
!README.md !README.md
# Tests # Data
tests/ data/
pytest.ini
# Data (will be mounted as volumes)
memory_data/ memory_data/
logs/ logs/
*.log *.log

View File

@@ -1,69 +1,93 @@
# Agent Media - Environment Variables MAX_HISTORY_MESSAGES=10
MAX_TOOL_ITERATIONS=10
REQUEST_TIMEOUT=30
# LibreChat Security Keys # LLM Settings
# Generate secure keys with: openssl rand -base64 32 LLM_TEMPERATURE=0.2
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
JWT_REFRESH_SECRET=your-super-secret-refresh-key-change-this-too
# Generate with: openssl rand -hex 16 (for CREDS_KEY) # Persistence
CREDS_KEY=your-32-character-secret-key-here DATA_STORAGE_DIR=data
# Generate with: openssl rand -hex 8 (for CREDS_IV) # Network configuration
CREDS_IV=your-16-character-iv-here HOST=0.0.0.0
PORT=3080
# LibreChat Configuration # Build informations (Synced with pyproject.toml via bootstrap)
DOMAIN_CLIENT=http://localhost:3080 ALFRED_VERSION=
DOMAIN_SERVER=http://localhost:3080 IMAGE_NAME=
LIBRECHAT_VERSION=
PYTHON_VERSION=
PYTHON_VERSION_SHORT=
RAG_VERSION=
RUNNER=
SERVICE_NAME=
# Session expiry (in milliseconds) # --- SECURITY KEYS (CRITICAL) ---
# Default: 15 minutes # These are used for session tokens and encrypting sensitive data in MongoDB.
SESSION_EXPIRY=900000 # If you lose these, you lose access to encrypted stored credentials.
JWT_SECRET=
JWT_REFRESH_SECRET=
CREDS_KEY=
CREDS_IV=
# Refresh token expiry (in milliseconds) # --- DATABASES (AUTO-SECURED) ---
# Default: 7 days # Alfred uses MongoDB for application state and PostgreSQL for Vector RAG.
REFRESH_TOKEN_EXPIRY=604800000 # Passwords will be generated as 24-character secure tokens if left blank.
# Meilisearch Configuration # MongoDB (Application Data)
# Master key for Meilisearch (generate with: openssl rand -base64 32) MONGO_URI=
MEILI_MASTER_KEY=DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU MONGO_HOST=mongodb
MONGO_PORT=27017
MONGO_USER=alfred
MONGO_PASSWORD=
MONGO_DB_NAME=alfred
# PostgreSQL Configuration (for RAG API) # PostgreSQL (Vector Database / RAG)
POSTGRES_DB=librechat_rag POSTGRES_URI=
POSTGRES_USER=postgres POSTGRES_HOST=vectordb
POSTGRES_PASSWORD=postgres POSTGRES_PORT=5432
POSTGRES_USER=alfred
POSTGRES_PASSWORD=
POSTGRES_DB_NAME=alfred
# RAG API Configuration (Vector Database) # --- EXTERNAL SERVICES ---
RAG_COLLECTION_NAME=testcollection # Media Metadata (Required)
RAG_EMBEDDINGS_PROVIDER=openai # Get your key at https://www.themoviedb.org/
RAG_EMBEDDINGS_MODEL=text-embedding-3-small TMDB_API_KEY=
TMDB_BASE_URL=https://api.themoviedb.org/3
# API Keys # qBittorrent integration
# OpenAI API Key (required for RAG embeddings) QBITTORRENT_URL=http://qbittorrent:16140
OPENAI_API_KEY=your-openai-api-key-here
# Deepseek API Key (for LLM in agent-brain)
DEEPSEEK_API_KEY=your-deepseek-api-key-here
# Agent Brain Configuration
# LLM Provider (deepseek or ollama)
LLM_PROVIDER=deepseek
# Memory storage directory (inside container)
MEMORY_STORAGE_DIR=/data/memory
# API Key for agent-brain (used by LibreChat custom endpoint)
AGENT_BRAIN_API_KEY=agent-brain-secret-key
# External Services (Optional)
# TMDB API Key (for movie metadata)
TMDB_API_KEY=your-tmdb-key
# qBittorrent Configuration
QBITTORRENT_URL=http://localhost:8080
QBITTORRENT_USERNAME=admin QBITTORRENT_USERNAME=admin
QBITTORRENT_PASSWORD=adminpass QBITTORRENT_PASSWORD=
QBITTORRENT_PORT=16140
# Debug Options # Meilisearch
DEBUG_LOGGING=false MEILI_ENABLED=FALSE
DEBUG_CONSOLE=false MEILI_NO_ANALYTICS=TRUE
MEILI_HOST=http://meilisearch:7700
MEILI_MASTER_KEY=
# --- LLM CONFIGURATION ---
# Providers: 'local', 'openai', 'anthropic', 'deepseek', 'google', 'kimi'
DEFAULT_LLM_PROVIDER=local
# Local LLM (Ollama)
OLLAMA_BASE_URL=http://ollama:11434
OLLAMA_MODEL=llama3.3:latest
# --- API KEYS (OPTIONAL) ---
# Fill only the ones you intend to use.
ANTHROPIC_API_KEY=
DEEPSEEK_API_KEY=
GOOGLE_API_KEY=
KIMI_API_KEY=
OPENAI_API_KEY=
# --- RAG ENGINE ---
# Enable/Disable the Retrieval Augmented Generation system
RAG_ENABLED=TRUE
RAG_API_URL=http://rag_api:8000
RAG_API_PORT=8000
EMBEDDINGS_PROVIDER=ollama
EMBEDDINGS_MODEL=nomic-embed-text

View File

@@ -2,11 +2,10 @@ name: CI/CD Awesome Pipeline
on: on:
push: push:
branches: [main]
tags: tags:
- 'v*.*.*' - 'v*.*.*'
pull_request:
branches: [main] workflow_dispatch:
env: env:
REGISTRY_URL: ${{ vars.REGISTRY_URL || 'gitea.iswearihadsomethingforthis.net' }} REGISTRY_URL: ${{ vars.REGISTRY_URL || 'gitea.iswearihadsomethingforthis.net' }}
@@ -30,34 +29,61 @@ jobs:
name: Build & Push to Registry name: Build & Push to Registry
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: test needs: test
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Debug ref
run: |
echo "github.ref = ${{ github.ref }}"
echo "GITHUB_REF = $GITHUB_REF"
echo "This should only run on tags!"
steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Load config from Makefile - name: Load config from Makefile
id: config id: config
run: | run: make -s _ci-dump-config >> $GITHUB_OUTPUT
eval "$(make _ci-image-name)"
echo "image_name=${IMAGE_NAME}" >> $GITHUB_OUTPUT
- name: Extract version from tag - name: 🏷️ Docker Metadata (Tags & Labels)
id: version id: meta
run: echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT uses: docker/metadata-action@v5
with:
images: gitea.iswearihadsomethingforthis.net/francwa/${{ steps.config.outputs.image_name }}
tags: |
# Tagged (v1.2.3)
type=semver,pattern={{ version }}
# Latest (main)
type=raw,value=latest,enable={{ is_default_branch }}
# Feature branches
type=ref,event=branch
- name: Build production image - name: Login to Gitea Registry
run: make build uses: docker/login-action@v3
with:
registry: gitea.iswearihadsomethingforthis.net
username: ${{ gitea.actor }}
password: ${{ secrets.G1T34_TOKEN }}
- name: Tag and push to registry - name: Build and push
run: | id: docker_build
docker tag ${{ steps.config.outputs.image_name }}:latest ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_USER }}/${{ steps.config.outputs.image_name }}:${{ steps.version.outputs.version }} uses: docker/build-push-action@v5
docker tag ${{ steps.config.outputs.image_name }}:latest ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_USER }}/${{ steps.config.outputs.image_name }}:latest with:
echo "${{ secrets.G1T34_TOKEN }}" | docker login ${{ env.REGISTRY_URL }} -u ${{ env.REGISTRY_USER }} --password-stdin context: .
docker push ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_USER }}/${{ steps.config.outputs.image_name }}:${{ steps.version.outputs.version }} push: true
docker push ${{ env.REGISTRY_URL }}/${{ env.REGISTRY_USER }}/${{ steps.config.outputs.image_name }}:latest tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
PYTHON_VERSION=${{ steps.config.outputs.python_version }}
PYTHON_VERSION_SHORT=${{ steps.config.outputs.python_version_short }}
RUNNER=${{ steps.config.outputs.runner }}
- name: 🛡️ Run Trivy Vulnerability Scanner
uses: docker://aquasec/trivy:latest
env:
TRIVY_USERNAME: ${{ gitea.actor }}
TRIVY_PASSWORD: ${{ secrets.G1T34_TOKEN }}
# Unset the fake GITHUB_TOKEN injected by Gitea
GITHUB_TOKEN: ""
with:
args: image --format table --output trivy-report.txt --exit-code 0 --ignore-unfixed --severity CRITICAL,HIGH gitea.iswearihadsomethingforthis.net/francwa/${{ steps.config.outputs.image_name }}:latest
- name: 📤 Upload Security Report
uses: actions/upload-artifact@v3
with:
name: security-report
path: trivy-report.txt
retention-days: 7

View File

@@ -0,0 +1,22 @@
name: Renovate Bot
on:
schedule:
# Every Monday 4AM
- cron: '0 4 * * 1'
workflow_dispatch:
jobs:
renovate:
runs-on: ubuntu-latest
steps:
- name: Run Renovate
uses: docker://renovate/renovate:latest
env:
RENOVATE_PLATFORM: "gitea"
RENOVATE_ENDPOINT: "https://gitea.iswearihadsomethingforthis.net/api/v1"
RENOVATE_TOKEN: "${{ secrets.RENOVATE_TOKEN }}"
RENOVATE_REPOSITORIES: '["${{ gitea.repository }}"]'
RENOVATE_GIT_AUTHOR: "Renovate Bot <renovate@bot.local>"
# Might need a free github token if lots of depencies
# RENOVATE_GITHUB_TOKEN: "${{ secrets.GITHUB_COM_TOKEN }}"

6
.gitignore vendored
View File

@@ -59,3 +59,9 @@ Thumbs.db
# Backup files # Backup files
*.backup *.backup
# Application data dir
data/*
# Application logs
logs/*

View File

@@ -1,5 +1,6 @@
# Dockerfile for Agent Media # syntax=docker/dockerfile:1
# Multi-stage build for smaller image size # check=skip=InvalidDefaultArgInFrom
ARG PYTHON_VERSION ARG PYTHON_VERSION
ARG PYTHON_VERSION_SHORT ARG PYTHON_VERSION_SHORT
ARG RUNNER ARG RUNNER
@@ -29,7 +30,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
WORKDIR /tmp WORKDIR /tmp
# Copy dependency files # Copy dependency files
COPY brain/pyproject.toml brain/poetry.lock* brain/uv.lock* Makefile ./ COPY pyproject.toml poetry.lock* uv.lock* Makefile ./
# Install dependencies as root (to avoid permission issues with system packages) # Install dependencies as root (to avoid permission issues with system packages)
RUN --mount=type=cache,target=/root/.cache/pip \ RUN --mount=type=cache,target=/root/.cache/pip \
@@ -42,6 +43,9 @@ RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system -r pyproject.toml; \ uv pip install --system -r pyproject.toml; \
fi fi
COPY scripts/ ./scripts/
COPY .env.example ./
# =========================================== # ===========================================
# Stage 2: Testing # Stage 2: Testing
# =========================================== # ===========================================
@@ -58,12 +62,9 @@ RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system -e .[dev]; \ uv pip install --system -e .[dev]; \
fi fi
COPY brain/agent/ ./agent/ COPY alfred/ ./alfred
COPY brain/application/ ./application/ COPY scripts ./scripts
COPY brain/domain/ ./domain/ COPY tests/ ./tests
COPY brain/infrastructure/ ./infrastructure/
COPY brain/tests/ ./tests/
COPY brain/app.py .
# =========================================== # ===========================================
# Stage 3: Runtime # Stage 3: Runtime
@@ -72,10 +73,11 @@ FROM python:${PYTHON_VERSION}-slim-bookworm AS runtime
ARG PYTHON_VERSION_SHORT ARG PYTHON_VERSION_SHORT
# TODO: A-t-on encore besoin de toutes les clés ?
ENV LLM_PROVIDER=deepseek \ ENV LLM_PROVIDER=deepseek \
MEMORY_STORAGE_DIR=/data/memory \ MEMORY_STORAGE_DIR=/data/memory \
PYTHONDONTWRITEBYTECODE=1 \ PYTHONDONTWRITEBYTECODE=1 \
PYTHONPATH=/home/appuser/app \ PYTHONPATH=/home/appuser \
PYTHONUNBUFFERED=1 PYTHONUNBUFFERED=1
# Install runtime dependencies (needs root) # Install runtime dependencies (needs root)
@@ -88,28 +90,27 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN useradd -m -u 1000 -s /bin/bash appuser RUN useradd -m -u 1000 -s /bin/bash appuser
# Create data directories (needs root for /data) # Create data directories (needs root for /data)
RUN mkdir -p /data/memory /data/logs \ RUN mkdir -p /data /logs \
&& chown -R appuser:appuser /data && chown -R appuser:appuser /data /logs
# Switch to non-root user # Switch to non-root user
USER appuser USER appuser
# Set working directory (owned by appuser) # Set working directory (owned by appuser)
WORKDIR /home/appuser/app WORKDIR /home/appuser
# Copy Python packages from builder stage # Copy Python packages from builder stage
COPY --from=builder /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages COPY --from=builder /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages /usr/local/lib/python${PYTHON_VERSION_SHORT}/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin COPY --from=builder /usr/local/bin /usr/local/bin
# Copy application code (already owned by appuser) # Copy application code (already owned by appuser)
COPY --chown=appuser:appuser brain/agent/ ./agent/ COPY --chown=appuser:appuser alfred/ ./alfred
COPY --chown=appuser:appuser brain/application/ ./application/ COPY --chown=appuser:appuser scripts/ ./scripts
COPY --chown=appuser:appuser brain/domain/ ./domain/ COPY --chown=appuser:appuser .env.example ./
COPY --chown=appuser:appuser brain/infrastructure/ ./infrastructure/ COPY --chown=appuser:appuser pyproject.toml ./
COPY --chown=appuser:appuser brain/app.py .
# Create volumes for persistent data # Create volumes for persistent data
VOLUME ["/data/memory", "/data/logs"] VOLUME ["/data", "/logs"]
# Expose port # Expose port
EXPOSE 8000 EXPOSE 8000
@@ -118,5 +119,4 @@ EXPOSE 8000
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5).raise_for_status()" || exit 1 CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5).raise_for_status()" || exit 1
# Run the application CMD ["python", "-m", "uvicorn", "alfred.app:app", "--host", "0.0.0.0", "--port", "8000"]
CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]

387
Makefile
View File

@@ -1,257 +1,182 @@
.POSIX:
.SUFFIXES:
.DEFAULT_GOAL := help .DEFAULT_GOAL := help
# --- SETTINGS --- # --- Load Config from pyproject.toml ---
PYTHON_VERSION = 3.12.7 -include .env.make
PYTHON_VERSION_SHORT = $(shell echo $(PYTHON_VERSION) | cut -d. -f1,2)
# Change to 'uv' when ready.
RUNNER ?= poetry
export PYTHON_VERSION # --- Profiles management ---
export PYTHON_VERSION_SHORT # Usage: make up p=rag,meili
export RUNNER p ?= core
export IMAGE_NAME PROFILES_PARAM := COMPOSE_PROFILES=$(p)
# --- VARIABLES --- # --- Commands ---
CORE_DIR = brain DOCKER_COMPOSE := docker compose
SERVICE_NAME = agent_media DOCKER_BUILD := docker build --no-cache \
IMAGE_NAME = agent_media --build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
--build-arg RUNNER=$(RUNNER)
# --- ADAPTERS --- # --- Phony ---
# UV uses "sync", Poetry uses "install". Both install DEV deps by default. .PHONY: .env up down restart logs ps shell build build-test install update \
INSTALL_CMD = $(if $(filter uv,$(RUNNER)),sync,install) install-hooks test coverage lint format clean major minor patch help
# --- MACROS --- # --- Setup ---
ARGS = $(filter-out $@,$(MAKECMDGOALS)) .env .env.make:
BUMP_CMD = cd $(CORE_DIR) && $(RUNNER) run bump-my-version bump @echo "Initializing environment..."
COMPOSE_CMD = docker-compose @python scripts/bootstrap.py \
DOCKER_CMD = docker build \ && echo "✓ Environment ready" \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \ || (echo "✗ Environment setup failed" && exit 1)
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
--build-arg RUNNER=$(RUNNER) \
-f $(CORE_DIR)/Dockerfile \
-t $(IMAGE_NAME):latest .
RUNNER_ADD = cd $(CORE_DIR) && $(RUNNER) add bootstrap: .env .env.make
RUNNER_HOOKS = cd $(CORE_DIR) && $(RUNNER) run pre-commit install -c ../.pre-commit-config.yaml
RUNNER_INSTALL = cd $(CORE_DIR) && $(RUNNER) $(INSTALL_CMD)
RUNNER_RUN = cd $(CORE_DIR) && $(RUNNER) run
RUNNER_UPDATE = cd $(CORE_DIR) && $(RUNNER) update
# --- STYLES --- # --- Docker ---
B = \033[1m up: .env
G = \033[32m @echo "Starting containers with profiles: [$(p)]..."
T = \033[36m @$(PROFILES_PARAM) $(DOCKER_COMPOSE) up -d --remove-orphans \
R = \033[0m && echo "✓ Containers started" \
|| (echo "✗ Failed to start containers" && exit 1)
# --- TARGETS --- down:
.PHONY: add build build-test check-docker check-runner clean coverage down format help init-dotenv install install-hooks lint logs major minor patch prune ps restart run shell test up update _check_branch _ci-image-name _ci-run-tests _push_tag @echo "Stopping containers..."
@$(DOCKER_COMPOSE) down \
&& echo "✓ Containers stopped" \
|| (echo "✗ Failed to stop containers" && exit 1)
# Catch-all for args restart:
%: @echo "Restarting containers..."
@: @$(PROFILES_PARAM) $(DOCKER_COMPOSE) restart \
&& echo "✓ Containers restarted" \
|| (echo "✗ Failed to restart containers" && exit 1)
add: check-runner logs:
@echo "$(T) Adding dependency ($(RUNNER)): $(ARGS)$(R)" @echo "Following logs (Ctrl+C to exit)..."
$(RUNNER_ADD) $(ARGS) @$(PROFILES_PARAM) $(DOCKER_COMPOSE) logs -f
build: check-docker ps:
@echo "$(T)🐳 Building Docker image...$(R)" @echo "Container status:"
$(DOCKER_CMD) @$(PROFILES_PARAM) $(DOCKER_COMPOSE) ps
@echo "✅ Image $(IMAGE_NAME):latest ready."
build-test: check-docker shell:
@echo "$(T)🐳 Building test image (with dev deps)...$(R)" @echo "Opening shell in $(SERVICE_NAME)..."
docker build \ @$(DOCKER_COMPOSE) exec $(SERVICE_NAME) /bin/bash
--build-arg RUNNER=$(RUNNER) \
--build-arg PYTHON_VERSION=$(PYTHON_VERSION) \
--build-arg PYTHON_VERSION_SHORT=$(PYTHON_VERSION_SHORT) \
-f $(CORE_DIR)/Dockerfile \
--target test \
-t $(IMAGE_NAME):test .
@echo "✅ Test image $(IMAGE_NAME):test ready."
check-docker: # --- Build ---
@command -v docker >/dev/null 2>&1 || { echo "$(R)❌ Docker not installed$(R)"; exit 1; } build: .env.make
@docker info >/dev/null 2>&1 || { echo "$(R)❌ Docker daemon not running$(R)"; exit 1; } @echo "Building image $(IMAGE_NAME):latest ..."
@$(DOCKER_BUILD) -t $(IMAGE_NAME):latest . \
&& echo "✓ Build complete" \
|| (echo "✗ Build failed" && exit 1)
check-runner: build-test: .env.make
@command -v $(RUNNER) >/dev/null 2>&1 || { echo "$(R)$(RUNNER) not installed$(R)"; exit 1; } @echo "Building test image $(IMAGE_NAME):test..."
@$(DOCKER_BUILD) --target test -t $(IMAGE_NAME):test . \
&& echo "✓ Test image built" \
|| (echo "✗ Build failed" && exit 1)
# --- Dependencies ---
install:
@echo "Installing dependencies with $(RUNNER)..."
@$(RUNNER) install \
&& echo "✓ Dependencies installed" \
|| (echo "✗ Installation failed" && exit 1)
install-hooks:
@echo "Installing pre-commit hooks..."
@$(RUNNER) run pre-commit install \
&& echo "✓ Hooks installed" \
|| (echo "✗ Hook installation failed" && exit 1)
update:
@echo "Updating dependencies with $(RUNNER)..."
@$(RUNNER) update \
&& echo "✓ Dependencies updated" \
|| (echo "✗ Update failed" && exit 1)
# --- Quality ---
test:
@echo "Running tests..."
@$(RUNNER) run pytest \
&& echo "✓ Tests passed" \
|| (echo "✗ Tests failed" && exit 1)
coverage:
@echo "Running tests with coverage..."
@$(RUNNER) run pytest --cov=. --cov-report=html --cov-report=term \
&& echo "✓ Coverage report generated" \
|| (echo "✗ Coverage failed" && exit 1)
lint:
@echo "Linting code..."
@$(RUNNER) run ruff check --fix . \
&& echo "✓ Linting complete" \
|| (echo "✗ Linting failed" && exit 1)
format:
@echo "Formatting code..."
@$(RUNNER) run ruff format . && $(RUNNER) run ruff check --fix . \
&& echo "✓ Code formatted" \
|| (echo "✗ Formatting failed" && exit 1)
clean: clean:
@echo "$(T)🧹 Cleaning caches...$(R)" @echo "Cleaning build artifacts..."
cd $(CORE_DIR) && rm -rf .ruff_cache __pycache__ .pytest_cache @rm -rf .ruff_cache __pycache__ .pytest_cache htmlcov .coverage
find $(CORE_DIR) -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true @find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
find $(CORE_DIR) -type d -name ".pytest_cache" -exec rm -rf {} + 2>/dev/null || true @echo "✓ Cleanup complete"
find $(CORE_DIR) -type f -name "*.pyc" -delete 2>/dev/null || true
@echo "✅ Caches cleaned."
coverage: check-runner # --- Versioning ---
@echo "$(T)📊 Running tests with coverage...$(R)" major minor patch: _check-main
$(RUNNER_RUN) pytest --cov=. --cov-report=html --cov-report=term $(ARGS) @echo "Bumping $@ version..."
@echo "✅ Report generated in htmlcov/" @$(RUNNER) run bump-my-version bump $@ \
&& echo "✓ Version bumped" \
|| (echo "✗ Version bump failed" && exit 1)
down: check-docker @echo "Pushing tags..."
@echo "$(T)🛑 Stopping containers...$(R)" @git push --tags \
$(COMPOSE_CMD) down && echo "✓ Tags pushed" \
@echo "✅ System stopped." || (echo "✗ Push failed" && exit 1)
format: check-runner # CI/CD helpers
@echo "$(T)✨ Formatting with Ruff...$(R)" _ci-dump-config:
$(RUNNER_RUN) ruff format . @echo "image_name=$(IMAGE_NAME)"
$(RUNNER_RUN) ruff check --fix . @echo "python_version=$(PYTHON_VERSION)"
@echo "✅ Code cleaned." @echo "python_version_short=$(PYTHON_VERSION_SHORT)"
@echo "runner=$(RUNNER)"
@echo "service_name=$(SERVICE_NAME)"
help: _ci-run-tests:build-test
@echo "$(B)Available commands:$(R)" @echo "Running tests in Docker..."
@echo ""
@echo "$(G)Setup:$(R)"
@echo " $(T)check-docker $(R) Verify Docker is installed and running."
@echo " $(T)check-runner $(R) Verify package manager ($(RUNNER))."
@echo " $(T)init-dotenv $(R) Create .env from .env.example with generated secrets."
@echo " $(T)install $(R) Install ALL dependencies (Prod + Dev)."
@echo " $(T)install-hooks $(R) Install git pre-commit hooks."
@echo ""
@echo "$(G)Docker:$(R)"
@echo " $(T)build $(R) Build the docker image (production)."
@echo " $(T)build-test $(R) Build the docker image (with dev deps for testing)."
@echo " $(T)down $(R) Stop and remove containers."
@echo " $(T)logs $(R) Follow logs."
@echo " $(T)prune $(R) Clean Docker system."
@echo " $(T)ps $(R) Show container status."
@echo " $(T)restart $(R) Restart all containers."
@echo " $(T)shell $(R) Open shell in container."
@echo " $(T)up $(R) Start the agent."
@echo ""
@echo "$(G)Development:$(R)"
@echo " $(T)add ... $(R) Add dependency (use --group dev or --dev if needed)."
@echo " $(T)clean $(R) Clean caches."
@echo " $(T)coverage $(R) Run tests with coverage."
@echo " $(T)format $(R) Format code (Ruff)."
@echo " $(T)lint $(R) Lint code without fixing."
@echo " $(T)test ... $(R) Run tests (local with $(RUNNER))."
@echo " $(T)update $(R) Update dependencies."
@echo ""
@echo "$(G)Versioning:$(R)"
@echo " $(T)major/minor/patch $(R) Bump version and push tag (triggers CI/CD)."
init-dotenv:
@echo "$(T)🔑 Initializing .env file...$(R)"
@if [ -f .env ]; then \
echo "$(R)⚠️ .env already exists. Skipping.$(R)"; \
exit 0; \
fi
@if [ ! -f .env.example ]; then \
echo "$(R)❌ .env.example not found$(R)"; \
exit 1; \
fi
@if ! command -v openssl >/dev/null 2>&1; then \
echo "$(R)❌ openssl not found. Please install it first.$(R)"; \
exit 1; \
fi
@echo "$(T) → Copying .env.example...$(R)"
@cp .env.example .env
@echo "$(T) → Generating secrets...$(R)"
@sed -i.bak "s|JWT_SECRET=.*|JWT_SECRET=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|JWT_REFRESH_SECRET=.*|JWT_REFRESH_SECRET=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|CREDS_KEY=.*|CREDS_KEY=$$(openssl rand -hex 16)|" .env
@sed -i.bak "s|CREDS_IV=.*|CREDS_IV=$$(openssl rand -hex 8)|" .env
@sed -i.bak "s|MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$$(openssl rand -base64 32)|" .env
@sed -i.bak "s|AGENT_BRAIN_API_KEY=.*|AGENT_BRAIN_API_KEY=$$(openssl rand -base64 24)|" .env
@rm -f .env.bak
@echo "$(G)✅ .env created with generated secrets!$(R)"
@echo "$(T)⚠️ Don't forget to add your API keys:$(R)"
@echo " - OPENAI_API_KEY"
@echo " - DEEPSEEK_API_KEY"
@echo " - TMDB_API_KEY (optional)"
install: check-runner
@echo "$(T)📦 Installing FULL environment ($(RUNNER))...$(R)"
$(RUNNER_INSTALL)
@echo "✅ Environment ready (Prod + Dev)."
install-hooks: check-runner
@echo "$(T)🔧 Installing hooks...$(R)"
$(RUNNER_HOOKS)
@echo "✅ Hooks ready."
lint: check-runner
@echo "$(T)🔍 Linting code...$(R)"
$(RUNNER_RUN) ruff check .
logs: check-docker
@echo "$(T)📋 Following logs...$(R)"
$(COMPOSE_CMD) logs -f
major: _check_branch
@echo "$(T)💥 Bumping major...$(R)"
SKIP=all $(BUMP_CMD) major
@$(MAKE) -s _push_tag
minor: _check_branch
@echo "$(T)✨ Bumping minor...$(R)"
SKIP=all $(BUMP_CMD) minor
@$(MAKE) -s _push_tag
patch: _check_branch
@echo "$(T)🚀 Bumping patch...$(R)"
SKIP=all $(BUMP_CMD) patch
@$(MAKE) -s _push_tag
prune: check-docker
@echo "$(T)🗑️ Pruning Docker resources...$(R)"
docker system prune -af
@echo "✅ Docker cleaned."
ps: check-docker
@echo "$(T)📋 Container status:$(R)"
@$(COMPOSE_CMD) ps
restart: check-docker
@echo "$(T)🔄 Restarting containers...$(R)"
$(COMPOSE_CMD) restart
@echo "✅ Containers restarted."
run: check-runner
$(RUNNER_RUN) $(ARGS)
shell: check-docker
@echo "$(T)🐚 Opening shell in $(SERVICE_NAME)...$(R)"
$(COMPOSE_CMD) exec $(SERVICE_NAME) /bin/sh
test: check-runner
@echo "$(T)🧪 Running tests...$(R)"
$(RUNNER_RUN) pytest $(ARGS)
up: check-docker
@echo "$(T)🚀 Starting Agent Media...$(R)"
$(COMPOSE_CMD) up -d
@echo "✅ System is up."
update: check-runner
@echo "$(T)🔄 Updating dependencies...$(R)"
$(RUNNER_UPDATE)
@echo "✅ All packages up to date."
_check_branch:
@curr=$$(git rev-parse --abbrev-ref HEAD); \
if [ "$$curr" != "main" ]; then \
echo "❌ Error: not on the main branch"; exit 1; \
fi
_ci-image-name:
@echo "IMAGE_NAME=$(IMAGE_NAME)"
_ci-run-tests: build-test
@echo "$(T)🧪 Running tests in Docker...$(R)"
docker run --rm \ docker run --rm \
-e DEEPSEEK_API_KEY \ -e DEEPSEEK_API_KEY \
-e TMDB_API_KEY \ -e TMDB_API_KEY \
-e QBITTORRENT_URL \
$(IMAGE_NAME):test pytest $(IMAGE_NAME):test pytest
@echo " Tests passed." @echo " Tests passed."
_push_tag: _check-main:
@echo "$(T)📦 Pushing tag...$(R)" @test "$$(git rev-parse --abbrev-ref HEAD)" = "main" \
git push --tags || (echo "✗ ERROR: Not on main branch" && exit 1)
@echo "✅ Tag pushed. Check CI for build status."
# --- Help ---
# Print a categorized summary of the available targets.
help:
	@echo "Cleverly Crafted Unawareness - Management Commands"
	@echo ""
	@echo "Usage: make [target] [p=profile1,profile2]"
	@echo ""
	@echo "Docker:"
	@echo " up Start containers (default profile: core)"
	@echo " Example: make up p=rag,meili"
	@echo " down Stop all containers"
	@echo " restart Restart containers (supports p=...)"
	@echo " logs Follow logs (supports p=...)"
	@echo " ps Status of containers"
	@echo " shell Open bash in the core container"
	@echo " build Build the production Docker image"
	@echo ""
	@echo "Dev & Quality:"
	@echo " setup Bootstrap .env and security keys"
	@echo " install Install dependencies via $(RUNNER)"
	@echo " test Run pytest suite"
	@echo " coverage Run tests and generate HTML report"
	@echo " lint/format Quality and style checks"
	@echo ""
	@echo "Release:"
	@echo " major|minor|patch Bump version and push tags (main branch only)"

0
alfred/__init__.py Normal file
View File

View File

@@ -1,6 +1,7 @@
"""Agent module for media library management.""" """Agent module for media library management."""
from alfred.settings import settings
from .agent import Agent from .agent import Agent
from .config import settings
__all__ = ["Agent", "settings"] __all__ = ["Agent", "settings"]

View File

@@ -5,9 +5,9 @@ import logging
from collections.abc import AsyncGenerator from collections.abc import AsyncGenerator
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from alfred.settings import settings
from .config import settings
from .prompts import PromptBuilder from .prompts import PromptBuilder
from .registry import Tool, make_tools from .registry import Tool, make_tools
@@ -21,17 +21,20 @@ class Agent:
Uses OpenAI-compatible tool calling API. Uses OpenAI-compatible tool calling API.
""" """
def __init__(self, llm, max_tool_iterations: int = 5): def __init__(self, settings, llm, max_tool_iterations: int = 5):
""" """
Initialize the agent. Initialize the agent.
Args: Args:
settings: Application settings instance
llm: LLM client with complete() method llm: LLM client with complete() method
max_tool_iterations: Maximum number of tool execution iterations max_tool_iterations: Maximum number of tool execution iterations
""" """
self.settings = settings
self.llm = llm self.llm = llm
self.tools: dict[str, Tool] = make_tools() self.tools: dict[str, Tool] = make_tools(settings)
self.prompt_builder = PromptBuilder(self.tools) self.prompt_builder = PromptBuilder(self.tools)
self.settings = settings
self.max_tool_iterations = max_tool_iterations self.max_tool_iterations = max_tool_iterations
def step(self, user_input: str) -> str: def step(self, user_input: str) -> str:
@@ -78,7 +81,7 @@ class Agent:
tools_spec = self.prompt_builder.build_tools_spec() tools_spec = self.prompt_builder.build_tools_spec()
# Tool execution loop # Tool execution loop
for _iteration in range(self.max_tool_iterations): for _iteration in range(self.settings.max_tool_iterations):
# Call LLM with tools # Call LLM with tools
llm_result = self.llm.complete(messages, tools=tools_spec) llm_result = self.llm.complete(messages, tools=tools_spec)
@@ -230,7 +233,7 @@ class Agent:
tools_spec = self.prompt_builder.build_tools_spec() tools_spec = self.prompt_builder.build_tools_spec()
# Tool execution loop # Tool execution loop
for _iteration in range(self.max_tool_iterations): for _iteration in range(self.settings.max_tool_iterations):
# Call LLM with tools # Call LLM with tools
llm_result = self.llm.complete(messages, tools=tools_spec) llm_result = self.llm.complete(messages, tools=tools_spec)

View File

@@ -6,7 +6,8 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from ..config import settings from alfred.settings import Settings, settings
from .exceptions import LLMAPIError, LLMConfigurationError from .exceptions import LLMAPIError, LLMConfigurationError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -21,6 +22,7 @@ class DeepSeekClient:
base_url: str | None = None, base_url: str | None = None,
model: str | None = None, model: str | None = None,
timeout: int | None = None, timeout: int | None = None,
settings: Settings | None = None,
): ):
""" """
Initialize DeepSeek client. Initialize DeepSeek client.
@@ -34,10 +36,10 @@ class DeepSeekClient:
Raises: Raises:
LLMConfigurationError: If API key is missing LLMConfigurationError: If API key is missing
""" """
self.api_key = api_key or settings.deepseek_api_key self.api_key = api_key or self.settings.deepseek_api_key
self.base_url = base_url or settings.deepseek_base_url self.base_url = base_url or self.settings.deepseek_base_url
self.model = model or settings.model self.model = model or self.settings.deepseek_model
self.timeout = timeout or settings.request_timeout self.timeout = timeout or self.settings.request_timeout
if not self.api_key: if not self.api_key:
raise LLMConfigurationError( raise LLMConfigurationError(
@@ -94,7 +96,7 @@ class DeepSeekClient:
payload = { payload = {
"model": self.model, "model": self.model,
"messages": messages, "messages": messages,
"temperature": settings.temperature, "temperature": settings.llm_temperature,
} }
# Add tools if provided # Add tools if provided

View File

@@ -1,13 +1,13 @@
"""Ollama LLM client with robust error handling.""" """Ollama LLM client with robust error handling."""
import logging import logging
import os
from typing import Any from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from ..config import settings from alfred.settings import Settings
from .exceptions import LLMAPIError, LLMConfigurationError from .exceptions import LLMAPIError, LLMConfigurationError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -32,6 +32,7 @@ class OllamaClient:
model: str | None = None, model: str | None = None,
timeout: int | None = None, timeout: int | None = None,
temperature: float | None = None, temperature: float | None = None,
settings: Settings | None = None,
): ):
""" """
Initialize Ollama client. Initialize Ollama client.
@@ -45,13 +46,11 @@ class OllamaClient:
Raises: Raises:
LLMConfigurationError: If configuration is invalid LLMConfigurationError: If configuration is invalid
""" """
self.base_url = base_url or os.getenv( self.base_url = base_url or settings.ollama_base_url
"OLLAMA_BASE_URL", "http://localhost:11434" self.model = model or settings.ollama_model
)
self.model = model or os.getenv("OLLAMA_MODEL", "llama3.2")
self.timeout = timeout or settings.request_timeout self.timeout = timeout or settings.request_timeout
self.temperature = ( self.temperature = (
temperature if temperature is not None else settings.temperature temperature if temperature is not None else settings.llm_temperature
) )
if not self.base_url: if not self.base_url:

View File

@@ -3,7 +3,7 @@
import json import json
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from .registry import Tool from .registry import Tool
@@ -52,7 +52,7 @@ class PromptBuilder:
# Show first 5 results # Show first 5 results
for i, result in enumerate(result_list[:5]): for i, result in enumerate(result_list[:5]):
name = result.get("name", "Unknown") name = result.get("name", "Unknown")
lines.append(f" {i+1}. {name}") lines.append(f" {i + 1}. {name}")
if len(result_list) > 5: if len(result_list) > 5:
lines.append(f" ... and {len(result_list) - 5} more") lines.append(f" ... and {len(result_list) - 5} more")

View File

@@ -78,10 +78,13 @@ def _create_tool_from_function(func: Callable) -> Tool:
) )
def make_tools() -> dict[str, Tool]: def make_tools(settings) -> dict[str, Tool]:
""" """
Create and register all available tools. Create and register all available tools.
Args:
settings: Application settings instance
Returns: Returns:
Dictionary mapping tool names to Tool objects Dictionary mapping tool names to Tool objects
""" """

View File

@@ -3,12 +3,12 @@
import logging import logging
from typing import Any from typing import Any
from application.movies import SearchMovieUseCase from alfred.application.movies import SearchMovieUseCase
from application.torrents import AddTorrentUseCase, SearchTorrentsUseCase from alfred.application.torrents import AddTorrentUseCase, SearchTorrentsUseCase
from infrastructure.api.knaben import knaben_client from alfred.infrastructure.api.knaben import knaben_client
from infrastructure.api.qbittorrent import qbittorrent_client from alfred.infrastructure.api.qbittorrent import qbittorrent_client
from infrastructure.api.tmdb import tmdb_client from alfred.infrastructure.api.tmdb import tmdb_client
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -2,8 +2,8 @@
from typing import Any from typing import Any
from application.filesystem import ListFolderUseCase, SetFolderPathUseCase from alfred.application.filesystem import ListFolderUseCase, SetFolderPathUseCase
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
def set_path_for_folder(folder_name: str, path_value: str) -> dict[str, Any]: def set_path_for_folder(folder_name: str, path_value: str) -> dict[str, Any]:

View File

@@ -3,7 +3,7 @@
import logging import logging
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -2,22 +2,21 @@
import json import json
import logging import logging
import os
import time import time
import uuid import uuid
from pathlib import Path
from typing import Any from typing import Any
from fastapi import FastAPI, HTTPException from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse, StreamingResponse from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel, Field, validator from pydantic import BaseModel, Field, validator
from agent.agent import Agent from alfred.agent.agent import Agent
from agent.config import settings from alfred.agent.llm.deepseek import DeepSeekClient
from agent.llm.deepseek import DeepSeekClient from alfred.agent.llm.exceptions import LLMAPIError, LLMConfigurationError
from agent.llm.exceptions import LLMAPIError, LLMConfigurationError from alfred.agent.llm.ollama import OllamaClient
from agent.llm.ollama import OllamaClient from alfred.infrastructure.persistence import get_memory, init_memory
from infrastructure.persistence import get_memory, init_memory from alfred.settings import settings
logging.basicConfig( logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
@@ -30,38 +29,33 @@ app = FastAPI(
version="0.2.0", version="0.2.0",
) )
# TODO: Make a variable memory_path = Path(settings.data_storage) / "memory"
manifests = "manifests" init_memory(storage_dir=str(memory_path))
# Sécurité : on vérifie que le dossier existe pour ne pas faire planter l'app au démarrage logger.info(f"Memory context initialized (path: {memory_path})")
if os.path.exists(manifests):
app.mount("/manifests", StaticFiles(directory=manifests), name="manifests")
else:
print(
f"⚠️ ATTENTION : Le dossier '{manifests}' est introuvable. Le plugin ne marchera pas."
)
# Initialize memory context at startup
# Use /data/memory in Docker, fallback to memory_data for local dev
storage_dir = os.getenv("MEMORY_STORAGE_DIR", "memory_data")
init_memory(storage_dir=storage_dir)
logger.info(f"Memory context initialized (storage: {storage_dir})")
# Initialize LLM based on environment variable # Initialize LLM based on environment variable
llm_provider = os.getenv("LLM_PROVIDER", "deepseek").lower() llm_provider = settings.default_llm_provider.lower()
try: try:
if llm_provider == "ollama": if llm_provider == "local":
logger.info("Using Ollama LLM") logger.info("Using local Ollama LLM")
llm = OllamaClient() llm = OllamaClient(settings=settings)
else: elif llm_provider == "deepseek":
logger.info("Using DeepSeek LLM") logger.info("Using DeepSeek LLM")
llm = DeepSeekClient() llm = DeepSeekClient()
elif llm_provider == "claude":
raise ValueError(f"LLM provider not fully implemented: {llm_provider}")
else:
raise ValueError(f"Unknown LLM provider: {llm_provider}")
except LLMConfigurationError as e: except LLMConfigurationError as e:
logger.error(f"Failed to initialize LLM: {e}") logger.error(f"Failed to initialize LLM: {e}")
raise raise
# Initialize agent # Initialize agent
agent = Agent(llm=llm, max_tool_iterations=settings.max_tool_iterations) agent = Agent(
settings=settings, llm=llm, max_tool_iterations=settings.max_tool_iterations
)
logger.info("Agent Media API initialized") logger.info("Agent Media API initialized")
@@ -116,7 +110,7 @@ def extract_last_user_content(messages: list[dict[str, Any]]) -> str:
@app.get("/health") @app.get("/health")
async def health_check(): async def health_check():
"""Health check endpoint.""" """Health check endpoint."""
return {"status": "healthy", "version": "0.2.0"} return {"status": "healthy", "version": f"v{settings.alfred_version}"}
@app.get("/v1/models") @app.get("/v1/models")

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
from .dto import ListFolderResponse from .dto import ListFolderResponse

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.filesystem import FileManager from alfred.infrastructure.filesystem import FileManager
from .dto import SetFolderPathResponse from .dto import SetFolderPathResponse

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.api.tmdb import ( from alfred.infrastructure.api.tmdb import (
TMDBAPIError, TMDBAPIError,
TMDBClient, TMDBClient,
TMDBConfigurationError, TMDBConfigurationError,

View File

@@ -2,7 +2,7 @@
import logging import logging
from infrastructure.api.qbittorrent import ( from alfred.infrastructure.api.qbittorrent import (
QBittorrentAPIError, QBittorrentAPIError,
QBittorrentAuthError, QBittorrentAuthError,
QBittorrentClient, QBittorrentClient,

View File

@@ -2,7 +2,11 @@
import logging import logging
from infrastructure.api.knaben import KnabenAPIError, KnabenClient, KnabenNotFoundError from alfred.infrastructure.api.knaben import (
KnabenAPIError,
KnabenClient,
KnabenNotFoundError,
)
from .dto import SearchTorrentsResponse from .dto import SearchTorrentsResponse

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.settings import Settings, settings
from .dto import TorrentResult from .dto import TorrentResult
from .exceptions import KnabenAPIError, KnabenNotFoundError from .exceptions import KnabenAPIError, KnabenNotFoundError

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.settings import Settings, settings
from .dto import TorrentInfo from .dto import TorrentInfo
from .exceptions import QBittorrentAPIError, QBittorrentAuthError from .exceptions import QBittorrentAPIError, QBittorrentAuthError

View File

@@ -6,7 +6,7 @@ from typing import Any
import requests import requests
from requests.exceptions import HTTPError, RequestException, Timeout from requests.exceptions import HTTPError, RequestException, Timeout
from agent.config import Settings, settings from alfred.settings import Settings, settings
from .dto import MediaResult from .dto import MediaResult
from .exceptions import ( from .exceptions import (

View File

@@ -7,7 +7,7 @@ from enum import Enum
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
from .exceptions import PathTraversalError from .exceptions import PathTraversalError

View File

@@ -3,9 +3,9 @@
import logging import logging
from pathlib import Path from pathlib import Path
from domain.movies.entities import Movie from alfred.domain.movies.entities import Movie
from domain.tv_shows.entities import Episode, Season, TVShow from alfred.domain.tv_shows.entities import Episode, Season, TVShow
from domain.tv_shows.value_objects import SeasonNumber from alfred.domain.tv_shows.value_objects import SeasonNumber
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -6,7 +6,7 @@ without passing it explicitly through all function calls.
Usage: Usage:
# At application startup # At application startup
from infrastructure.persistence import init_memory, get_memory from alfred.infrastructure.persistence import init_memory, get_memory
init_memory("memory_data") init_memory("memory_data")

View File

@@ -4,11 +4,11 @@ import logging
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any
from domain.movies.entities import Movie from alfred.domain.movies.entities import Movie
from domain.movies.repositories import MovieRepository from alfred.domain.movies.repositories import MovieRepository
from domain.movies.value_objects import MovieTitle, Quality, ReleaseYear from alfred.domain.movies.value_objects import MovieTitle, Quality, ReleaseYear
from domain.shared.value_objects import FilePath, FileSize, ImdbId from alfred.domain.shared.value_objects import FilePath, FileSize, ImdbId
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -3,11 +3,11 @@
import logging import logging
from typing import Any from typing import Any
from domain.shared.value_objects import FilePath, ImdbId from alfred.domain.shared.value_objects import FilePath, ImdbId
from domain.subtitles.entities import Subtitle from alfred.domain.subtitles.entities import Subtitle
from domain.subtitles.repositories import SubtitleRepository from alfred.domain.subtitles.repositories import SubtitleRepository
from domain.subtitles.value_objects import Language, SubtitleFormat, TimingOffset from alfred.domain.subtitles.value_objects import Language, SubtitleFormat, TimingOffset
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -4,11 +4,11 @@ import logging
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any
from domain.shared.value_objects import ImdbId from alfred.domain.shared.value_objects import ImdbId
from domain.tv_shows.entities import TVShow from alfred.domain.tv_shows.entities import TVShow
from domain.tv_shows.repositories import TVShowRepository from alfred.domain.tv_shows.repositories import TVShowRepository
from domain.tv_shows.value_objects import ShowStatus from alfred.domain.tv_shows.value_objects import ShowStatus
from infrastructure.persistence import get_memory from alfred.infrastructure.persistence import get_memory
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

209
alfred/settings.py Normal file
View File

@@ -0,0 +1,209 @@
import secrets
from pathlib import Path
from typing import NamedTuple
import tomllib
from pydantic import Field, computed_field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
BASE_DIR = Path(__file__).resolve().parent.parent
ENV_FILE_PATH = BASE_DIR / ".env"
toml_path = BASE_DIR / "pyproject.toml"
class ConfigurationError(Exception):
    """Signals that a configuration value failed validation."""
class ProjectVersions(NamedTuple):
    """
    Immutable structure for project versions.
    Forces explicit naming and prevents accidental swaps.
    """

    # LibreChat image version (read from tool.alfred.settings.librechat_version).
    librechat: str
    # RAG engine image version (read from tool.alfred.settings.rag_version).
    rag: str
    # Alfred's own version (read from tool.poetry.version).
    alfred: str
def get_versions_from_toml(path: Path | None = None) -> ProjectVersions:
    """
    Read component versions from pyproject.toml.

    Args:
        path: Optional explicit path to a pyproject.toml. Defaults to the
            module-level ``toml_path`` next to this package.

    Returns:
        A ProjectVersions tuple (librechat, rag, alfred).

    Raises:
        FileNotFoundError: If the TOML file does not exist.
        KeyError: If any expected version key is missing.
    """
    # NOTE: the previous docstring claimed defaults were returned on a
    # missing file/key; the function has always raised instead.
    source = path or toml_path
    if not source.exists():
        raise FileNotFoundError(f"pyproject.toml not found: {source}")

    with open(source, "rb") as f:
        data = tomllib.load(f)

    try:
        return ProjectVersions(
            librechat=data["tool"]["alfred"]["settings"]["librechat_version"],
            rag=data["tool"]["alfred"]["settings"]["rag_version"],
            alfred=data["tool"]["poetry"]["version"],
        )
    except KeyError as e:
        raise KeyError(f"Error: Missing key {e} in pyproject.toml") from e
# Load versions once
VERSIONS: ProjectVersions = get_versions_from_toml()
class Settings(BaseSettings):
    """Central application configuration.

    Values are resolved from (highest precedence first) the process
    environment, the .env file at ENV_FILE_PATH, then the defaults below.
    Secrets default to freshly generated random values so a bare install
    is still "secure by default".
    """

    model_config = SettingsConfigDict(
        env_file=ENV_FILE_PATH,
        env_file_encoding="utf-8",
        extra="ignore",
        case_sensitive=False,
    )

    # --- GENERAL SETTINGS ---
    host: str = "0.0.0.0"
    port: int = 3080
    debug_logging: bool = False
    debug_console: bool = False
    # Root directory for persisted application data.
    data_storage: str = "data"

    # Component versions resolved once from pyproject.toml at import time.
    librechat_version: str = Field(VERSIONS.librechat, description="Librechat version")
    rag_version: str = Field(VERSIONS.rag, description="RAG engine version")
    alfred_version: str = Field(VERSIONS.alfred, description="Alfred version")

    # --- CONTEXT SETTINGS ---
    max_history_messages: int = 10
    max_tool_iterations: int = 10
    request_timeout: int = 30  # seconds, applied to outbound HTTP calls

    # TODO: Finish
    deepseek_base_url: str = "https://api.deepseek.com"
    deepseek_model: str = "deepseek-chat"

    # --- API KEYS ---
    anthropic_api_key: str | None = Field(None, description="Claude API key")
    deepseek_api_key: str | None = Field(None, description="Deepseek API key")
    google_api_key: str | None = Field(None, description="Gemini API key")
    kimi_api_key: str | None = Field(None, description="Kimi API key")
    openai_api_key: str | None = Field(None, description="ChatGPT API key")

    # --- SECURITY KEYS ---
    # Generated automatically if not in .env to ensure "Secure by Default"
    jwt_secret: str = Field(default_factory=lambda: secrets.token_urlsafe(32))
    jwt_refresh_secret: str = Field(default_factory=lambda: secrets.token_urlsafe(32))
    # We keep these for encryption of keys in MongoDB (AES-256 Hex format)
    creds_key: str = Field(default_factory=lambda: secrets.token_hex(32))
    creds_iv: str = Field(default_factory=lambda: secrets.token_hex(16))

    # --- SERVICES ---
    qbittorrent_url: str = "http://qbittorrent:16140"
    qbittorrent_username: str = "admin"
    qbittorrent_password: str = Field(default_factory=lambda: secrets.token_urlsafe(16))

    mongo_host: str = "mongodb"
    mongo_user: str = "alfred"
    mongo_password: str = Field(
        default_factory=lambda: secrets.token_urlsafe(24), repr=False, exclude=True
    )
    mongo_port: int = 27017
    mongo_db_name: str = "alfred"

    @computed_field(repr=False)
    @property
    def mongo_uri(self) -> str:
        """Full MongoDB connection URI (embeds the password; keep out of logs)."""
        return (
            f"mongodb://{self.mongo_user}:{self.mongo_password}"
            f"@{self.mongo_host}:{self.mongo_port}/{self.mongo_db_name}"
            f"?authSource=admin"
        )

    postgres_host: str = "vectordb"
    postgres_user: str = "alfred"
    postgres_password: str = Field(
        default_factory=lambda: secrets.token_urlsafe(24), repr=False, exclude=True
    )
    postgres_port: int = 5432
    postgres_db_name: str = "alfred"

    @computed_field(repr=False)
    @property
    def postgres_uri(self) -> str:
        """Full PostgreSQL connection URI (embeds the password; keep out of logs)."""
        return (
            f"postgresql://{self.postgres_user}:{self.postgres_password}"
            f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_db_name}"
        )

    tmdb_api_key: str | None = Field(None, description="The Movie Database API key")
    tmdb_base_url: str = "https://api.themoviedb.org/3"

    # --- LLM PICKER & CONFIG ---
    # Providers: 'local', 'deepseek', ...
    default_llm_provider: str = "local"
    ollama_base_url: str = "http://ollama:11434"
    # Models: ...
    ollama_model: str = "llama3.3:latest"
    llm_temperature: float = 0.2

    # --- RAG ENGINE ---
    rag_enabled: bool = True  # TODO: Handle False
    rag_api_url: str = "http://rag_api:8000"
    embeddings_provider: str = "ollama"
    # Models: ...
    embeddings_model: str = "nomic-embed-text"

    # --- MEILISEARCH ---
    meili_enabled: bool = Field(True, description="Enable meili")
    meili_no_analytics: bool = True
    meili_host: str = "http://meilisearch:7700"
    meili_master_key: str = Field(
        default_factory=lambda: secrets.token_urlsafe(32),
        description="Master key for Meilisearch",
        repr=False,
    )

    # --- VALIDATORS ---
    @field_validator("llm_temperature")
    @classmethod
    def validate_temperature(cls, v: float) -> float:
        """Reject sampling temperatures outside the API-supported 0.0-2.0 range."""
        if not 0.0 <= v <= 2.0:
            raise ConfigurationError(
                f"Temperature must be between 0.0 and 2.0, got {v}"
            )
        return v

    @field_validator("max_tool_iterations")
    @classmethod
    def validate_max_iterations(cls, v: int) -> int:
        """Bound the agent tool loop to 1-20 iterations.

        The error message previously said "between 1 and 50" while the
        check enforced 20; the message now matches the actual bound.
        """
        if not 1 <= v <= 20:
            raise ConfigurationError(
                f"max_tool_iterations must be between 1 and 20, got {v}"
            )
        return v

    @field_validator("request_timeout")
    @classmethod
    def validate_timeout(cls, v: int) -> int:
        """Keep HTTP timeouts sane: at least 1s, at most 5 minutes."""
        if not 1 <= v <= 300:
            raise ConfigurationError(
                f"request_timeout must be between 1 and 300 seconds, got {v}"
            )
        return v

    @field_validator("deepseek_base_url", "tmdb_base_url")
    @classmethod
    def validate_url(cls, v: str, info) -> str:
        """Require an explicit http(s) scheme on configured base URLs."""
        if not v.startswith(("http://", "https://")):
            raise ConfigurationError(f"Invalid {info.field_name}")
        return v

    def is_tmdb_configured(self) -> bool:
        """True when a TMDB API key is set."""
        return bool(self.tmdb_api_key)

    def is_deepseek_configured(self) -> bool:
        """True when a DeepSeek API key is set."""
        return bool(self.deepseek_api_key)

    def dump_safe(self) -> dict:
        """Dump settings for display.

        Fields declared with exclude=True (mongo/postgres passwords) are
        omitted by model_dump. NOTE(review): repr=False alone does NOT
        exclude a field here — meili_master_key and the JWT secrets still
        appear in this dump; confirm that is intended before logging it.
        """
        return self.model_dump(exclude_none=False)
settings = Settings()

View File

@@ -1,115 +0,0 @@
"""Configuration management with validation."""
import os
from dataclasses import dataclass, field
from pathlib import Path
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
class ConfigurationError(Exception):
"""Raised when configuration is invalid."""
pass
@dataclass
class Settings:
"""Application settings loaded from environment variables."""
# LLM Configuration
deepseek_api_key: str = field(
default_factory=lambda: os.getenv("DEEPSEEK_API_KEY", "")
)
deepseek_base_url: str = field(
default_factory=lambda: os.getenv(
"DEEPSEEK_BASE_URL", "https://api.deepseek.com"
)
)
model: str = field(
default_factory=lambda: os.getenv("DEEPSEEK_MODEL", "deepseek-chat")
)
temperature: float = field(
default_factory=lambda: float(os.getenv("TEMPERATURE", "0.2"))
)
# TMDB Configuration
tmdb_api_key: str = field(default_factory=lambda: os.getenv("TMDB_API_KEY", ""))
tmdb_base_url: str = field(
default_factory=lambda: os.getenv(
"TMDB_BASE_URL", "https://api.themoviedb.org/3"
)
)
# Storage Configuration
memory_file: str = field(
default_factory=lambda: os.getenv("MEMORY_FILE", "memory.json")
)
# Security Configuration
max_tool_iterations: int = field(
default_factory=lambda: int(os.getenv("MAX_TOOL_ITERATIONS", "5"))
)
request_timeout: int = field(
default_factory=lambda: int(os.getenv("REQUEST_TIMEOUT", "30"))
)
# Memory Configuration
max_history_messages: int = field(
default_factory=lambda: int(os.getenv("MAX_HISTORY_MESSAGES", "10"))
)
def __post_init__(self):
"""Validate settings after initialization."""
self._validate()
def _validate(self) -> None:
"""Validate configuration values."""
# Validate temperature
if not 0.0 <= self.temperature <= 2.0:
raise ConfigurationError(
f"Temperature must be between 0.0 and 2.0, got {self.temperature}"
)
# Validate max_tool_iterations
if self.max_tool_iterations < 1 or self.max_tool_iterations > 20:
raise ConfigurationError(
f"max_tool_iterations must be between 1 and 20, got {self.max_tool_iterations}"
)
# Validate request_timeout
if self.request_timeout < 1 or self.request_timeout > 300:
raise ConfigurationError(
f"request_timeout must be between 1 and 300 seconds, got {self.request_timeout}"
)
# Validate URLs
if not self.deepseek_base_url.startswith(("http://", "https://")):
raise ConfigurationError(
f"Invalid deepseek_base_url: {self.deepseek_base_url}"
)
if not self.tmdb_base_url.startswith(("http://", "https://")):
raise ConfigurationError(f"Invalid tmdb_base_url: {self.tmdb_base_url}")
# Validate memory file path
memory_path = Path(self.memory_file)
if memory_path.exists() and not memory_path.is_file():
raise ConfigurationError(
f"memory_file exists but is not a file: {self.memory_file}"
)
def is_deepseek_configured(self) -> bool:
"""Check if DeepSeek API is properly configured."""
return bool(self.deepseek_api_key and self.deepseek_base_url)
def is_tmdb_configured(self) -> bool:
"""Check if TMDB API is properly configured."""
return bool(self.tmdb_api_key and self.tmdb_base_url)
# Global settings instance
settings = Settings()

231
cli.py Normal file
View File

@@ -0,0 +1,231 @@
#!/usr/bin/env python3
import os
import secrets
import shutil
import subprocess
import sys
from datetime import datetime
from enum import StrEnum
from pathlib import Path
from typing import NoReturn
# Environment variables the user must supply; setup prompts for any missing one.
REQUIRED_VARS = ["DEEPSEEK_API_KEY", "TMDB_API_KEY", "QBITTORRENT_URL"]
# Secrets generated automatically when absent. Values are sizes in bytes
# passed to secrets.token_hex, so the resulting hex string is twice as long.
KEYS_TO_GENERATE = {
    "JWT_SECRET": 32,
    "JWT_REFRESH_SECRET": 32,
    "CREDS_KEY": 32,
    "CREDS_IV": 16,
}
class Style(StrEnum):
    """ANSI escape codes for styling terminal output.

    Usage: f"{Style.RED}Error{Style.RESET}"
    """

    RESET = "\033[0m"
    BOLD = "\033[1m"
    RED = "\033[31m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    CYAN = "\033[36m"
    DIM = "\033[2m"
# Only for terminals and if not specified otherwise
USE_COLORS = sys.stdout.isatty() and "NO_COLOR" not in os.environ
def styled(text: str, color_code: str) -> str:
    """Wrap text in the given ANSI code, but only when colors are enabled."""
    return f"{color_code}{text}{Style.RESET}" if USE_COLORS else text
def log(msg: str, color: str | None = None, prefix="") -> None:
    """Print a message, applying ANSI color when one is given."""
    if color:
        msg = styled(msg, color)
    print(f"{prefix}{msg}")
def error_exit(msg: str) -> NoReturn:
    """Log the message in red, then terminate the process with exit code 1."""
    log(msg, Style.RED)
    sys.exit(1)
def is_docker_running() -> bool:
    """Check if Docker is available and responsive.

    Returns True when `docker info` succeeds, False when the daemon is
    unreachable. NOTE(review): a missing `docker` binary does not return
    False — it aborts the whole process via error_exit; confirm callers
    expect that asymmetry.
    """
    if shutil.which("docker") is None:
        error_exit("Docker is not installed.")
    result = subprocess.run(
        ["docker", "info"],
        # Redirect stdout/stderr to keep output clean on success
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        # Prevent exception being raised
        check=False,
    )
    return result.returncode == 0
def parse_env(content: str) -> dict[str, str]:
    """Extract KEY=VALUE assignments from dotenv text.

    Blank lines, comment lines and lines without '=' are ignored; keys
    and values are whitespace-trimmed.
    """
    result: dict[str, str] = {}
    for entry in content.splitlines():
        stripped = entry.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        name, _, value = stripped.partition("=")
        result[name.strip()] = value.strip()
    return result
def dump_env(content: str, data: dict[str, str]) -> str:
    """Render dotenv text with the values from `data` applied.

    Existing assignments are updated in place (preserving inline ' #'
    comments); non-assignment lines pass through untouched; keys from
    `data` not present in `content` are appended at the end.
    """
    lines_out: list[str] = []
    seen: set[str] = set()

    for original in content.splitlines():
        stripped = original.strip()
        # Blank lines, comments and non-assignments are copied verbatim.
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            lines_out.append(original)
            continue

        left, right = original.split("=", 1)
        name = left.strip()
        # Keys not being updated are also copied verbatim.
        if name not in data:
            lines_out.append(original)
            continue

        seen.add(name)
        replacement = data[name]
        # Keep any trailing inline comment on the value side.
        if " #" in right:
            _, tail = right.split(" #", 1)
            lines_out.append(f"{left}={replacement} #{tail}")
        else:
            lines_out.append(f"{left}={replacement}")

    # Append keys that never appeared in the original content.
    lines_out.extend(f"{k}={v}" for k, v in data.items() if k not in seen)
    return "\n".join(lines_out) + "\n"
def ensure_env() -> None:
    """Manage .env lifecycle: creation, secret generation, prompts.

    Reads .env when present (otherwise falls back to .env.example as the
    template), generates any missing secrets, interactively prompts for
    missing required values, and — only when something changed — writes the
    merged result back to .env, backing up any pre-existing file first.
    """
    env_path = Path(".env")
    env_example_path = Path(".env.example")
    updated: bool = False
    # Read .env if exists, else start from the example template.
    # NOTE(review): assumes .env.example exists — read_text raises otherwise.
    if env_path.exists():
        content: str = env_path.read_text(encoding="utf-8")
    else:
        content: str = env_example_path.read_text(encoding="utf-8")
    existing_vars: dict[str, str] = parse_env(content)
    # Generate missing secrets. KEYS_TO_GENERATE presumably maps key name to
    # a byte length; token_hex(length) yields 2*length hex chars — TODO confirm.
    for key, length in KEYS_TO_GENERATE.items():
        if key not in existing_vars or not existing_vars[key]:
            log(f"Generating {key}...", Style.GREEN, prefix=" ")
            existing_vars[key] = secrets.token_hex(length)
            updated = True
            log("Done", Style.GREEN, prefix=" ")
    # Prompt for missing mandatory keys (interactive; blocks on stdin)
    color = Style.YELLOW if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""
    for key in REQUIRED_VARS:
        if key not in existing_vars or not existing_vars[key]:
            try:
                existing_vars[key] = input(
                    f" {color}Enter value for {key}: {reset}"
                ).strip()
                updated = True
            except KeyboardInterrupt:
                # Newline keeps the terminal tidy after ^C, then abort.
                print()
                error_exit("Aborted by user.")
    # Write to disk
    if updated:
        # But backup original first (timestamped sibling copy)
        if env_path.exists():
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = Path(f"{env_path}.{timestamp}.bak")
            shutil.copy(env_path, backup_path)
            log(f"Backup created: {backup_path}", Style.DIM)
        new_content = dump_env(content, existing_vars)
        env_path.write_text(new_content, encoding="utf-8")
        log(".env updated successfully.", Style.GREEN)
    else:
        log("Configuration is up to date.", Style.GREEN)
def setup() -> None:
    """Orchestrate initialization: verify Docker, then prepare the .env file.

    Raises:
        SystemExit: via ``error_exit`` when Docker is missing or not running.
    """
    # Bug fix: the boolean returned by is_docker_running() was previously
    # discarded, so setup continued even when the daemon was stopped.
    if not is_docker_running():
        error_exit("Docker daemon is not running.")
    ensure_env()
def status() -> None:
    """Display simple dashboard."""
    # Title is bold only when the terminal supports colors.
    bold = Style.BOLD if USE_COLORS else ""
    reset = Style.RESET if USE_COLORS else ""
    print(f"\n{bold}ALFRED STATUS{reset}")
    print(f"{bold}==============={reset}\n")
    # Docker daemon state
    docker_state = (
        styled("✓ running", Style.GREEN)
        if is_docker_running()
        else styled("✗ stopped", Style.RED)
    )
    print(f" Docker: {docker_state}")
    # .env presence
    env_state = (
        styled("✓ present", Style.GREEN)
        if Path(".env").exists()
        else styled("✗ missing", Style.RED)
    )
    print(f" .env: {env_state}")
    print("")
def check() -> None:
    """Silent check for prerequisites (used by 'make up').

    NOTE(review): currently a plain alias for setup(), which prints and may
    prompt interactively — not actually silent; confirm intended behavior.
    """
    setup()
def main() -> None:
if len(sys.argv) < 2:
print("Usage: python cli.py [setup|check|status]")
sys.exit(1)
cmd = sys.argv[1]
if cmd == "setup":
setup()
elif cmd == "check":
check()
elif cmd == "status":
status()
else:
error_exit(f"Unknown command: {cmd}")
if __name__ == "__main__":
    # Run the CLI dispatcher only when executed as a script, not on import.
    main()

206
docker-compose.yaml Normal file
View File

@@ -0,0 +1,206 @@
services:
# - CORE SERVICES -
# --- .ENV INIT ---
alfred-init:
container_name: alfred-init
build:
context: .
target: builder
args:
PYTHON_VERSION: ${PYTHON_VERSION}
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
RUNNER: ${RUNNER}
command: python scripts/bootstrap.py
networks:
- alfred-net
# --- MAIN APPLICATION ---
alfred:
container_name: alfred-core
build:
context: .
args:
PYTHON_VERSION: ${PYTHON_VERSION}
PYTHON_VERSION_SHORT: ${PYTHON_VERSION_SHORT}
RUNNER: ${RUNNER}
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
volumes:
- ./data:/data
- ./logs:/logs
# TODO: Hot reload (comment out in production)
#- ./alfred:/home/appuser/alfred
networks:
- alfred-net
# --- FRONTEND LIBRECHAT ---
librechat:
container_name: alfred-librechat
image: ghcr.io/danny-avila/librechat:${LIBRECHAT_VERSION}
depends_on:
alfred-init:
condition: service_completed_successfully
mongodb:
condition: service_healthy
restart: unless-stopped
env_file:
- path: .env
required: true
environment:
# Remap value name
- SEARCH=${MEILI_ENABLED}
ports:
- "${PORT}:${PORT}"
volumes:
- ./data/librechat/images:/app/client/public/images
- ./data/librechat/uploads:/app/client/uploads
- ./logs:/app/api/logs
# Mount custom endpoint
- ./librechat/manifests:/app/manifests:ro
- ./librechat/librechat.yaml:/app/librechat.yaml:ro
networks:
- alfred-net
# --- DATABASE #1 - APP STATE ---
mongodb:
container_name: alfred-mongodb
image: mongo:latest
restart: unless-stopped
depends_on:
alfred-init:
condition: service_completed_successfully
env_file:
- path: .env
required: true
environment:
# Remap value name
- MONGO_INITDB_ROOT_USERNAME=${MONGO_USER}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_PASSWORD}
ports:
- "${MONGO_PORT}:${MONGO_PORT}"
volumes:
- ./data/mongo:/data/db
command: mongod --quiet --setParameter logComponentVerbosity='{"network":{"verbosity":0}}'
healthcheck:
test: |
mongosh --quiet --eval "db.adminCommand('ping')" || \
mongosh --quiet -u "${MONGO_USER}" -p "${MONGO_PASSWORD}" --authenticationDatabase admin --eval "db.adminCommand('ping')"
interval: 10s
timeout: 5s
retries: 5
networks:
- alfred-net
# --- OLLAMA - LOCAL LLM ENGINE ---
ollama:
image: ollama/ollama:latest
container_name: alfred-ollama
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
volumes:
- ./data/ollama:/root/.ollama
networks:
- alfred-net
# - OPTIONAL SERVICES -
# --- SEARCH ENGINE SUPER FAST (Optional) ---
meilisearch:
container_name: alfred-meilisearch
image: getmeili/meilisearch:v1.12.3
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
volumes:
- ./data/meilisearch:/meili_data
profiles: ["meili", "full"]
networks:
- alfred-net
# --- RETRIEVAL AUGMENTED GENERATION SYSTEM (Optional) ---
rag_api:
container_name: alfred-rag
image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:${RAG_VERSION}
depends_on:
alfred-init:
condition: service_completed_successfully
vectordb:
condition: service_healthy
restart: unless-stopped
env_file:
- path: .env
required: true
ports:
- "${RAG_API_PORT}:${RAG_API_PORT}"
volumes:
- ./data/rag/uploads:/app/uploads
profiles: ["rag", "full"]
networks:
- alfred-net
# --- DATABASE #2 - Vector RAG (Optional) ---
vectordb:
container_name: alfred-vectordb
image: pgvector/pgvector:0.8.0-pg16-bookworm
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
ports:
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes:
- ./data/vectordb:/var/lib/postgresql/data
profiles: ["rag", "full"]
healthcheck:
test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-alfred} -d ${POSTGRES_DB_NAME:-alfred}" ]
interval: 5s
timeout: 5s
retries: 5
networks:
- alfred-net
# --- QBITTORRENT (Optional) ---
qbittorrent:
image: lscr.io/linuxserver/qbittorrent:latest
container_name: alfred-qbittorrent
depends_on:
alfred-init:
condition: service_completed_successfully
restart: unless-stopped
env_file:
- path: .env
required: true
environment:
- PUID=1000
- PGID=1000
- TZ=Europe/Paris
- WEBUI_PORT=${QBITTORRENT_PORT}
volumes:
- ./data/qbittorrent/config:/config
- ./data/qbittorrent/downloads:/downloads
profiles: ["qbittorrent", "full"]
ports:
- "${QBITTORRENT_PORT}:${QBITTORRENT_PORT}"
networks:
- alfred-net
networks:
alfred-net:
name: alfred-internal
driver: bridge

View File

@@ -1,206 +0,0 @@
version: "3.4"
services:
# Da brain
agent-brain:
build:
context: ./brain
dockerfile: Dockerfile
args:
RUNNER: ${RUNNER} # Get it from Makefile
container_name: agent-brain
restart: unless-stopped
env_file: .env
ports:
- "8000:8000"
volumes:
# Persistent data volumes (outside container /app)
- agent-memory:/data/memory
- agent-logs:/data/logs
# Development: mount code for hot reload (comment out in production)
# - ./brain:/app
environment:
# LLM Configuration
LLM_PROVIDER: ${LLM_PROVIDER:-deepseek}
DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY:-}
# Memory storage
MEMORY_STORAGE_DIR: /data/memory
# External services
TMDB_API_KEY: ${TMDB_API_KEY:-}
QBITTORRENT_URL: ${QBITTORRENT_URL:-}
QBITTORRENT_USERNAME: ${QBITTORRENT_USERNAME:-}
QBITTORRENT_PASSWORD: ${QBITTORRENT_PASSWORD:-}
networks:
- agent-network
# Da face (LibreChat)
librechat:
image: ghcr.io/danny-avila/librechat-dev:latest
container_name: librechat-frontend
restart: unless-stopped
ports:
- "3080:3080"
depends_on:
- mongodb
- meilisearch
- rag_api
- agent-brain
env_file: .env
environment:
# MongoDB connection (no auth, matching LibreChat default)
MONGO_URI: mongodb://mongodb:27017/LibreChat
# App configuration
HOST: 0.0.0.0
PORT: 3080
# Security
JWT_SECRET: ${JWT_SECRET:-your-super-secret-jwt-key-change-this-in-production}
JWT_REFRESH_SECRET: ${JWT_REFRESH_SECRET:-your-super-secret-refresh-key-change-this-too}
CREDS_KEY: ${CREDS_KEY:-your-32-character-secret-key-here}
CREDS_IV: ${CREDS_IV:-your-16-character-iv-here}
# Session
SESSION_EXPIRY: ${SESSION_EXPIRY:-1000 * 60 * 15}
REFRESH_TOKEN_EXPIRY: ${REFRESH_TOKEN_EXPIRY:-1000 * 60 * 60 * 24 * 7}
# Domain
DOMAIN_CLIENT: ${DOMAIN_CLIENT:-http://localhost:3080}
DOMAIN_SERVER: ${DOMAIN_SERVER:-http://localhost:3080}
# Meilisearch
MEILI_HOST: http://meilisearch:7700
MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU}
# RAG API
RAG_API_URL: http://rag_api:8000
# Endpoints
ENDPOINTS: custom
# Custom endpoint pointing to agent-brain
CUSTOM_API_KEY: ${AGENT_BRAIN_API_KEY:-agent-brain-secret-key}
# Debug (optional)
DEBUG_LOGGING: ${DEBUG_LOGGING:-false}
DEBUG_CONSOLE: ${DEBUG_CONSOLE:-false}
volumes:
- ./librechat/librechat.yaml:/app/librechat.yaml:ro
- librechat-images:/app/client/public/images
- librechat-logs:/app/api/logs
networks:
- agent-network
# MongoDB for LibreChat
mongodb:
image: mongo:latest
container_name: librechat-mongodb
restart: unless-stopped
volumes:
- mongodb-data:/data/db
command: mongod --noauth
ports:
- "27017:27017"
networks:
- agent-network
# Meilisearch - Search engine for LibreChat
meilisearch:
image: getmeili/meilisearch:v1.11.3
container_name: librechat-meilisearch
restart: unless-stopped
volumes:
- meilisearch-data:/meili_data
environment:
MEILI_HOST: http://meilisearch:7700
MEILI_HTTP_ADDR: meilisearch:7700
MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFU}
ports:
- "7700:7700"
networks:
- agent-network
# PostgreSQL with pgvector for RAG API
pgvector:
image: ankane/pgvector:latest
container_name: librechat-pgvector
restart: unless-stopped
environment:
POSTGRES_DB: ${POSTGRES_DB:-librechat_rag}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
volumes:
- pgvector-data:/var/lib/postgresql/data
ports:
- "5432:5432"
networks:
- agent-network
# RAG API - Vector database for LibreChat
rag_api:
image: ghcr.io/danny-avila/librechat-rag-api-dev-lite:latest
container_name: librechat-rag-api
restart: unless-stopped
depends_on:
- pgvector
environment:
PORT: 8000
HOST: 0.0.0.0
# PostgreSQL connection (multiple variable names for compatibility)
DB_HOST: pgvector
DB_PORT: 5432
DB_NAME: ${POSTGRES_DB:-librechat_rag}
DB_USER: ${POSTGRES_USER:-postgres}
DB_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
POSTGRES_DB: ${POSTGRES_DB:-librechat_rag}
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
# RAG configuration
COLLECTION_NAME: ${RAG_COLLECTION_NAME:-testcollection}
EMBEDDINGS_PROVIDER: ${RAG_EMBEDDINGS_PROVIDER:-openai}
EMBEDDINGS_MODEL: ${RAG_EMBEDDINGS_MODEL:-text-embedding-3-small}
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
RAG_UPLOAD_DIR: /app/uploads
volumes:
- rag-uploads:/app/uploads
ports:
- "8001:8000"
networks:
- agent-network
# Named volumes for persistent data
volumes:
# MongoDB data
mongodb-data:
driver: local
# Meilisearch data
meilisearch-data:
driver: local
# PostgreSQL pgvector data
pgvector-data:
driver: local
# RAG API uploads
rag-uploads:
driver: local
# LibreChat data
librechat-images:
driver: local
librechat-logs:
driver: local
# Agent Brain data
agent-memory:
driver: local
agent-logs:
driver: local
# Network for inter-service communication
networks:
agent-network:
driver: bridge

View File

@@ -1,27 +0,0 @@
#!/bin/bash
# Script to generate secure keys for LibreChat
# Run this script to generate random secure keys for your .env file
echo "==================================="
echo "LibreChat Security Keys Generator"
echo "==================================="
echo ""
echo "# MongoDB Password"
echo "MONGO_PASSWORD=$(openssl rand -base64 24)"
echo ""
echo "# JWT Secrets"
echo "JWT_SECRET=$(openssl rand -base64 32)"
echo "JWT_REFRESH_SECRET=$(openssl rand -base64 32)"
echo ""
echo "# Credentials Encryption Keys"
echo "CREDS_KEY=$(openssl rand -hex 16)"
echo "CREDS_IV=$(openssl rand -hex 8)"
echo ""
echo "==================================="
echo "Copy these values to your .env file"
echo "==================================="

View File

@@ -4,6 +4,16 @@
version: 1.2.1 version: 1.2.1
cache: true cache: true
endpoints: endpoints:
anthropic:
apiKey: "${ANTHROPIC_API_KEY}"
models:
default: ["claude-sonnet-4-5", "claude-haiku-4-5", "claude-opus-4-5"]
fetch: false
titleConvo: true
titleModel: "claude-haiku-4-5"
modelDisplayLabel: "Claude AI"
streamRate: 1
custom: custom:
# Deepseek # Deepseek
- name: "Deepseek" - name: "Deepseek"
@@ -22,7 +32,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_media_imdb_id.json" url: "http://alfred:8000/manifests/find_media_imdb_id.json"
auth: auth:
type: none type: none
@@ -32,7 +42,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_torrent.json" url: "http://alfred:8000/manifests/find_torrent.json"
auth: auth:
type: none type: none
@@ -42,7 +52,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/add_torrent_by_index.json" url: "http://alfred:8000/manifests/add_torrent_by_index.json"
auth: auth:
type: none type: none
@@ -52,7 +62,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/set_language.json" url: "http://alfred:8000/manifests/set_language.json"
auth: auth:
type: none type: none
@@ -60,7 +70,7 @@ endpoints:
# Backend Local Agent # Backend Local Agent
- name: "Local Agent" - name: "Local Agent"
apiKey: "dummy_key" apiKey: "dummy_key"
baseURL: "http://agent-brain:8000/v1" baseURL: "http://alfred:8000/v1"
models: models:
default: ["local-deepseek-agent"] default: ["local-deepseek-agent"]
fetch: false fetch: false
@@ -75,7 +85,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_media_imdb_id.json" url: "http://alfred:8000/manifests/find_media_imdb_id.json"
auth: auth:
type: none type: none
@@ -85,7 +95,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/find_torrent.json" url: "http://alfred:8000/manifests/find_torrent.json"
auth: auth:
type: none type: none
@@ -95,7 +105,7 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/add_torrent_by_index.json" url: "http://alfred:8000/manifests/add_torrent_by_index.json"
auth: auth:
type: none type: none
@@ -105,6 +115,6 @@ endpoints:
manifest: manifest:
schema: schema:
type: openapi type: openapi
url: "http://agent-brain:8000/manifests/set_language.json" url: "http://alfred:8000/manifests/set_language.json"
auth: auth:
type: none type: none

View File

@@ -35,7 +35,6 @@ files = [
[package.dependencies] [package.dependencies]
idna = ">=2.8" idna = ">=2.8"
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras] [package.extras]
trio = ["trio (>=0.31.0)", "trio (>=0.32.0)"] trio = ["trio (>=0.31.0)", "trio (>=0.32.0)"]
@@ -53,17 +52,17 @@ files = [
[[package]] [[package]]
name = "bump-my-version" name = "bump-my-version"
version = "1.2.5" version = "1.2.6"
description = "Version bump your Python project" description = "Version bump your Python project"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "bump_my_version-1.2.5-py3-none-any.whl", hash = "sha256:57e5718d9fe7d7b6f5ceb68e70cd3c4bd0570d300b4aade15fd1e355febdd351"}, {file = "bump_my_version-1.2.6-py3-none-any.whl", hash = "sha256:a2f567c10574a374b81a9bd6d2bd3cb2ca74befe5c24c3021123773635431659"},
{file = "bump_my_version-1.2.5.tar.gz", hash = "sha256:827af6c7b13111c62b45340f25defd105f566fe0cdbbb70e2c4b2f005b667e1f"}, {file = "bump_my_version-1.2.6.tar.gz", hash = "sha256:1f2f0daa5d699904e9739be8efb51c4c945461bad83cd4da4c89d324d9a18343"},
] ]
[package.dependencies] [package.dependencies]
click = "<8.2.2" click = "<8.4"
httpx = ">=0.28.1" httpx = ">=0.28.1"
pydantic = ">=2.0.0" pydantic = ">=2.0.0"
pydantic-settings = "*" pydantic-settings = "*"
@@ -219,13 +218,13 @@ files = [
[[package]] [[package]]
name = "click" name = "click"
version = "8.2.1" version = "8.3.1"
description = "Composable command line interface toolkit" description = "Composable command line interface toolkit"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
files = [ files = [
{file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
{file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
] ]
[package.dependencies] [package.dependencies]
@@ -244,103 +243,103 @@ files = [
[[package]] [[package]]
name = "coverage" name = "coverage"
version = "7.13.0" version = "7.13.1"
description = "Code coverage measurement for Python" description = "Code coverage measurement for Python"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
files = [ files = [
{file = "coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070"}, {file = "coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147"},
{file = "coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98"}, {file = "coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d"},
{file = "coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5"}, {file = "coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0"},
{file = "coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e"}, {file = "coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90"},
{file = "coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33"}, {file = "coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d"},
{file = "coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791"}, {file = "coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b"},
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032"}, {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6"},
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9"}, {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e"},
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f"}, {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae"},
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8"}, {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29"},
{file = "coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f"}, {file = "coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f"},
{file = "coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303"}, {file = "coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1"},
{file = "coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820"}, {file = "coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88"},
{file = "coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f"}, {file = "coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3"},
{file = "coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96"}, {file = "coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9"},
{file = "coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259"}, {file = "coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee"},
{file = "coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb"}, {file = "coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf"},
{file = "coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9"}, {file = "coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3"},
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030"}, {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef"},
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833"}, {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851"},
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8"}, {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb"},
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753"}, {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba"},
{file = "coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b"}, {file = "coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19"},
{file = "coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe"}, {file = "coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a"},
{file = "coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7"}, {file = "coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c"},
{file = "coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf"}, {file = "coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3"},
{file = "coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f"}, {file = "coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e"},
{file = "coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb"}, {file = "coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c"},
{file = "coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621"}, {file = "coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62"},
{file = "coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74"}, {file = "coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968"},
{file = "coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57"}, {file = "coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e"},
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8"}, {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f"},
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d"}, {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee"},
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b"}, {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf"},
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd"}, {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c"},
{file = "coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef"}, {file = "coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7"},
{file = "coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae"}, {file = "coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6"},
{file = "coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080"}, {file = "coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c"},
{file = "coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf"}, {file = "coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78"},
{file = "coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a"}, {file = "coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b"},
{file = "coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74"}, {file = "coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd"},
{file = "coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6"}, {file = "coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992"},
{file = "coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b"}, {file = "coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4"},
{file = "coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232"}, {file = "coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a"},
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971"}, {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766"},
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d"}, {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4"},
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137"}, {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398"},
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511"}, {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784"},
{file = "coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1"}, {file = "coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461"},
{file = "coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a"}, {file = "coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500"},
{file = "coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6"}, {file = "coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9"},
{file = "coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a"}, {file = "coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc"},
{file = "coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8"}, {file = "coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a"},
{file = "coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053"}, {file = "coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4"},
{file = "coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071"}, {file = "coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6"},
{file = "coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e"}, {file = "coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1"},
{file = "coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493"}, {file = "coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd"},
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0"}, {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c"},
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e"}, {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0"},
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c"}, {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e"},
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e"}, {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53"},
{file = "coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46"}, {file = "coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842"},
{file = "coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39"}, {file = "coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2"},
{file = "coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e"}, {file = "coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09"},
{file = "coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256"}, {file = "coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894"},
{file = "coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a"}, {file = "coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a"},
{file = "coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9"}, {file = "coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f"},
{file = "coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19"}, {file = "coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909"},
{file = "coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be"}, {file = "coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4"},
{file = "coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb"}, {file = "coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75"},
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8"}, {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9"},
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b"}, {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465"},
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9"}, {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864"},
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927"}, {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9"},
{file = "coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f"}, {file = "coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5"},
{file = "coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc"}, {file = "coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a"},
{file = "coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b"}, {file = "coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0"},
{file = "coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28"}, {file = "coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a"},
{file = "coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe"}, {file = "coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6"},
{file = "coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657"}, {file = "coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673"},
{file = "coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff"}, {file = "coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5"},
{file = "coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3"}, {file = "coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d"},
{file = "coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b"}, {file = "coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8"},
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d"}, {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486"},
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e"}, {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564"},
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940"}, {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7"},
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2"}, {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416"},
{file = "coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7"}, {file = "coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f"},
{file = "coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc"}, {file = "coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79"},
{file = "coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a"}, {file = "coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4"},
{file = "coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904"}, {file = "coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573"},
{file = "coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936"}, {file = "coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd"},
] ]
[package.extras] [package.extras]
@@ -373,25 +372,25 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]] [[package]]
name = "fastapi" name = "fastapi"
version = "0.121.3" version = "0.127.1"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.9"
files = [ files = [
{file = "fastapi-0.121.3-py3-none-any.whl", hash = "sha256:0c78fc87587fcd910ca1bbf5bc8ba37b80e119b388a7206b39f0ecc95ebf53e9"}, {file = "fastapi-0.127.1-py3-none-any.whl", hash = "sha256:31d670a4f9373cc6d7994420f98e4dc46ea693145207abc39696746c83a44430"},
{file = "fastapi-0.121.3.tar.gz", hash = "sha256:0055bc24fe53e56a40e9e0ad1ae2baa81622c406e548e501e717634e2dfbc40b"}, {file = "fastapi-0.127.1.tar.gz", hash = "sha256:946a87ee5d931883b562b6bada787d6c8178becee2683cb3f9b980d593206359"},
] ]
[package.dependencies] [package.dependencies]
annotated-doc = ">=0.0.2" annotated-doc = ">=0.0.2"
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" pydantic = ">=2.7.0"
starlette = ">=0.40.0,<0.51.0" starlette = ">=0.40.0,<0.51.0"
typing-extensions = ">=4.8.0" typing-extensions = ">=4.8.0"
[package.extras] [package.extras]
all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
[[package]] [[package]]
name = "filelock" name = "filelock"
@@ -535,13 +534,13 @@ files = [
[[package]] [[package]]
name = "nodeenv" name = "nodeenv"
version = "1.9.1" version = "1.10.0"
description = "Node.js virtual environment builder" description = "Node.js virtual environment builder"
optional = false optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [ files = [
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"},
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"},
] ]
[[package]] [[package]]
@@ -1037,13 +1036,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]] [[package]]
name = "rich-click" name = "rich-click"
version = "1.9.4" version = "1.9.5"
description = "Format click help output nicely with rich" description = "Format click help output nicely with rich"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "rich_click-1.9.4-py3-none-any.whl", hash = "sha256:d70f39938bcecaf5543e8750828cbea94ef51853f7d0e174cda1e10543767389"}, {file = "rich_click-1.9.5-py3-none-any.whl", hash = "sha256:9b195721a773b1acf0e16ff9ec68cef1e7d237e53471e6e3f7ade462f86c403a"},
{file = "rich_click-1.9.4.tar.gz", hash = "sha256:af73dc68e85f3bebb80ce302a642b9fe3b65f3df0ceb42eb9a27c467c1b678c8"}, {file = "rich_click-1.9.5.tar.gz", hash = "sha256:48120531493f1533828da80e13e839d471979ec8d7d0ca7b35f86a1379cc74b6"},
] ]
[package.dependencies] [package.dependencies]
@@ -1057,30 +1056,30 @@ docs = ["markdown-include (>=0.8.1)", "mike (>=2.1.3)", "mkdocs-github-admonitio
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.14.9" version = "0.14.10"
description = "An extremely fast Python linter and code formatter, written in Rust." description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "ruff-0.14.9-py3-none-linux_armv6l.whl", hash = "sha256:f1ec5de1ce150ca6e43691f4a9ef5c04574ad9ca35c8b3b0e18877314aba7e75"}, {file = "ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49"},
{file = "ruff-0.14.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ed9d7417a299fc6030b4f26333bf1117ed82a61ea91238558c0268c14e00d0c2"}, {file = "ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f"},
{file = "ruff-0.14.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d5dc3473c3f0e4a1008d0ef1d75cee24a48e254c8bed3a7afdd2b4392657ed2c"}, {file = "ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84bf7c698fc8f3cb8278830fb6b5a47f9bcc1ed8cb4f689b9dd02698fa840697"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa733093d1f9d88a5d98988d8834ef5d6f9828d03743bf5e338bf980a19fce27"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a1cfb04eda979b20c8c19550c8b5f498df64ff8da151283311ce3199e8b3648"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1e5cb521e5ccf0008bd74d5595a4580313844a42b9103b7388eca5a12c970743"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd429a8926be6bba4befa8cdcf3f4dd2591c413ea5066b1e99155ed245ae42bb"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab208c1b7a492e37caeaf290b1378148f75e13c2225af5d44628b95fd7834273"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d"},
{file = "ruff-0.14.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72034534e5b11e8a593f517b2f2f2b273eb68a30978c6a2d40473ad0aaa4cb4a"}, {file = "ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f"},
{file = "ruff-0.14.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:712ff04f44663f1b90a1195f51525836e3413c8a773574a7b7775554269c30ed"}, {file = "ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f"},
{file = "ruff-0.14.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a111fee1db6f1d5d5810245295527cda1d367c5aa8f42e0fca9a78ede9b4498b"}, {file = "ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d"},
{file = "ruff-0.14.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8769efc71558fecc25eb295ddec7d1030d41a51e9dcf127cbd63ec517f22d567"}, {file = "ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405"},
{file = "ruff-0.14.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:347e3bf16197e8a2de17940cd75fd6491e25c0aa7edf7d61aa03f146a1aa885a"}, {file = "ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60"},
{file = "ruff-0.14.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7715d14e5bccf5b660f54516558aa94781d3eb0838f8e706fb60e3ff6eff03a8"}, {file = "ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830"},
{file = "ruff-0.14.9-py3-none-win32.whl", hash = "sha256:df0937f30aaabe83da172adaf8937003ff28172f59ca9f17883b4213783df197"}, {file = "ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6"},
{file = "ruff-0.14.9-py3-none-win_amd64.whl", hash = "sha256:c0b53a10e61df15a42ed711ec0bda0c582039cf6c754c49c020084c55b5b0bc2"}, {file = "ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154"},
{file = "ruff-0.14.9-py3-none-win_arm64.whl", hash = "sha256:8e821c366517a074046d92f0e9213ed1c13dbc5b37a7fc20b07f79b64d62cc84"}, {file = "ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6"},
{file = "ruff-0.14.9.tar.gz", hash = "sha256:35f85b25dd586381c0cc053f48826109384c81c00ad7ef1bd977bfcc28119d5b"}, {file = "ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4"},
] ]
[[package]] [[package]]
@@ -1096,7 +1095,6 @@ files = [
[package.dependencies] [package.dependencies]
anyio = ">=3.6.2,<5" anyio = ">=3.6.2,<5"
typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""}
[package.extras] [package.extras]
full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]
@@ -1156,13 +1154,13 @@ zstd = ["backports-zstd (>=1.0.0)"]
[[package]] [[package]]
name = "uvicorn" name = "uvicorn"
version = "0.38.0" version = "0.40.0"
description = "The lightning-fast ASGI server." description = "The lightning-fast ASGI server."
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.10"
files = [ files = [
{file = "uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02"}, {file = "uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee"},
{file = "uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d"}, {file = "uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea"},
] ]
[package.dependencies] [package.dependencies]
@@ -1219,5 +1217,5 @@ files = [
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.12" python-versions = "==3.14.2"
content-hash = "b6cec0647accef2c235cededb06cab49f30033fb5e5ce1f6b589a4da5d2a2d8d" content-hash = "ec920fd78ea55c063bf2e4696c328056b50d8d1694f057c2d455ca2619938aac"

Some files were not shown because too many files have changed in this diff Show More